Revision: 8575
Author: [email protected]
Date: Fri Jul 8 02:16:20 2011
Log: Fix compilation on ARM and x64.
Enable incremental slots collection on x64.
Fix test-api/ScriptUsingStringResource test.
[email protected]
Review URL: http://codereview.chromium.org/7322012
http://code.google.com/p/v8/source/detail?r=8575
Modified:
/branches/experimental/gc/src/arm/assembler-arm-inl.h
/branches/experimental/gc/src/arm/code-stubs-arm.h
/branches/experimental/gc/src/ia32/code-stubs-ia32.cc
/branches/experimental/gc/src/incremental-marking.h
/branches/experimental/gc/src/x64/assembler-x64-inl.h
/branches/experimental/gc/src/x64/code-stubs-x64.cc
/branches/experimental/gc/src/x64/code-stubs-x64.h
/branches/experimental/gc/test/cctest/test-api.cc
=======================================
--- /branches/experimental/gc/src/arm/assembler-arm-inl.h Wed May 25
06:12:33 2011
+++ /branches/experimental/gc/src/arm/assembler-arm-inl.h Fri Jul 8
02:16:20 2011
@@ -79,8 +79,9 @@
Assembler::set_target_address_at(pc_, target);
if (code != NULL && IsCodeTarget(rmode_)) {
Object* target_code = Code::GetCodeFromTargetAddress(target);
+ // TODO(gc) We do not compact code pages.
code->GetHeap()->incremental_marking()->RecordWrite(
- code, HeapObject::cast(target_code));
+ code, NULL, HeapObject::cast(target_code));
}
}
@@ -107,8 +108,11 @@
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
if (code != NULL && target->IsHeapObject()) {
+ // It is safe to record this slot as a simple in-object slot
+ // because it resides outside of code stream and updating it
+ // does not require code cache flushing.
code->GetHeap()->incremental_marking()->RecordWrite(
- code, HeapObject::cast(target));
+ code, target_object_address(), HeapObject::cast(target));
}
}
@@ -141,7 +145,8 @@
Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
Memory::Address_at(pc_) = address;
if (code != NULL) {
- code->GetHeap()->incremental_marking()->RecordWrite(code, cell);
+ code->GetHeap()->incremental_marking()->RecordWrite(
+ code, &Memory::Object_at(pc_), cell);
}
}
=======================================
--- /branches/experimental/gc/src/arm/code-stubs-arm.h Fri Jun 10 14:58:26
2011
+++ /branches/experimental/gc/src/arm/code-stubs-arm.h Fri Jul 8 02:16:20
2011
@@ -460,8 +460,14 @@
value) { // One scratch reg.
}
- static void Patch(Code* stub, bool enable) {
- ASSERT(!enable);
+ enum Mode {
+ STORE_BUFFER_ONLY,
+ INCREMENTAL,
+ INCREMENTAL_COMPACTION
+ };
+
+ static void Patch(Code* stub, Mode mode) {
+ ASSERT(mode == STORE_BUFFER_ONLY);
}
private:
=======================================
--- /branches/experimental/gc/src/ia32/code-stubs-ia32.cc Wed Jul 6
13:56:48 2011
+++ /branches/experimental/gc/src/ia32/code-stubs-ia32.cc Fri Jul 8
02:16:20 2011
@@ -6240,7 +6240,7 @@
// The first two instructions are generated with labels so as to get the
// offset fixed up correctly by the bind(Label*) call. We patch it back
and
- // forth between a 2-byte compare instruction (a nop in this position)
and the
+ // forth between compare instructions (nops in this position) and the
// real branch when we start and stop incremental heap marking.
__ jmp(&skip_to_incremental_noncompacting, Label::kNear);
__ jmp(&skip_to_incremental_compacting, Label::kFar);
@@ -6258,12 +6258,15 @@
__ bind(&skip_to_incremental_compacting);
GenerateIncremental(masm, INCREMENTAL_COMPACTION);
- if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
+ // TODO(1545) ensure that GC can't happen after stub was generated but
before
+ // it was added to a stub cache.
+ IncrementalMarking* marking =
masm->isolate()->heap()->incremental_marking();
+ if (!marking->IsMarking() || marking->IsCompacting()) {
ASSERT(masm->byte_at(0) == kTwoByteJumpInstruction);
masm->set_byte_at(0, kTwoByteNopInstruction);
}
- if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
+ if (!marking->IsMarking()) {
ASSERT(masm->byte_at(2) == kFiveByteJumpInstruction);
masm->set_byte_at(2, kFiveByteNopInstruction);
}
@@ -6311,9 +6314,7 @@
}
-void RecordWriteStub::InformIncrementalMarker(
- MacroAssembler* masm,
- RecordWriteStub::Mode mode) {
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode
mode) {
regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
int argument_count = 3;
__ PrepareCallCFunction(argument_count, regs_.scratch0());
@@ -6348,8 +6349,8 @@
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
MacroAssembler* masm,
- RecordWriteStub::OnNoNeedToInformIncrementalMarker on_no_need,
- RecordWriteStub::Mode mode) {
+ OnNoNeedToInformIncrementalMarker on_no_need,
+ Mode mode) {
Label object_is_black, need_incremental, need_incremental_pop_object;
// Let's look at the color of the object: If it is not black we don't
have
=======================================
--- /branches/experimental/gc/src/incremental-marking.h Wed Jul 6 13:56:48
2011
+++ /branches/experimental/gc/src/incremental-marking.h Fri Jul 8 02:16:20
2011
@@ -159,6 +159,8 @@
}
MarkingDeque* marking_deque() { return &marking_deque_; }
+
+ bool IsCompacting() { return IsMarking() && is_compacting_; }
private:
void set_should_hurry(bool val) {
=======================================
--- /branches/experimental/gc/src/x64/assembler-x64-inl.h Tue May 17
02:31:37 2011
+++ /branches/experimental/gc/src/x64/assembler-x64-inl.h Fri Jul 8
02:16:20 2011
@@ -244,8 +244,9 @@
Assembler::set_target_address_at(pc_, target);
Object* target_code = Code::GetCodeFromTargetAddress(target);
if (code != NULL) {
+ // TODO(gc) We do not compact code pages.
code->GetHeap()->incremental_marking()->RecordWrite(
- code, HeapObject::cast(target_code));
+ code, NULL, HeapObject::cast(target_code));
}
} else {
Memory::Address_at(pc_) = target;
@@ -284,11 +285,11 @@
void RelocInfo::set_target_object(Object* target, Code* code) {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
- *reinterpret_cast<Object**>(pc_) = target;
+ Memory::Object_at(pc_) = target;
CPU::FlushICache(pc_, sizeof(Address));
if (code != NULL && target->IsHeapObject()) {
code->GetHeap()->incremental_marking()->RecordWrite(
- code, HeapObject::cast(target));
+ code, &Memory::Object_at(pc_), HeapObject::cast(target));
}
}
@@ -316,7 +317,8 @@
Memory::Address_at(pc_) = address;
CPU::FlushICache(pc_, sizeof(Address));
if (code != NULL) {
- code->GetHeap()->incremental_marking()->RecordWrite(code, cell);
+ code->GetHeap()->incremental_marking()->RecordWrite(
+ code, &Memory::Object_at(pc_), cell);
}
}
=======================================
--- /branches/experimental/gc/src/x64/code-stubs-x64.cc Fri Jun 10 14:58:26
2011
+++ /branches/experimental/gc/src/x64/code-stubs-x64.cc Fri Jul 8 02:16:20
2011
@@ -5158,18 +5158,16 @@
// we keep the GC informed. The word in the object where the value has
been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
- Label skip_non_incremental_part;
-
- // The first instruction is generated as a label so as to get the offset
- // fixed up correctly by the bind(Label*) call. We patch it back and
forth
- // between a 2-byte compare instruction (a nop in this position) and the
real
- // branch when we start and stop incremental heap marking.
- __ jmp(&skip_non_incremental_part, Label::kNear);
- if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
- ASSERT(masm->byte_at(masm->pc_offset() - 2) ==
- kSkipNonIncrementalPartInstruction);
- masm->set_byte_at(masm->pc_offset() - 2, kTwoByteNopInstruction);
- }
+ Label skip_to_incremental_noncompacting;
+ Label skip_to_incremental_compacting;
+
+ // The first two instructions are generated with labels so as to get the
+ // offset fixed up correctly by the bind(Label*) call. We patch it back
and
+ // forth between compare instructions (nops in this position) and the
+ // real branch when we start and stop incremental heap marking.
+ // See RecordWriteStub::Patch for details.
+ __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
+ __ jmp(&skip_to_incremental_compacting, Label::kFar);
if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
__ RememberedSetHelper(
@@ -5178,12 +5176,28 @@
__ ret(0);
}
- __ bind(&skip_non_incremental_part);
- GenerateIncremental(masm);
+ __ bind(&skip_to_incremental_noncompacting);
+ GenerateIncremental(masm, INCREMENTAL);
+
+ __ bind(&skip_to_incremental_compacting);
+ GenerateIncremental(masm, INCREMENTAL_COMPACTION);
+
+ // TODO(1545) ensure that GC can't happen after stub was generated but
before
+ // it was added to a stub cache.
+ IncrementalMarking* marking =
masm->isolate()->heap()->incremental_marking();
+ if (!marking->IsMarking() || marking->IsCompacting()) {
+ ASSERT(masm->byte_at(0) == kTwoByteJumpInstruction);
+ masm->set_byte_at(0, kTwoByteNopInstruction);
+ }
+
+ if (!marking->IsMarking()) {
+ ASSERT(masm->byte_at(2) == kFiveByteJumpInstruction);
+ masm->set_byte_at(2, kFiveByteNopInstruction);
+ }
}
-void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) {
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode)
{
regs_.Save(masm);
if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
@@ -5203,8 +5217,8 @@
// First notify the incremental marker if necessary, then update the
// remembered set.
CheckNeedsToInformIncrementalMarker(
- masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker);
- InformIncrementalMarker(masm);
+ masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
+ InformIncrementalMarker(masm, mode);
regs_.Restore(masm);
__ RememberedSetHelper(
address_, value_, save_fp_regs_mode_,
MacroAssembler::kReturnAtEnd);
@@ -5213,14 +5227,14 @@
}
CheckNeedsToInformIncrementalMarker(
- masm, kReturnOnNoNeedToInformIncrementalMarker);
- InformIncrementalMarker(masm);
+ masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
+ InformIncrementalMarker(masm, mode);
regs_.Restore(masm);
__ ret(0);
}
-void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode
mode) {
regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
#ifdef _WIN64
Register arg3 = r8;
@@ -5231,32 +5245,44 @@
Register arg2 = rsi;
Register arg1 = rdi;
#endif
- bool save_address = arg1.is(regs_.address());
- if (save_address) {
- __ movq(arg3, regs_.address());
- }
+ Register address =
+ arg1.is(regs_.address()) ? kScratchRegister : regs_.address();
+ ASSERT(!address.is(regs_.object()));
+ ASSERT(!address.is(arg1));
+ __ Move(address, regs_.address());
__ Move(arg1, regs_.object());
- if (save_address) {
- __ movq(arg2, Operand(arg3, 0));
+ if (mode == INCREMENTAL_COMPACTION) {
+ // TODO(gc) Can we just set address arg2 in the beginning?
+ __ Move(arg2, address);
} else {
- __ movq(arg2, Operand(regs_.address(), 0));
+ ASSERT(mode == INCREMENTAL);
+ __ movq(arg2, Operand(address, 0));
}
__ LoadAddress(arg3, ExternalReference::isolate_address());
// TODO(gc): Create a fast version of this C function that does not
duplicate
// the checks done in the stub.
int argument_count = 3;
__ PrepareCallCFunction(argument_count);
- __ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
- argument_count);
+ if (mode == INCREMENTAL_COMPACTION) {
+ __ CallCFunction(
+ ExternalReference::incremental_evacuation_record_write_function(
+ masm->isolate()),
+ argument_count);
+ } else {
+ ASSERT(mode == INCREMENTAL);
+ __ CallCFunction(
+ ExternalReference::incremental_marking_record_write_function(
+ masm->isolate()),
+ argument_count);
+ }
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
MacroAssembler* masm,
- RecordWriteStub::OnNoNeedToInformIncrementalMarker on_no_need) {
+ OnNoNeedToInformIncrementalMarker on_no_need,
+ Mode mode) {
Label on_black;
// Let's look at the color of the object: If it is not black we don't
have
=======================================
--- /branches/experimental/gc/src/x64/code-stubs-x64.h Fri Jun 10 14:58:26
2011
+++ /branches/experimental/gc/src/x64/code-stubs-x64.h Fri Jul 8 02:16:20
2011
@@ -548,22 +548,56 @@
value) { // One scratch reg.
}
- static const byte kTwoByteNopInstruction = 0x3c; // Cmpb al,
#imm8.
- static const byte kSkipNonIncrementalPartInstruction = 0xeb; // Jmp
#imm8.
-
- static byte GetInstruction(bool enable) {
- // Can't use ternary operator here, because gcc makes an undefined
- // reference to a static const int.
- if (enable) {
- return kSkipNonIncrementalPartInstruction;
- } else {
- return kTwoByteNopInstruction;
- }
+ enum Mode {
+ STORE_BUFFER_ONLY,
+ INCREMENTAL,
+ INCREMENTAL_COMPACTION
+ };
+
+ static const byte kTwoByteNopInstruction = 0x3c; // Cmpb al, #imm8.
+ static const byte kTwoByteJumpInstruction = 0xeb; // Jmp #imm8.
+
+ static const byte kFiveByteNopInstruction = 0x3d; // Cmpl eax, #imm32.
+ static const byte kFiveByteJumpInstruction = 0xe9; // Jmp #imm32.
+
+ static Mode GetMode(Code* stub) {
+ byte first_instruction = stub->instruction_start()[0];
+ byte second_instruction = stub->instruction_start()[2];
+
+ if (first_instruction == kTwoByteJumpInstruction) {
+ return INCREMENTAL;
+ }
+
+ ASSERT(first_instruction == kTwoByteNopInstruction);
+
+ if (second_instruction == kFiveByteJumpInstruction) {
+ return INCREMENTAL_COMPACTION;
+ }
+
+ ASSERT(second_instruction == kFiveByteNopInstruction);
+
+ return STORE_BUFFER_ONLY;
}
- static void Patch(Code* stub, bool enable) {
- ASSERT(*stub->instruction_start() == GetInstruction(!enable));
- *stub->instruction_start() = GetInstruction(enable);
+ static void Patch(Code* stub, Mode mode) {
+ switch (mode) {
+ case STORE_BUFFER_ONLY:
+ ASSERT(GetMode(stub) == INCREMENTAL ||
+ GetMode(stub) == INCREMENTAL_COMPACTION);
+ stub->instruction_start()[0] = kTwoByteNopInstruction;
+ stub->instruction_start()[2] = kFiveByteNopInstruction;
+ break;
+ case INCREMENTAL:
+ ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
+ stub->instruction_start()[0] = kTwoByteJumpInstruction;
+ break;
+ case INCREMENTAL_COMPACTION:
+ ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
+ stub->instruction_start()[0] = kTwoByteNopInstruction;
+ stub->instruction_start()[2] = kFiveByteJumpInstruction;
+ break;
+ }
+ ASSERT(GetMode(stub) == mode);
}
private:
@@ -695,11 +729,12 @@
};
void Generate(MacroAssembler* masm);
- void GenerateIncremental(MacroAssembler* masm);
+ void GenerateIncremental(MacroAssembler* masm, Mode mode);
void CheckNeedsToInformIncrementalMarker(
MacroAssembler* masm,
- OnNoNeedToInformIncrementalMarker on_no_need);
- void InformIncrementalMarker(MacroAssembler* masm);
+ OnNoNeedToInformIncrementalMarker on_no_need,
+ Mode mode);
+ void InformIncrementalMarker(MacroAssembler* masm, Mode mode);
Major MajorKey() { return RecordWrite; }
=======================================
--- /branches/experimental/gc/test/cctest/test-api.cc Tue May 24 05:03:26
2011
+++ /branches/experimental/gc/test/cctest/test-api.cc Fri Jul 8 02:16:20
2011
@@ -408,7 +408,7 @@
CHECK_EQ(0, TestResource::dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
- HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+ HEAP->CollectAllAvailableGarbage();
CHECK_EQ(1, TestResource::dispose_count);
}
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev