Reviewers: Michael Starzinger,
Description:
Cleaned up the weak lists hanging off the heap a bit.
* Route all access to the 3 weak lists through getters/setters.
* Removed superfluous visiting already done by ProcessWeakReferences.
R=mstarzin...@chromium.org
Please review this at https://codereview.chromium.org/273653006/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+15, -24 lines):
M src/heap.h
M src/heap.cc
M src/mark-compact.cc
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index
13771e613e5dc61cc6ef1163bcecf880e39c7be9..cea7cb4715c209ad11e7ad895799f4325127c0ab
100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -144,9 +144,9 @@ Heap::Heap()
ASSERT(MB >= Page::kPageSize);
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
- native_contexts_list_ = NULL;
- array_buffers_list_ = Smi::FromInt(0);
- allocation_sites_list_ = Smi::FromInt(0);
+ set_native_contexts_list(NULL);
+ set_array_buffers_list(Smi::FromInt(0));
+ set_allocation_sites_list(Smi::FromInt(0));
// Put a dummy entry in the remembered pages so we can find the list in the
// minidump even if there are no real unmapped pages.
RememberUnmappedPage(NULL, false);
@@ -962,7 +962,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
void Heap::ClearJSFunctionResultCaches() {
if (isolate_->bootstrapper()->IsActive()) return;
- Object* context = native_contexts_list_;
+ Object* context = native_contexts_list();
while (!context->IsUndefined()) {
// Get the caches for this context. GC can happen when the context
// is not fully initialized, so the caches can be undefined.
@@ -988,7 +988,7 @@ void Heap::ClearNormalizedMapCaches() {
return;
}
- Object* context = native_contexts_list_;
+ Object* context = native_contexts_list();
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
@@ -1569,9 +1569,6 @@ void Heap::Scavenge() {
collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
}
- // Scavenge object reachable from the native contexts list directly.
- scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
-
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
while (isolate()->global_handles()->IterateObjectGroups(
@@ -1704,7 +1701,7 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer*
retainer,
VisitWeakList<Context>(
this, native_contexts_list(), retainer, record_slots);
// Update the head of the list of contexts.
- native_contexts_list_ = head;
+ set_native_contexts_list(head);
}
@@ -1725,7 +1722,7 @@ void Heap::TearDownArrayBuffers() {
Runtime::FreeArrayBuffer(isolate(), buffer);
o = buffer->weak_next();
}
- array_buffers_list_ = undefined;
+ set_array_buffers_list(undefined);
}
@@ -5292,9 +5289,9 @@ bool Heap::CreateHeapObjects() {
CreateInitialObjects();
CHECK_EQ(0, gc_count_);
- native_contexts_list_ = undefined_value();
- array_buffers_list_ = undefined_value();
- allocation_sites_list_ = undefined_value();
+ set_native_contexts_list(undefined_value());
+ set_array_buffers_list(undefined_value());
+ set_allocation_sites_list(undefined_value());
weak_object_to_code_table_ = undefined_value();
return true;
}
Index: src/heap.h
diff --git a/src/heap.h b/src/heap.h
index
9c100fcf1fc5aabf89d0da6a4aa69a175ebb461d..7a0d90f0ed5a4c15a217fe5bc5cf7b525624b30b
100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -833,17 +833,19 @@ class Heap {
void set_native_contexts_list(Object* object) {
native_contexts_list_ = object;
}
- Object* native_contexts_list() { return native_contexts_list_; }
+ Object* native_contexts_list() const { return native_contexts_list_; }
void set_array_buffers_list(Object* object) {
array_buffers_list_ = object;
}
- Object* array_buffers_list() { return array_buffers_list_; }
+ Object* array_buffers_list() const { return array_buffers_list_; }
void set_allocation_sites_list(Object* object) {
allocation_sites_list_ = object;
}
Object* allocation_sites_list() { return allocation_sites_list_; }
+
+ // Used in CreateAllocationSiteStub and the (de)serializer.
Object** allocation_sites_list_address() { return
&allocation_sites_list_; }
Object* weak_object_to_code_table() { return weak_object_to_code_table_;
}
@@ -936,11 +938,6 @@ class Heap {
return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
}
- // Get address of native contexts list for serialization support.
- Object** native_contexts_list_address() {
- return &native_contexts_list_;
- }
-
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index
ec8e941795b9dceb6f42df6802d50ec85c98f525..38013962dc4576e84e7b029c3d6b247f0004704d
100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2483,7 +2483,7 @@ void MarkCompactCollector::AfterMarking() {
void MarkCompactCollector::ProcessMapCaches() {
- Object* raw_context = heap()->native_contexts_list_;
+ Object* raw_context = heap()->native_contexts_list();
while (raw_context != heap()->undefined_value()) {
Context* context = reinterpret_cast<Context*>(raw_context);
if (IsMarked(context)) {
@@ -3642,9 +3642,6 @@ void
MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
- // Update the head of the native contexts list in the heap.
- updating_visitor.VisitPointer(heap_->native_contexts_list_address());
-
heap_->string_table()->Iterate(&updating_visitor);
updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
if (heap_->weak_object_to_code_table()->IsHashTable()) {
--
--
v8-dev mailing list
v8-dev@googlegroups.com
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to v8-dev+unsubscr...@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.