Revision: 12526
Author:   mstarzin...@chromium.org
Date:     Mon Sep 17 03:04:39 2012
Log:      Integrate map marking into static marking visitor.

This refactors the specialized marking of map contents to be done by the
static marking visitor shared between full and incremental marking. This
also fixes an issue where some maps weren't accounted for in the object
stats tracker. But more importantly, it simplifies the code base.

R=verwa...@chromium.org

Review URL: https://codereview.chromium.org/10919294
http://code.google.com/p/v8/source/detail?r=12526

Modified:
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/incremental-marking-inl.h
 /branches/bleeding_edge/src/incremental-marking.cc
 /branches/bleeding_edge/src/incremental-marking.h
 /branches/bleeding_edge/src/mark-compact-inl.h
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mark-compact.h
 /branches/bleeding_edge/src/objects-visiting-inl.h
 /branches/bleeding_edge/src/objects-visiting.h

=======================================
--- /branches/bleeding_edge/src/heap.h  Fri Sep 14 04:48:31 2012
+++ /branches/bleeding_edge/src/heap.h  Mon Sep 17 03:04:39 2012
@@ -1498,13 +1498,6 @@
   void ClearJSFunctionResultCaches();

   void ClearNormalizedMapCaches();
-
-  // Clears the cache of ICs related to this map.
-  void ClearCacheOnMap(Map* map) {
-    if (FLAG_cleanup_code_caches_at_gc) {
-      map->ClearCodeCache(this);
-    }
-  }

   GCTracer* tracer() { return tracer_; }

=======================================
--- /branches/bleeding_edge/src/incremental-marking-inl.h Tue Sep 11 07:01:39 2012
+++ /branches/bleeding_edge/src/incremental-marking-inl.h Mon Sep 17 03:04:39 2012
@@ -123,27 +123,6 @@
   Marking::WhiteToGrey(mark_bit);
   marking_deque_.PushGrey(obj);
 }
-
-
-bool IncrementalMarking::MarkObjectAndPush(HeapObject* obj) {
-  MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (!mark_bit.Get()) {
-    WhiteToGreyAndPush(obj, mark_bit);
-    return true;
-  }
-  return false;
-}
-
-
-bool IncrementalMarking::MarkObjectWithoutPush(HeapObject* obj) {
-  MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (!mark_bit.Get()) {
-    mark_bit.Set();
-    MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-    return true;
-  }
-  return false;
-}


 } }  // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/incremental-marking.cc Tue Sep 11 07:01:39 2012
+++ /branches/bleeding_edge/src/incremental-marking.cc Mon Sep 17 03:04:39 2012
@@ -44,7 +44,6 @@
       state_(STOPPED),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(false),
-      marker_(this, heap->mark_compact_collector()),
       steps_count_(0),
       steps_took_(0),
       longest_step_(0.0),
@@ -226,6 +225,7 @@
     }
   }

+  // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
@@ -238,6 +238,20 @@
heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
     }
   }
+
+  // Marks the object black without pushing it on the marking stack.
+  // Returns true if object needed marking and false otherwise.
+  INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
+    HeapObject* heap_object = HeapObject::cast(obj);
+    MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+    if (Marking::IsWhite(mark_bit)) {
+      mark_bit.Set();
+      MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
+                                            heap_object->Size());
+      return true;
+    }
+    return false;
+  }
 };


@@ -641,23 +655,6 @@
       } else if (map == native_context_map) {
         // Native contexts have weak fields.
         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-      } else if (map->instance_type() == MAP_TYPE) {
-        Map* map = Map::cast(obj);
-        heap_->ClearCacheOnMap(map);
-
-        // When map collection is enabled we have to mark through map's
-        // transitions and back pointers in a special way to make these links
-        // weak. Only maps for subclasses of JSReceiver can have transitions.
-        STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-        if (FLAG_collect_maps &&
-            map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-          marker_.MarkMapContents(map);
-        } else {
-          IncrementalMarkingMarkingVisitor::VisitPointers(
-              heap_,
-              HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
-              HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
-        }
       } else {
         MarkBit map_mark_bit = Marking::MarkBitFrom(map);
         if (Marking::IsWhite(map_mark_bit)) {
@@ -822,23 +819,6 @@
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());

         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-      } else if (map->instance_type() == MAP_TYPE) {
-        Map* map = Map::cast(obj);
-        heap_->ClearCacheOnMap(map);
-
-        // When map collection is enabled we have to mark through map's
-        // transitions and back pointers in a special way to make these links
-        // weak. Only maps for subclasses of JSReceiver can have transitions.
-        STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
-        if (FLAG_collect_maps &&
-            map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-          marker_.MarkMapContents(map);
-        } else {
-          IncrementalMarkingMarkingVisitor::VisitPointers(
-              heap_,
-              HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
-              HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
-        }
       } else {
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
       }
=======================================
--- /branches/bleeding_edge/src/incremental-marking.h Tue Sep 11 07:01:39 2012
+++ /branches/bleeding_edge/src/incremental-marking.h Mon Sep 17 03:04:39 2012
@@ -174,16 +174,6 @@
     ASSERT(Marking::IsBlack(mark_bit));
     return true;
   }
-
-  // Marks the object grey and pushes it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  // This is for incremental marking only.
-  INLINE(bool MarkObjectAndPush(HeapObject* obj));
-
-  // Marks the object black without pushing it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  // This is for incremental marking only.
-  INLINE(bool MarkObjectWithoutPush(HeapObject* obj));

   inline int steps_count() {
     return steps_count_;
@@ -275,7 +265,6 @@
   VirtualMemory* marking_deque_memory_;
   bool marking_deque_memory_committed_;
   MarkingDeque marking_deque_;
-  Marker<IncrementalMarking> marker_;

   int steps_count_;
   double steps_took_;
=======================================
--- /branches/bleeding_edge/src/mark-compact-inl.h      Wed May 16 03:07:50 2012
+++ /branches/bleeding_edge/src/mark-compact-inl.h      Mon Sep 17 03:04:39 2012
@@ -50,15 +50,6 @@
   abort_incremental_marking_ =
       ((flags & Heap::kAbortIncrementalMarkingMask) != 0);
 }
-
-
-bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
-  if (MarkObjectWithoutPush(obj)) {
-    marking_deque_.PushBlack(obj);
-    return true;
-  }
-  return false;
-}


 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
@@ -66,19 +57,11 @@
   if (!mark_bit.Get()) {
     mark_bit.Set();
     MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-    ProcessNewlyMarkedObject(obj);
+    ASSERT(IsMarked(obj));
+    ASSERT(HEAP->Contains(obj));
+    marking_deque_.PushBlack(obj);
   }
 }
-
-
-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
-  MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (!mark_bit.Get()) {
-    SetMark(obj, mark_bit);
-    return true;
-  }
-  return false;
-}


 void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
@@ -86,9 +69,6 @@
   ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
   mark_bit.Set();
   MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-  if (obj->IsMap()) {
-    heap_->ClearCacheOnMap(Map::cast(obj));
-  }
 }


=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Fri Sep 14 06:37:41 2012
+++ /branches/bleeding_edge/src/mark-compact.cc Mon Sep 17 03:04:39 2012
@@ -68,8 +68,7 @@
       migration_slots_buffer_(NULL),
       heap_(NULL),
       code_flusher_(NULL),
-      encountered_weak_maps_(NULL),
-      marker_(this, this) { }
+      encountered_weak_maps_(NULL) { }


 #ifdef DEBUG
@@ -1067,10 +1066,22 @@
     }
   }

+  // Marks the object black and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
     MarkBit mark = Marking::MarkBitFrom(object);
     heap->mark_compact_collector()->MarkObject(object, mark);
   }
+
+  // Marks the object black without pushing it on the marking stack.
+  // Returns true if object needed marking and false otherwise.
+  INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
+    MarkBit mark_bit = Marking::MarkBitFrom(object);
+    if (!mark_bit.Get()) {
+      heap->mark_compact_collector()->SetMark(object, mark_bit);
+      return true;
+    }
+    return false;
+  }

   // Mark object pointed to by p.
   INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
@@ -1879,97 +1890,6 @@
 };


-void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
-  ASSERT(IsMarked(object));
-  ASSERT(HEAP->Contains(object));
-  if (object->IsMap()) {
-    Map* map = Map::cast(object);
-    heap_->ClearCacheOnMap(map);
-
-    // When map collection is enabled we have to mark through map's transitions
-    // in a special way to make transition links weak. Only maps for subclasses
-    // of JSReceiver can have transitions.
-    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
-      marker_.MarkMapContents(map);
-    } else {
-      marking_deque_.PushBlack(map);
-    }
-  } else {
-    marking_deque_.PushBlack(object);
-  }
-}
-
-
-// Force instantiation of template instances.
-template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
-template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
-
-
-template <class T>
-void Marker<T>::MarkMapContents(Map* map) {
-  // Make sure that the back pointer stored either in the map itself or inside
-  // its transitions array is marked. Treat pointers in the transitions array as
-  // weak and also mark that array to prevent visiting it later.
- base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
-
-  Object** transitions_slot =
-      HeapObject::RawField(map, Map::kTransitionsOrBackPointerOffset);
-  Object* transitions = *transitions_slot;
-  if (transitions->IsTransitionArray()) {
-    MarkTransitionArray(reinterpret_cast<TransitionArray*>(transitions));
-  } else {
-    // Already marked by marking map->GetBackPointer().
-    ASSERT(transitions->IsMap() || transitions->IsUndefined());
-  }
-
-  // Mark the Object* fields of the Map. Since the transitions array has been
-  // marked already, it is fine that one of these fields contains a pointer to
-  // it.
-  Object** start_slot =
-      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset);
- Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
-  for (Object** slot = start_slot; slot < end_slot; slot++) {
-    Object* obj = *slot;
-    if (!obj->NonFailureIsHeapObject()) continue;
-    mark_compact_collector()->RecordSlot(start_slot, slot, obj);
-    base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
-  }
-}
-
-
-template <class T>
-void Marker<T>::MarkTransitionArray(TransitionArray* transitions) {
-  if (!base_marker()->MarkObjectWithoutPush(transitions)) return;
-  Object** transitions_start = transitions->data_start();
-
- // We don't have to record the descriptors_pointer slot since the cell space
-  // is not compacted.
- JSGlobalPropertyCell* descriptors_cell = transitions->descriptors_pointer();
-  base_marker()->MarkObjectAndPush(descriptors_cell);
-
-  if (transitions->HasPrototypeTransitions()) {
- // Mark prototype transitions array but don't push it into marking stack.
-    // This will make references from it weak. We will clean dead prototype
-    // transitions in ClearNonLiveTransitions.
-    Object** proto_trans_slot = transitions->GetPrototypeTransitionsSlot();
- HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
-    base_marker()->MarkObjectWithoutPush(prototype_transitions);
-    mark_compact_collector()->RecordSlot(
-        transitions_start, proto_trans_slot, prototype_transitions);
-  }
-
-  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
-    Object** key_slot = transitions->GetKeySlot(i);
-    Object* key = *key_slot;
-    if (key->IsHeapObject()) {
-      base_marker()->MarkObjectAndPush(HeapObject::cast(key));
- mark_compact_collector()->RecordSlot(transitions_start, key_slot, key);
-    }
-  }
-}
-
-
 // Fill the marking stack with overflowed objects returned by the given
 // iterator.  Stop when the marking stack is filled or the end of the space
 // is reached, whichever comes first.
=======================================
--- /branches/bleeding_edge/src/mark-compact.h  Tue Sep 11 07:01:39 2012
+++ /branches/bleeding_edge/src/mark-compact.h  Mon Sep 17 03:04:39 2012
@@ -403,33 +403,6 @@
 };


-// -------------------------------------------------------------------------
-// Marker shared between incremental and non-incremental marking
-template<class BaseMarker> class Marker {
- public:
- Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
-      : base_marker_(base_marker),
-        mark_compact_collector_(mark_compact_collector) {}
-
-  // Mark pointers in a Map and its DescriptorArray together, possibly
-  // treating transitions or back pointers weak.
-  void MarkMapContents(Map* map);
-  void MarkTransitionArray(TransitionArray* transitions);
-
- private:
-  BaseMarker* base_marker() {
-    return base_marker_;
-  }
-
-  MarkCompactCollector* mark_compact_collector() {
-    return mark_compact_collector_;
-  }
-
-  BaseMarker* base_marker_;
-  MarkCompactCollector* mark_compact_collector_;
-};
-
-
 // Defined in isolate.h.
 class ThreadLocalTop;

@@ -656,8 +629,6 @@
   friend class MarkCompactMarkingVisitor;
   friend class CodeMarkingVisitor;
   friend class SharedFunctionInfoMarkingVisitor;
-  friend class Marker<IncrementalMarking>;
-  friend class Marker<MarkCompactCollector>;

   // Mark non-optimize code for functions inlined into the given optimized
   // code. This will prevent it from being flushed.
@@ -675,25 +646,13 @@
   void AfterMarking();

   // Marks the object black and pushes it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  // This is for non-incremental marking only.
-  INLINE(bool MarkObjectAndPush(HeapObject* obj));
-
-  // Marks the object black and pushes it on the marking stack.
   // This is for non-incremental marking only.
   INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));

-  // Marks the object black without pushing it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  // This is for non-incremental marking only.
-  INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
-
   // Marks the object black assuming that it is not yet marked.
   // This is for non-incremental marking only.
   INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));

-  void ProcessNewlyMarkedObject(HeapObject* obj);
-
   // Mark the heap roots and all objects reachable from them.
   void MarkRoots(RootMarkingVisitor* visitor);

@@ -796,7 +755,6 @@
   MarkingDeque marking_deque_;
   CodeFlusher* code_flusher_;
   Object* encountered_weak_maps_;
-  Marker<MarkCompactCollector> marker_;

   List<Page*> evacuation_candidates_;
   List<Code*> invalidated_code_;
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Mon Aug 27 09:08:27 2012
+++ /branches/bleeding_edge/src/objects-visiting-inl.h Mon Sep 17 03:04:39 2012
@@ -134,12 +134,9 @@
                   Oddball::BodyDescriptor,
                   void>::Visit);

-  table_.Register(kVisitMap,
-                  &FixedBodyVisitor<StaticVisitor,
-                  Map::BodyDescriptor,
-                  void>::Visit);
+  table_.Register(kVisitMap, &VisitMap);

-  table_.Register(kVisitCode, &StaticVisitor::VisitCode);
+  table_.Register(kVisitCode, &VisitCode);

   // Registration for kVisitSharedFunctionInfo is done by StaticVisitor.

@@ -244,6 +241,32 @@
     collector->RecordSlot(slot, slot, *slot);
   }
 }
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitMap(
+    Map* map, HeapObject* object) {
+  Heap* heap = map->GetHeap();
+  Map* map_object = Map::cast(object);
+
+  // Clears the cache of ICs related to this map.
+  if (FLAG_cleanup_code_caches_at_gc) {
+    map_object->ClearCodeCache(heap);
+  }
+
+  // When map collection is enabled we have to mark through map's
+  // transitions and back pointers in a special way to make these links
+  // weak.  Only maps for subclasses of JSReceiver can have transitions.
+  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+  if (FLAG_collect_maps &&
+      map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+    MarkMapContents(heap, map_object);
+  } else {
+    StaticVisitor::VisitPointers(heap,
+        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
+  }
+}


 template<typename StaticVisitor>
@@ -267,6 +290,60 @@
       HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
       HeapObject::RawField(object, last_property_offset));
 }
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
+    Heap* heap, Map* map) {
+  // Make sure that the back pointer stored either in the map itself or
+  // inside its transitions array is marked. Skip recording the back
+  // pointer slot since map space is not compacted.
+  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));
+
+  // Treat pointers in the transitions array as weak and also mark that
+  // array to prevent visiting it later. Skip recording the transition
+  // array slot, since it will be implicitly recorded when the pointer
+  // fields of this map are visited.
+  TransitionArray* transitions = map->unchecked_transition_array();
+  if (transitions->IsTransitionArray()) {
+    MarkTransitionArray(heap, transitions);
+  } else {
+    // Already marked by marking map->GetBackPointer() above.
+    ASSERT(transitions->IsMap() || transitions->IsUndefined());
+  }
+
+  // Mark the pointer fields of the Map. Since the transitions array has
+  // been marked already, it is fine that one of these fields contains a
+  // pointer to it.
+  StaticVisitor::VisitPointers(heap,
+      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+}
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
+    Heap* heap, TransitionArray* transitions) {
+  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
+
+  // Skip recording the descriptors_pointer slot since the cell space
+  // is not compacted and descriptors are referenced through a cell.
+  StaticVisitor::MarkObject(heap, transitions->descriptors_pointer());
+
+  if (transitions->HasPrototypeTransitions()) {
+    // Mark prototype transitions array but do not push it onto marking
+    // stack, this will make references from it weak. We will clean dead
+    // prototype transitions in ClearNonLiveTransitions.
+    Object** slot = transitions->GetPrototypeTransitionsSlot();
+    HeapObject* obj = HeapObject::cast(*slot);
+    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
+    StaticVisitor::MarkObjectWithoutPush(heap, obj);
+  }
+
+  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
+    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
+  }
+}


 void Code::CodeIterateBody(ObjectVisitor* v) {
=======================================
--- /branches/bleeding_edge/src/objects-visiting.h      Fri Aug 17 02:03:08 2012
+++ /branches/bleeding_edge/src/objects-visiting.h      Mon Sep 17 03:04:39 2012
@@ -398,9 +398,15 @@
   static inline void VisitNativeContext(Map* map, HeapObject* object);

  protected:
+  static inline void VisitMap(Map* map, HeapObject* object);
   static inline void VisitCode(Map* map, HeapObject* object);
   static inline void VisitJSRegExp(Map* map, HeapObject* object);

+  // Mark pointers in a Map and its TransitionArray together, possibly
+  // treating transitions or back pointers weak.
+  static void MarkMapContents(Heap* heap, Map* map);
+ static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
+
   class DataObjectVisitor {
    public:
     template<int size>

--
v8-dev mailing list
v8-dev@googlegroups.com
http://groups.google.com/group/v8-dev

Reply via email to