Revision: 5679
Author: ant...@chromium.org
Date: Wed Oct 20 12:28:30 2010
Log: Revert r5455 from the 2.4 branch.

This is an experimental revert to see whether r5455 provoked crbug.com/59202.

It is more involved than a plain unpatch due to merge conflicts.

Review URL: http://codereview.chromium.org/3959001
http://code.google.com/p/v8/source/detail?r=5679

Modified:
 /branches/2.4/src/global-handles.cc
 /branches/2.4/src/global-handles.h
 /branches/2.4/src/heap-inl.h
 /branches/2.4/src/heap.cc
 /branches/2.4/src/heap.h

=======================================
--- /branches/2.4/src/global-handles.cc Thu Sep 30 03:07:24 2010
+++ /branches/2.4/src/global-handles.cc Wed Oct 20 12:28:30 2010
@@ -372,14 +372,13 @@

 int post_gc_processing_count = 0;

-bool GlobalHandles::PostGarbageCollectionProcessing() {
+void GlobalHandles::PostGarbageCollectionProcessing() {
   // Process weak global handle callbacks. This must be done after the
   // GC is completely done, because the callbacks may invoke arbitrary
   // API functions.
   // At the same time deallocate all DESTROYED nodes.
   ASSERT(Heap::gc_state() == Heap::NOT_IN_GC);
   const int initial_post_gc_processing_count = ++post_gc_processing_count;
-  bool weak_callback_invoked = false;
   Node** p = &head_;
   while (*p != NULL) {
     if ((*p)->PostGarbageCollectionProcessing()) {
@@ -390,7 +389,6 @@
         // restart the processing).
         break;
       }
-      weak_callback_invoked = true;
     }
     if ((*p)->state_ == Node::DESTROYED) {
       // Delete the link.
@@ -409,7 +407,6 @@
   if (first_deallocated()) {
     first_deallocated()->set_next(head());
   }
-  return weak_callback_invoked;
 }


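Note on the dropped return value: the bool reported whether any weak
callback ran. r5455 used it to drive an aggressive retry loop, condensed
below from the code this revert removes in the heap.cc hunk further down
(MarkCompact, tracer, and UpdateOldSpaceLimits as in that code):

  // Condensed from the removed AGGRESSIVE path: rerun mark-compact while
  // weak callbacks keep firing, since callbacks can create new garbage.
  // Callbacks execute arbitrary code, so cap the number of retries.
  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!GlobalHandles::PostGarbageCollectionProcessing()) break;  // quiet
    MarkCompact(tracer);
    UpdateOldSpaceLimits();  // callbacks may allocate; keep limits correct
  }
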
=======================================
--- /branches/2.4/src/global-handles.h  Wed Sep 15 05:33:05 2010
+++ /branches/2.4/src/global-handles.h  Wed Oct 20 12:28:30 2010
@@ -95,9 +95,8 @@
   // Tells whether global handle is weak.
   static bool IsWeak(Object** location);

-  // Process pending weak handles.  Returns true if any weak handle
-  // callback has been invoked.
-  static bool PostGarbageCollectionProcessing();
+  // Process pending weak handles.
+  static void PostGarbageCollectionProcessing();

   // Iterates over all strong handles.
   static void IterateStrongRoots(ObjectVisitor* v);
=======================================
--- /branches/2.4/src/heap-inl.h        Mon Oct 18 07:30:25 2010
+++ /branches/2.4/src/heap-inl.h        Wed Oct 20 12:28:30 2010
@@ -35,16 +35,6 @@
 namespace v8 {
 namespace internal {

-void Heap::UpdateOldSpaceLimits() {
-  intptr_t old_gen_size = PromotedSpaceSize();
-  old_gen_promotion_limit_ =
-      old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
-  old_gen_allocation_limit_ =
-      old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
-  old_gen_exhausted_ = false;
-}
-
-
 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }
@@ -421,7 +411,7 @@
     }                                                                     \
     if (!__object__->IsRetryAfterGC()) RETURN_EMPTY;                      \
     Counters::gc_last_resort_from_handles.Increment();                    \
-    Heap::CollectAllAvailableGarbage();                                   \
+    Heap::CollectAllGarbage(false);                                       \
     {                                                                     \
       AlwaysAllocateScope __scope__;                                      \
       __object__ = FUNCTION_CALL;                                         \
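
The macro hunk above is the allocator's last-resort path: if an allocation
still fails after regular collections, run one full GC and retry under
AlwaysAllocateScope. The revert changes which collection that is; both
versions appear verbatim in the heap.cc hunk below:

  // Before the revert (r5455): squeeze out as much memory as possible.
  void Heap::CollectAllAvailableGarbage() {
    CompilationCache::Clear();            // drop cached compiled code first
    CollectAllGarbage(true, AGGRESSIVE);  // forced compaction, repeated passes
  }

  // After the revert: a single full collection without forced compaction.
  Heap::CollectAllGarbage(false);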
=======================================
--- /branches/2.4/src/heap.cc   Wed Oct 20 03:13:22 2010
+++ /branches/2.4/src/heap.cc   Wed Oct 20 12:28:30 2010
@@ -55,6 +55,7 @@
 String* Heap::hidden_symbol_;
 Object* Heap::roots_[Heap::kRootListLength];

+
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
@@ -63,6 +64,9 @@
 CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;

+static const intptr_t kMinimumPromotionLimit = 2 * MB;
+static const intptr_t kMinimumAllocationLimit = 8 * MB;
+
 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;

@@ -414,25 +418,17 @@
 }


-void Heap::CollectAllGarbage(bool force_compaction,
-                             CollectionPolicy collectionPolicy) {
+void Heap::CollectAllGarbage(bool force_compaction) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
-  CollectGarbage(OLD_POINTER_SPACE, collectionPolicy);
+  CollectGarbage(OLD_POINTER_SPACE);
   MarkCompactCollector::SetForceCompaction(false);
 }


-void Heap::CollectAllAvailableGarbage() {
-  CompilationCache::Clear();
-  CollectAllGarbage(true, AGGRESSIVE);
-}
-
-
-void Heap::CollectGarbage(AllocationSpace space,
-                          CollectionPolicy collectionPolicy) {
+void Heap::CollectGarbage(AllocationSpace space) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);

@@ -459,7 +455,7 @@
         ? &Counters::gc_scavenger
         : &Counters::gc_compactor;
     rate->Start();
-    PerformGarbageCollection(collector, &tracer, collectionPolicy);
+    PerformGarbageCollection(collector, &tracer);
     rate->Stop();

     GarbageCollectionEpilogue();
@@ -475,7 +471,7 @@

 void Heap::PerformScavenge() {
   GCTracer tracer;
-  PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
+  PerformGarbageCollection(SCAVENGER, &tracer);
 }


@@ -665,8 +661,7 @@
 }

 void Heap::PerformGarbageCollection(GarbageCollector collector,
-                                    GCTracer* tracer,
-                                    CollectionPolicy collectionPolicy) {
+                                    GCTracer* tracer) {
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
@@ -696,45 +691,25 @@

     UpdateSurvivalRateTrend(start_new_space_size);

-    UpdateOldSpaceLimits();
-
-    // Major GC would invoke weak handle callbacks on weakly reachable
-    // handles, but won't collect weakly reachable objects until next
-    // major GC. Therefore if we collect aggressively and weak handle callback
-    // has been invoked, we rerun major GC to release objects which become
-    // garbage.
-    if (collectionPolicy == AGGRESSIVE) {
-      // Note: as weak callbacks can execute arbitrary code, we cannot
-      // hope that eventually there will be no weak callbacks invocations.
-      // Therefore stop recollecting after several attempts.
-      const int kMaxNumberOfAttempts = 7;
-      for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
-        { DisableAssertNoAllocation allow_allocation;
-          GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-          if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
-        }
-        MarkCompact(tracer);
-        // Weak handle callbacks can allocate data, so keep limits correct.
-        UpdateOldSpaceLimits();
-      }
-    } else {
-      if (high_survival_rate_during_scavenges &&
-          IsStableOrIncreasingSurvivalTrend()) {
-        // Stable high survival rates of young objects both during partial and
-        // full collection indicate that mutator is either building or modifying
-        // a structure with a long lifetime.
-        // In this case we aggressively raise old generation memory limits to
-        // postpone subsequent mark-sweep collection and thus trade memory
-        // space for the mutation speed.
-        old_gen_promotion_limit_ *= 2;
-        old_gen_allocation_limit_ *= 2;
-      }
+    int old_gen_size = PromotedSpaceSize();
+    old_gen_promotion_limit_ =
+        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
+    old_gen_allocation_limit_ =
+        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+
+    if (high_survival_rate_during_scavenges &&
+        IsStableOrIncreasingSurvivalTrend()) {
+      // Stable high survival rates of young objects both during partial and
+      // full collection indicate that mutator is either building or modifying
+      // a structure with a long lifetime.
+      // In this case we aggressively raise old generation memory limits to
+      // postpone subsequent mark-sweep collection and thus trade memory
+      // space for the mutation speed.
+      old_gen_promotion_limit_ *= 2;
+      old_gen_allocation_limit_ *= 2;
     }

-    { DisableAssertNoAllocation allow_allocation;
-      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-      GlobalHandles::PostGarbageCollectionProcessing();
-    }
+    old_gen_exhausted_ = false;
   } else {
     tracer_ = tracer;
     Scavenge();
@@ -745,6 +720,12 @@

   Counters::objs_since_last_young.Set(0);

+  if (collector == MARK_COMPACTOR) {
+    DisableAssertNoAllocation allow_allocation;
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+    GlobalHandles::PostGarbageCollectionProcessing();
+  }
+
   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();

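A worked example of the restored inline limit computation, using the
constants from this hunk (kMinimumPromotionLimit = 2 MB,
kMinimumAllocationLimit = 8 MB; the 30 MB promoted size is illustrative):

  old_gen_size              = 30 MB   // PromotedSpaceSize()
  old_gen_promotion_limit_  = 30 MB + Max(2 MB, 30 MB / 3) = 40 MB
  old_gen_allocation_limit_ = 30 MB + Max(8 MB, 30 MB / 2) = 45 MB

With stable, high survival rates both limits are then doubled (to 80 MB and
90 MB here), postponing the next mark-sweep in favor of mutator speed.
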
=======================================
--- /branches/2.4/src/heap.h    Mon Oct 18 07:30:25 2010
+++ /branches/2.4/src/heap.h    Wed Oct 20 12:28:30 2010
@@ -692,20 +692,13 @@
   static void GarbageCollectionPrologue();
   static void GarbageCollectionEpilogue();

-  enum CollectionPolicy { NORMAL, AGGRESSIVE };
-
   // Performs garbage collection operation.
   // Returns whether required_space bytes are available after the collection.
-  static void CollectGarbage(AllocationSpace space,
-                             CollectionPolicy collectionPolicy = NORMAL);
+  static void CollectGarbage(AllocationSpace space);

   // Performs a full garbage collection. Force compaction if the
   // parameter is true.
-  static void CollectAllGarbage(bool force_compaction,
-                                CollectionPolicy collectionPolicy = NORMAL);
-
-  // Last hope GC, should try to squeeze as much as possible.
-  static void CollectAllAvailableGarbage();
+  static void CollectAllGarbage(bool force_compaction);

   // Notify the heap that a context has been disposed.
   static int NotifyContextDisposed() { return ++contexts_disposed_; }
@@ -1227,13 +1220,7 @@

   // Performs garbage collection
   static void PerformGarbageCollection(GarbageCollector collector,
-                                       GCTracer* tracer,
-                                       CollectionPolicy collectionPolicy);
-
-  static const intptr_t kMinimumPromotionLimit = 2 * MB;
-  static const intptr_t kMinimumAllocationLimit = 8 * MB;
-
-  inline static void UpdateOldSpaceLimits();
+                                       GCTracer* tracer);

   // Allocate an uninitialized object in map space. The behavior is identical
   // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't

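Net effect on the interfaces, collecting the post-revert declarations from
the hunks above:

  // heap.h
  static void CollectGarbage(AllocationSpace space);
  static void CollectAllGarbage(bool force_compaction);

  // global-handles.h: returns void again
  static void PostGarbageCollectionProcessing();

CollectAllAvailableGarbage() and the CollectionPolicy enum are removed
entirely; weak-handle processing now runs once per mark-compact in
PerformGarbageCollection.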