Revision: 8515
Author:   [email protected]
Date:     Fri Jul  1 05:47:18 2011
Log:      Remove unmarked entries from per context map caches.

Made "map_cache" a weak field of global context and added a pass over
all caches late in the marking phase.

[email protected]
BUG=v8:1516
TEST=cctest/test-api/Regress1516

Review URL: http://codereview.chromium.org/7285031
http://code.google.com/p/v8/source/detail?r=8515

Modified:
 /branches/bleeding_edge/src/contexts.h
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mark-compact.h
 /branches/bleeding_edge/test/cctest/test-api.cc

=======================================
--- /branches/bleeding_edge/src/contexts.h      Tue Jun 28 08:22:08 2011
+++ /branches/bleeding_edge/src/contexts.h      Fri Jul  1 05:47:18 2011
@@ -225,7 +225,6 @@
     OPAQUE_REFERENCE_FUNCTION_INDEX,
     CONTEXT_EXTENSION_FUNCTION_INDEX,
     OUT_OF_MEMORY_INDEX,
-    MAP_CACHE_INDEX,
     CONTEXT_DATA_INDEX,
     ALLOW_CODE_GEN_FROM_STRINGS_INDEX,
     DERIVED_GET_TRAP_INDEX,
@@ -234,6 +233,7 @@
     // Properties from here are treated as weak references by the full GC.
     // Scavenge treats them as strong references.
     OPTIMIZED_FUNCTIONS_LIST,  // Weak.
+    MAP_CACHE_INDEX,  // Weak.
     NEXT_CONTEXT_LINK,  // Weak.

     // Total number of slots.
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Wed Jun 15 23:37:49 2011
+++ /branches/bleeding_edge/src/mark-compact.cc Fri Jul  1 05:47:18 2011
@@ -1424,6 +1424,12 @@
   // reachable from the weak roots.
   ProcessExternalMarking();

+  // Object literal map caches reference symbols (cache keys) and maps
+  // (cache values). At this point still useful maps have already been
+  // marked. Mark the keys for the alive values before we process the
+  // symbol table.
+  ProcessMapCaches();
+
   // Prune the symbol table removing all symbols only pointed to by the
   // symbol table.  Cannot use symbol_table() here because the symbol
   // table is marked.
@@ -1450,6 +1456,57 @@
   // Clean up dead objects from the runtime profiler.
   heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
 }
+
+
+void MarkCompactCollector::ProcessMapCaches() {
+  Object* raw_context = heap()->global_contexts_list_;
+  while (raw_context != heap()->undefined_value()) {
+    Context* context = reinterpret_cast<Context*>(raw_context);
+    if (context->IsMarked()) {
+      HeapObject* raw_map_cache =
+          HeapObject::cast(context->get(Context::MAP_CACHE_INDEX));
+      // A map cache may be reachable from the stack. In this case
+      // it's already transitively marked and it's too late to clean
+      // up its parts.
+      if (!raw_map_cache->IsMarked() &&
+          raw_map_cache != heap()->undefined_value()) {
+        MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache);
+        int existing_elements = map_cache->NumberOfElements();
+        int used_elements = 0;
+        for (int i = MapCache::kElementsStartIndex;
+             i < map_cache->length();
+             i += MapCache::kEntrySize) {
+          Object* raw_key = map_cache->get(i);
+          if (raw_key == heap()->undefined_value() ||
+              raw_key == heap()->null_value()) continue;
+          STATIC_ASSERT(MapCache::kEntrySize == 2);
+          Object* raw_map = map_cache->get(i + 1);
+          if (raw_map->IsHeapObject() &&
+              HeapObject::cast(raw_map)->IsMarked()) {
+            ++used_elements;
+          } else {
+            // Delete useless entries with unmarked maps.
+            ASSERT(raw_map->IsMap());
+            map_cache->set_null_unchecked(heap(), i);
+            map_cache->set_null_unchecked(heap(), i + 1);
+          }
+        }
+        if (used_elements == 0) {
+          context->set(Context::MAP_CACHE_INDEX, heap()->undefined_value());
+        } else {
+          // Note: we don't actually shrink the cache here to avoid
+          // extra complexity during GC. We rely on subsequent cache
+          // usages (EnsureCapacity) to do this.
+          map_cache->ElementsRemoved(existing_elements - used_elements);
+          MarkObject(map_cache);
+        }
+      }
+    }
+    // Move to next element in the list.
+    raw_context = context->get(Context::NEXT_CONTEXT_LINK);
+  }
+  ProcessMarkingStack();
+}


 #ifdef DEBUG
=======================================
--- /branches/bleeding_edge/src/mark-compact.h  Fri Apr  1 04:59:00 2011
+++ /branches/bleeding_edge/src/mark-compact.h  Fri Jul  1 05:47:18 2011
@@ -306,6 +306,10 @@
   // flag on the marking stack.
   void RefillMarkingStack();

+  // After reachable maps have been marked process per context object
+  // literal map caches removing unmarked entries.
+  void ProcessMapCaches();
+
   // Callback function for telling whether the object *p is an unmarked
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);
=======================================
--- /branches/bleeding_edge/test/cctest/test-api.cc     Fri Jun 24 07:30:10 2011
+++ /branches/bleeding_edge/test/cctest/test-api.cc     Fri Jul  1 05:47:18 2011
@@ -14659,3 +14659,28 @@
   obj->Set(v8_str("2000000000"), v8_str("foobar"));
   CHECK_EQ(v8_str("DONT_CHANGE"), obj->Get(v8_str("2000000000")));
 }
+
+
+THREADED_TEST(Regress1516) {
+  v8::HandleScope scope;
+
+  LocalContext context;
+  { v8::HandleScope temp_scope;
+    CompileRun("({'a': 0})");
+  }
+
+  int elements;
+  { i::MapCache* map_cache =
+        i::MapCache::cast(i::Isolate::Current()->context()->map_cache());
+    elements = map_cache->NumberOfElements();
+    CHECK_LE(1, elements);
+  }
+
+  i::Isolate::Current()->heap()->CollectAllGarbage(true);
+  { i::Object* raw_map_cache = i::Isolate::Current()->context()->map_cache();
+    if (raw_map_cache != i::Isolate::Current()->heap()->undefined_value()) {
+      i::MapCache* map_cache = i::MapCache::cast(raw_map_cache);
+      CHECK_GT(elements, map_cache->NumberOfElements());
+    }
+  }
+}

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to