Reviewers: Benedikt Meurer,

Description:
Remove overzealous checking of the --cache-optimized-code flag.
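
The lookup and flush sites do not need to re-check the flag: as long as the
single place that populates the optimized code map still honors
--cache-optimized-code (which this CL relies on), a disabled cache simply
stays empty and every probe is a miss. A minimal, self-contained sketch of
that pattern (illustrative names only, not V8 code):

  #include <map>
  #include <string>

  // Stand-ins for the flag and the per-context optimized code map.
  static bool cache_optimized_code = false;
  static std::map<std::string, int> optimized_code_map;

  // The only place the flag needs to be consulted: the writer.
  void CacheOptimizedCode(const std::string& context, int code) {
    if (!cache_optimized_code) return;
    optimized_code_map[context] = code;
  }

  // Readers need no flag check: with caching disabled the map is empty,
  // so every lookup misses and behaviour is unchanged.
  const int* SearchOptimizedCode(const std::string& context) {
    auto it = optimized_code_map.find(context);
    return it == optimized_code_map.end() ? nullptr : &it->second;
  }

  // Flushing an already-empty map is likewise a harmless no-op.
  void FlushOptimizedCode() { optimized_code_map.clear(); }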

[email protected]

Please review this at https://codereview.chromium.org/1206803003/

Base URL: https://chromium.googlesource.com/v8/v8.git@local_opt-code-map-2

Affected files (+20, -29 lines):
  M src/code-stubs-hydrogen.cc
  M src/compiler.cc
  M src/heap/mark-compact.cc
  M src/heap/objects-visiting-inl.h
  M src/objects.cc
  M test/cctest/test-compiler.cc


Index: src/code-stubs-hydrogen.cc
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 2749b810e4cb9dfc5bc52b221a4accc5080507d2..c5f8836c3ac51e9ab191bd12a17ea806a213d882 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -1818,14 +1818,10 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                         context());

-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  if (FLAG_cache_optimized_code) {
-    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
-  } else {
-    BuildInstallCode(js_function, shared_info);
-  }
+  // Initialize the code pointer in the function to be the one found in the
+  // shared function info object. But first check if there is an optimized
+  // version for our context.
+  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);

   return js_function;
 }
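
Note: dropping the BuildInstallCode() branch is only equivalent if
BuildInstallFromOptimizedCodeMap itself falls through to installing the code
from the shared function info when the map holds no entry for the current
native context; assuming it does, closures created with caching disabled still
get the shared code, because the map is then always empty. Roughly, the
installed logic has this shape (plain C++ for illustration, not the Hydrogen
graph the stub actually builds):

  struct CodeMapEntry {
    const void* native_context;  // key: context the code was optimized for
    const void* optimized_code;  // value: the cached optimized code object
  };

  // Prefer optimized code cached for this context; otherwise fall back to the
  // shared (unoptimized) code, which is what the removed BuildInstallCode()
  // branch used to install for every closure.
  const void* SelectClosureCode(const CodeMapEntry* map, int length,
                                const void* native_context,
                                const void* shared_code) {
    for (int i = 0; i < length; ++i) {
      if (map[i].native_context == native_context) {
        return map[i].optimized_code;
      }
    }
    return shared_code;
  }
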
Index: src/compiler.cc
diff --git a/src/compiler.cc b/src/compiler.cc
index df209651c08451c86d0c81fe42d0ab53841ecec9..6055a59dc3fcc90102ecefe0c8bc5fa37e4f6f03 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -688,18 +688,16 @@ MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(

 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
-  if (FLAG_cache_optimized_code) {
-    Handle<SharedFunctionInfo> shared(function->shared());
-    DisallowHeapAllocation no_gc;
-    CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
-        function->context()->native_context(), osr_ast_id);
-    if (cached.code != nullptr) {
-      // Caching of optimized code enabled and optimized code found.
-      if (cached.literals != nullptr) function->set_literals(cached.literals);
-      DCHECK(!cached.code->marked_for_deoptimization());
-      DCHECK(function->shared()->is_compiled());
-      return Handle<Code>(cached.code);
-    }
+  Handle<SharedFunctionInfo> shared(function->shared());
+  DisallowHeapAllocation no_gc;
+  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
+      function->context()->native_context(), osr_ast_id);
+  if (cached.code != nullptr) {
+    // Caching of optimized code enabled and optimized code found.
+    if (cached.literals != nullptr) function->set_literals(cached.literals);
+    DCHECK(!cached.code->marked_for_deoptimization());
+    DCHECK(function->shared()->is_compiled());
+    return Handle<Code>(cached.code);
   }
   return MaybeHandle<Code>();
 }
Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 2c98b7adc4e47a01103168d819a1117dc99c0ccb..0638e6666b0e6c327b722a75a875edb4c2cf5782 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -899,7 +899,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !shared->optimized_code_map()->IsSmi()) {
         shared->ClearOptimizedCodeMap();
       }
@@ -947,7 +947,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !candidate->optimized_code_map()->IsSmi()) {
         candidate->ClearOptimizedCodeMap();
       }
Index: src/heap/objects-visiting-inl.h
diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h
index fa90be5c40b1351959cc392604419655f063759d..e72934d08cc4709919ef1081439346631e909aff 100644
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -410,14 +410,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
   if (FLAG_cleanup_code_caches_at_gc) {
     shared->ClearTypeFeedbackInfoAtGCTime();
   }
-  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+  if (FLAG_flush_optimized_code_cache &&
       !shared->optimized_code_map()->IsSmi()) {
     // Always flush the optimized code map if requested by flag.
     shared->ClearOptimizedCodeMap();
   }
   MarkCompactCollector* collector = heap->mark_compact_collector();
   if (collector->is_code_flushing_enabled()) {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Add the shared function info holding an optimized code map to
       // the code flusher for processing of code maps after marking.
       collector->code_flusher()->AddOptimizedCodeMap(shared);
@@ -439,7 +439,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
       return;
     }
   } else {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Flush optimized code map on major GCs without code flushing,
       // needed because cached code doesn't contain breakpoints.
       shared->ClearOptimizedCodeMap();
Index: src/objects.cc
diff --git a/src/objects.cc b/src/objects.cc
index 4c32fc52713b9b66f52c9ac2d7485745ab92ff79..2a2b5ed971c647f44f04486928315e6193260be8 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -10598,7 +10598,6 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
     Context* native_context, BailoutId osr_ast_id) {
   DisallowHeapAllocation no_gc;
   DCHECK(native_context->IsNativeContext());
-  if (!FLAG_cache_optimized_code) return {nullptr, nullptr};
   Object* value = optimized_code_map();
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);
Index: test/cctest/test-compiler.cc
diff --git a/test/cctest/test-compiler.cc b/test/cctest/test-compiler.cc
index 10a5c9ade32135f0d6a857a580c4844c66ffd866..493189cb543ec812905c779748e55470e8518851 100644
--- a/test/cctest/test-compiler.cc
+++ b/test/cctest/test-compiler.cc
@@ -371,11 +371,9 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
 // Test that optimized code for different closures is actually shared
 // immediately by the FastNewClosureStub when run in the same context.
 TEST(OptimizedCodeSharing) {
-  // Skip test if --cache-optimized-code is not activated by default because
-  // FastNewClosureStub that is baked into the snapshot is incorrect.
-  if (!FLAG_cache_optimized_code) return;
   FLAG_stress_compaction = false;
   FLAG_allow_natives_syntax = true;
+  FLAG_cache_optimized_code = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   for (int i = 0; i < 10; i++) {

