Revision: 4979
Author: lukezarko
Date: Tue Jun 29 11:05:31 2010
Log: [Isolates] Move more statics (part IV)

stub-cache*.cc, stub-cache.h, (serialize.cc)
-> key_reference/value_reference moved into StubCache
-> primary_/secondary_ became instance members of StubCache

spaces.cc/spaces.h
-> comments_statistics

string-stream.cc
-> debug_object_cache
-> current_security_token

regexp-macro-assembler.cc
-> canonicalize
* is word_character_map[] used as a constant?

scopes.cc
* note about Allocator (it might benefit from taking an isolate pointer)


Review URL: http://codereview.chromium.org/2862032
http://code.google.com/p/v8/source/detail?r=4979

Modified:
 /branches/experimental/isolates/src/arm/stub-cache-arm.cc
 /branches/experimental/isolates/src/ia32/stub-cache-ia32.cc
 /branches/experimental/isolates/src/isolate.h
 /branches/experimental/isolates/src/regexp-macro-assembler.cc
 /branches/experimental/isolates/src/scopes.cc
 /branches/experimental/isolates/src/serialize.cc
 /branches/experimental/isolates/src/spaces.cc
 /branches/experimental/isolates/src/spaces.h
 /branches/experimental/isolates/src/string-stream.cc
 /branches/experimental/isolates/src/stub-cache.cc
 /branches/experimental/isolates/src/stub-cache.h
 /branches/experimental/isolates/src/x64/stub-cache-x64.cc

=======================================
--- /branches/experimental/isolates/src/arm/stub-cache-arm.cc Fri Jun 25 15:53:25 2010
+++ /branches/experimental/isolates/src/arm/stub-cache-arm.cc Tue Jun 29 11:05:31 2010
@@ -39,13 +39,14 @@
 #define __ ACCESS_MASM(masm)


-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
                        Register offset) {
-  ExternalReference key_offset(SCTableReference::keyReference(table));
-  ExternalReference value_offset(SCTableReference::valueReference(table));
+  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

   Label miss;

@@ -89,6 +90,7 @@
                               Register name,
                               Register scratch,
                               Register extra) {
+  Isolate* isolate = Isolate::Current();
   Label miss;

   // Make sure that code is valid. The shifting code relies on the
@@ -116,7 +118,7 @@
           Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch);

   // Primary miss: Compute hash for secondary probe.
   __ sub(scratch, scratch, Operand(name));
@@ -126,7 +128,7 @@
           Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch);

   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
=======================================
--- /branches/experimental/isolates/src/ia32/stub-cache-ia32.cc Fri Jun 25 15:53:25 2010
+++ /branches/experimental/isolates/src/ia32/stub-cache-ia32.cc Tue Jun 29 11:05:31 2010
@@ -39,14 +39,15 @@
 #define __ ACCESS_MASM(masm)


-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
                        Register offset,
                        Register extra) {
-  ExternalReference key_offset(SCTableReference::keyReference(table));
-  ExternalReference value_offset(SCTableReference::valueReference(table));
+  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

   Label miss;

@@ -107,6 +108,7 @@
                               Register name,
                               Register scratch,
                               Register extra) {
+  Isolate* isolate = Isolate::Current();
   Label miss;

   // Make sure that code is valid. The shifting code relies on the
@@ -134,7 +136,7 @@
   __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);

   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch, extra);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);

   // Primary miss: Compute hash for secondary probe.
   __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -146,7 +148,7 @@
   __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch, extra);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);

   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
=======================================
--- /branches/experimental/isolates/src/isolate.h       Mon Jun 28 10:16:23 2010
+++ /branches/experimental/isolates/src/isolate.h       Tue Jun 29 11:05:31 2010
@@ -182,12 +182,27 @@

 #define ISOLATE_PLATFORM_INIT_LIST(V)

+#endif
+
+
+#ifdef DEBUG
+
+#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
+  V(CommentStatistic, paged_space_comments_statistics, \
+      CommentStatistic::kMaxComments + 1)
+#else
+
+#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
+
 #endif

 #define ISOLATE_INIT_ARRAY_LIST(V) \
   /* SerializerDeserializer state. */ \
   V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
-  V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize)
+ V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
+  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
+
+typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;

 #define ISOLATE_INIT_LIST(V) \
   /* AssertNoZoneAllocation state. */ \
@@ -214,6 +229,8 @@
   V(Relocatable*, relocatable_top, NULL) \
   /* State for CodeEntry in profile-generator. */ \
   V(unsigned, code_entry_next_call_uid, NULL) \
+  V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \
+  V(Object*, string_stream_current_security_token, NULL) \
   ISOLATE_PLATFORM_INIT_LIST(V)

 class Isolate {
@@ -590,6 +607,11 @@
   }

   Builtins* builtins() { return &builtins_; }
+
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
+      regexp_macro_assembler_canonicalize() {
+    return &regexp_macro_assembler_canonicalize_;
+  }

   void* PreallocatedStorageNew(size_t size);
   void PreallocatedStorageDelete(void* p);
@@ -705,6 +727,8 @@
   StringInputBuffer objects_string_compare_buffer_a_;
   StringInputBuffer objects_string_compare_buffer_b_;
   StaticResource<StringInputBuffer> objects_string_input_buffer_;
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>
+      regexp_macro_assembler_canonicalize_;

 #ifdef DEBUG
   // A static array of histogram info for each type.
=======================================
--- /branches/experimental/isolates/src/regexp-macro-assembler.cc Mon Jun 21 10:40:11 2010
+++ /branches/experimental/isolates/src/regexp-macro-assembler.cc Tue Jun 29 11:05:31 2010
@@ -185,10 +185,7 @@
 }


-static unibrow::Mapping<unibrow::Ecma262Canonicalize> canonicalize;
-
-
-byte NativeRegExpMacroAssembler::word_character_map[] = {
+const byte NativeRegExpMacroAssembler::word_character_map[] = {
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
     0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u, 0x00u,
@@ -215,6 +212,8 @@
     Address byte_offset1,
     Address byte_offset2,
     size_t byte_length) {
+  unibrow::Mapping<unibrow::Ecma262Canonicalize>* canonicalize =
+      Isolate::Current()->regexp_macro_assembler_canonicalize();
   // This function is not allowed to cause a garbage collection.
   // A GC might move the calling generated code and invalidate the
   // return address on the stack.
@@ -228,10 +227,10 @@
     unibrow::uchar c2 = substring2[i];
     if (c1 != c2) {
       unibrow::uchar s1[1] = { c1 };
-      canonicalize.get(c1, '\0', s1);
+      canonicalize->get(c1, '\0', s1);
       if (s1[0] != c2) {
         unibrow::uchar s2[1] = { c2 };
-        canonicalize.get(c2, '\0', s2);
+        canonicalize->get(c2, '\0', s2);
         if (s1[0] != s2[0]) {
           return 0;
         }
=======================================
--- /branches/experimental/isolates/src/scopes.cc       Wed Jun  2 09:14:32 2010
+++ /branches/experimental/isolates/src/scopes.cc       Tue Jun 29 11:05:31 2010
@@ -37,6 +37,8 @@
// ----------------------------------------------------------------------------
 // A Zone allocator for use with LocalsMap.

+// TODO(isolates): It is probably worth it to change the Allocator class to
+//                 take a pointer to an isolate.
 class ZoneAllocator: public Allocator {
  public:
   /* nothing to do */
=======================================
--- /branches/experimental/isolates/src/serialize.cc Thu Jun 24 09:41:05 2010
+++ /branches/experimental/isolates/src/serialize.cc Tue Jun 29 11:05:31 2010
@@ -159,6 +159,8 @@


 void ExternalReferenceTable::PopulateTable() {
+  Isolate* isolate = Isolate::Current();
+
   for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
     max_id_[type_code] = 0;
   }
@@ -292,7 +294,7 @@
         Vector<char>::New(top_format_length + StrLength(address_name) + 1);
     const char* chars = name.start();
     OS::SNPrintF(name, top_address_format, address_name);
-    Add(Isolate::Current()->get_address_from_id((Isolate::AddressId)i),
+    Add(isolate->get_address_from_id((Isolate::AddressId)i),
         TOP_ADDRESS, i, chars);
   }

@@ -310,20 +312,22 @@
   ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
 #undef ACCESSOR_DESCRIPTOR_DECLARATION

+  StubCache* stub_cache = isolate->stub_cache();
+
   // Stub cache tables
-  Add(SCTableReference::keyReference(StubCache::kPrimary).address(),
+  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
       STUB_CACHE_TABLE,
       1,
       "StubCache::primary_->key");
-  Add(SCTableReference::valueReference(StubCache::kPrimary).address(),
+  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
       STUB_CACHE_TABLE,
       2,
       "StubCache::primary_->value");
-  Add(SCTableReference::keyReference(StubCache::kSecondary).address(),
+  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
       STUB_CACHE_TABLE,
       3,
       "StubCache::secondary_->key");
-  Add(SCTableReference::valueReference(StubCache::kSecondary).address(),
+  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
       STUB_CACHE_TABLE,
       4,
       "StubCache::secondary_->value");
=======================================
--- /branches/experimental/isolates/src/spaces.cc       Mon Jun 21 15:48:21 2010
+++ /branches/experimental/isolates/src/spaces.cc       Tue Jun 29 11:05:31 2010
@@ -2198,28 +2198,14 @@


 #ifdef DEBUG
-struct CommentStatistic {
-  const char* comment;
-  int size;
-  int count;
-  void Clear() {
-    comment = NULL;
-    size = 0;
-    count = 0;
-  }
-};
-
-
-// must be small, since an iteration is used for lookup
-const int kMaxComments = 64;
-static CommentStatistic comments_statistics[kMaxComments+1];
-
-
 void PagedSpace::ReportCodeStatistics() {
+  Isolate* isolate = Isolate::Current();
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   ReportCodeKindStatistics();
   PrintF("Code comment statistics (\"   [ comment-txt   :    size/   "
          "count  (average)\"):\n");
-  for (int i = 0; i <= kMaxComments; i++) {
+  for (int i = 0; i <= CommentStatistic::kMaxComments; i++) {
     const CommentStatistic& cs = comments_statistics[i];
     if (cs.size > 0) {
PrintF(" %-30s: %10d/%6d (%d)\n", cs.comment, cs.size, cs.count,
@@ -2231,23 +2217,30 @@


 void PagedSpace::ResetCodeStatistics() {
+  Isolate* isolate = Isolate::Current();
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   ClearCodeKindStatistics();
-  for (int i = 0; i < kMaxComments; i++) comments_statistics[i].Clear();
-  comments_statistics[kMaxComments].comment = "Unknown";
-  comments_statistics[kMaxComments].size = 0;
-  comments_statistics[kMaxComments].count = 0;
+  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
+    comments_statistics[i].Clear();
+  }
+  comments_statistics[CommentStatistic::kMaxComments].comment = "Unknown";
+  comments_statistics[CommentStatistic::kMaxComments].size = 0;
+  comments_statistics[CommentStatistic::kMaxComments].count = 0;
 }


-// Adds comment to 'comment_statistics' table. Performance OK sa long as
+// Adds comment to 'comment_statistics' table. Performance OK as long as
 // 'kMaxComments' is small
-static void EnterComment(const char* comment, int delta) {
+static void EnterComment(Isolate* isolate, const char* comment, int delta) {
+  CommentStatistic* comments_statistics =
+      isolate->paged_space_comments_statistics();
   // Do not count empty comments
   if (delta <= 0) return;
-  CommentStatistic* cs = &comments_statistics[kMaxComments];
+ CommentStatistic* cs = &comments_statistics[CommentStatistic::kMaxComments];
   // Search for a free or matching entry in 'comments_statistics': 'cs'
   // points to result.
-  for (int i = 0; i < kMaxComments; i++) {
+  for (int i = 0; i < CommentStatistic::kMaxComments; i++) {
     if (comments_statistics[i].comment == NULL) {
       cs = &comments_statistics[i];
       cs->comment = comment;
@@ -2265,7 +2258,7 @@

// Call for each nested comment start (start marked with '[ xxx', end marked
 // with ']'.  RelocIterator 'it' must point to a comment reloc info.
-static void CollectCommentStatistics(RelocIterator* it) {
+static void CollectCommentStatistics(Isolate* isolate, RelocIterator* it) {
   ASSERT(!it->done());
   ASSERT(it->rinfo()->rmode() == RelocInfo::COMMENT);
   const char* tmp = reinterpret_cast<const char*>(it->rinfo()->data());
@@ -2290,13 +2283,13 @@
       flat_delta += static_cast<int>(it->rinfo()->pc() - prev_pc);
       if (txt[0] == ']') break;  // End of nested  comment
       // A new comment
-      CollectCommentStatistics(it);
+      CollectCommentStatistics(isolate, it);
       // Skip code that was covered with previous comment
       prev_pc = it->rinfo()->pc();
     }
     it->next();
   }
-  EnterComment(comment_txt, flat_delta);
+  EnterComment(isolate, comment_txt, flat_delta);
 }


@@ -2316,7 +2309,7 @@
       while (!it.done()) {
         if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
           delta += static_cast<int>(it.rinfo()->pc() - prev_pc);
-          CollectCommentStatistics(&it);
+          CollectCommentStatistics(isolate, &it);
           prev_pc = it.rinfo()->pc();
         }
         it.next();
@@ -2325,7 +2318,7 @@
       ASSERT(code->instruction_start() <= prev_pc &&
              prev_pc <= code->relocation_start());
       delta += static_cast<int>(code->relocation_start() - prev_pc);
-      EnterComment("NoComment", delta);
+      EnterComment(isolate, "NoComment", delta);
     }
   }
 }
=======================================
--- /branches/experimental/isolates/src/spaces.h        Mon Jun 21 15:48:21 2010
+++ /branches/experimental/isolates/src/spaces.h        Tue Jun 29 11:05:31 2010
@@ -2203,6 +2203,22 @@
 };


+#ifdef DEBUG
+struct CommentStatistic {
+  const char* comment;
+  int size;
+  int count;
+  void Clear() {
+    comment = NULL;
+    size = 0;
+    count = 0;
+  }
+  // Must be small, since an iteration is used for lookup.
+  static const int kMaxComments = 64;
+};
+#endif
+
+
 } }  // namespace v8::internal

 #endif  // V8_SPACES_H_
=======================================
--- /branches/experimental/isolates/src/string-stream.cc Thu Jun 10 10:14:01 2010
+++ /branches/experimental/isolates/src/string-stream.cc Tue Jun 29 11:05:31 2010
@@ -34,9 +34,6 @@
 namespace internal {

 static const int kMentionedObjectCacheMaxSize = 256;
-static List<HeapObject*, PreallocatedStorage>* debug_object_cache = NULL;
-static Object* current_security_token = NULL;
-

 char* HeapStringAllocator::allocate(unsigned bytes) {
   space_ = NewArray<char>(bytes);
@@ -195,6 +192,8 @@
     return;
   }
   if (o->IsHeapObject()) {
+    DebugObjectCache* debug_object_cache = Isolate::Current()->
+        string_stream_debug_object_cache();
     for (int i = 0; i < debug_object_cache->length(); i++) {
       if ((*debug_object_cache)[i] == o) {
         Add("#%d#", i);
@@ -286,17 +285,20 @@


 void StringStream::ClearMentionedObjectCache() {
-  current_security_token = NULL;
-  if (debug_object_cache == NULL) {
-    debug_object_cache = new List<HeapObject*, PreallocatedStorage>(0);
-  }
-  debug_object_cache->Clear();
+  Isolate* isolate = Isolate::Current();
+  isolate->set_string_stream_current_security_token(NULL);
+  if (isolate->string_stream_debug_object_cache() == NULL) {
+    isolate->set_string_stream_debug_object_cache(
+        new List<HeapObject*, PreallocatedStorage>(0));
+  }
+  isolate->string_stream_debug_object_cache()->Clear();
 }


 #ifdef DEBUG
 bool StringStream::IsMentionedObjectCacheClear() {
-  return (debug_object_cache->length() == 0);
+  return (
+ Isolate::Current()->string_stream_debug_object_cache()->length() == 0);
 }
 #endif

@@ -412,6 +414,8 @@


 void StringStream::PrintMentionedObjectCache() {
+  DebugObjectCache* debug_object_cache =
+      Isolate::Current()->string_stream_debug_object_cache();
   Add("==== Key         ============================================\n\n");
   for (int i = 0; i < debug_object_cache->length(); i++) {
     HeapObject* printee = (*debug_object_cache)[i];
@@ -444,12 +448,14 @@


 void StringStream::PrintSecurityTokenIfChanged(Object* f) {
-  if (!f->IsHeapObject() || !HEAP->Contains(HeapObject::cast(f))) {
+  Isolate* isolate = Isolate::Current();
+  Heap* heap = isolate->heap();
+  if (!f->IsHeapObject() || !heap->Contains(HeapObject::cast(f))) {
     return;
   }
   Map* map = HeapObject::cast(f)->map();
   if (!map->IsHeapObject() ||
-      !HEAP->Contains(map) ||
+      !heap->Contains(map) ||
       !map->IsMap() ||
       !f->IsJSFunction()) {
     return;
@@ -458,17 +464,17 @@
   JSFunction* fun = JSFunction::cast(f);
   Object* perhaps_context = fun->unchecked_context();
   if (perhaps_context->IsHeapObject() &&
-      HEAP->Contains(HeapObject::cast(perhaps_context)) &&
+      heap->Contains(HeapObject::cast(perhaps_context)) &&
       perhaps_context->IsContext()) {
     Context* context = fun->context();
-    if (!HEAP->Contains(context)) {
+    if (!heap->Contains(context)) {
       Add("(Function context is outside heap)\n");
       return;
     }
     Object* token = context->global_context()->security_token();
-    if (token != current_security_token) {
+    if (token != isolate->string_stream_current_security_token()) {
       Add("Security context: %o\n", token);
-      current_security_token = token;
+      isolate->set_string_stream_current_security_token(token);
     }
   } else {
     Add("(Function context is corrupt)\n");
=======================================
--- /branches/experimental/isolates/src/stub-cache.cc Fri Jun 25 15:32:52 2010
+++ /branches/experimental/isolates/src/stub-cache.cc Tue Jun 29 11:05:31 2010
@@ -39,10 +39,9 @@
 // StubCache implementation.


-StubCache::Entry StubCache::primary_[StubCache::kPrimaryTableSize];
-StubCache::Entry StubCache::secondary_[StubCache::kSecondaryTableSize];
-
 StubCache::StubCache() {
+  memset(primary_, 0, sizeof(primary_[0]) * StubCache::kPrimaryTableSize);
+ memset(secondary_, 0, sizeof(secondary_[0]) * StubCache::kSecondaryTableSize);
 }


=======================================
--- /branches/experimental/isolates/src/stub-cache.h Wed Jun 9 16:02:44 2010
+++ /branches/experimental/isolates/src/stub-cache.h Tue Jun 29 11:05:31 2010
@@ -42,7 +42,20 @@
 // invalidate the cache whenever a prototype map is changed.  The stub
 // validates the map chain as in the mono-morphic case.

-class SCTableReference;
+class StubCache;
+
+class SCTableReference {
+ public:
+  Address address() const { return address_; }
+
+ private:
+  explicit SCTableReference(Address address) : address_(address) {}
+
+  Address address_;
+
+  friend class StubCache;
+};
+

 class StubCache {
  public:
@@ -51,7 +64,6 @@
     Code* value;
   };

-
   void Initialize(bool create_heap_objects);

   // Computes the right stub matching. Inserts the result in the
@@ -231,6 +243,29 @@
     kSecondary
   };

+
+  SCTableReference key_reference(StubCache::Table table) {
+    return SCTableReference(
+        reinterpret_cast<Address>(&first_entry(table)->key));
+  }
+
+
+  SCTableReference value_reference(StubCache::Table table) {
+    return SCTableReference(
+        reinterpret_cast<Address>(&first_entry(table)->value));
+  }
+
+
+  StubCache::Entry* first_entry(StubCache::Table table) {
+    switch (table) {
+      case StubCache::kPrimary: return StubCache::primary_;
+      case StubCache::kSecondary: return StubCache::secondary_;
+    }
+    UNREACHABLE();
+    return NULL;
+  }
+
+
  private:
   StubCache();

@@ -238,8 +273,8 @@
   friend class SCTableReference;
   static const int kPrimaryTableSize = 2048;
   static const int kSecondaryTableSize = 512;
-  static Entry primary_[];
-  static Entry secondary_[];
+  Entry primary_[kPrimaryTableSize];
+  Entry secondary_[kSecondaryTableSize];

   // Computes the hashed offsets for primary and secondary caches.
   RLYSTC int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
@@ -292,36 +327,6 @@
 };


-class SCTableReference {
- public:
-  static SCTableReference keyReference(StubCache::Table table) {
-    return SCTableReference(
-        reinterpret_cast<Address>(&first_entry(table)->key));
-  }
-
-
-  static SCTableReference valueReference(StubCache::Table table) {
-    return SCTableReference(
-        reinterpret_cast<Address>(&first_entry(table)->value));
-  }
-
-  Address address() const { return address_; }
-
- private:
-  explicit SCTableReference(Address address) : address_(address) {}
-
-  static StubCache::Entry* first_entry(StubCache::Table table) {
-    switch (table) {
-      case StubCache::kPrimary: return StubCache::primary_;
-      case StubCache::kSecondary: return StubCache::secondary_;
-    }
-    UNREACHABLE();
-    return NULL;
-  }
-
-  Address address_;
-};
-
 // ------------------------------------------------------------------------


=======================================
--- /branches/experimental/isolates/src/x64/stub-cache-x64.cc Fri Jun 25 15:53:25 2010
+++ /branches/experimental/isolates/src/x64/stub-cache-x64.cc Tue Jun 29 11:05:31 2010
@@ -44,7 +44,8 @@
 #define __ ACCESS_MASM(masm)


-static void ProbeTable(MacroAssembler* masm,
+static void ProbeTable(Isolate* isolate,
+                       MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register name,
@@ -53,7 +54,7 @@
   ASSERT_EQ(16, sizeof(StubCache::Entry));
   // The offset register holds the entry offset times four (due to masking
   // and shifting optimizations).
-  ExternalReference key_offset(SCTableReference::keyReference(table));
+ ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
   Label miss;

   __ movq(kScratchRegister, key_offset);
@@ -169,6 +170,8 @@
                               Register name,
                               Register scratch,
                               Register extra) {
+  Isolate* isolate = Isolate::Current();
+
   Label miss;
   USE(extra);  // The register extra is not used on the X64 platform.
   // Make sure that code is valid. The shifting code relies on the
@@ -193,7 +196,7 @@
__ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

   // Probe the primary table.
-  ProbeTable(masm, flags, kPrimary, name, scratch);
+  ProbeTable(isolate, masm, flags, kPrimary, name, scratch);

   // Primary miss: Compute hash for secondary probe.
   __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -205,7 +208,7 @@
__ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

   // Probe the secondary table.
-  ProbeTable(masm, flags, kSecondary, name, scratch);
+  ProbeTable(isolate, masm, flags, kSecondary, name, scratch);

   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to