Reviewers: dcarney,

Description:
Do not check for interrupt when allocating stack locals.

R=dcar...@chromium.org
BUG=357137
LOG=N

Please review this at https://codereview.chromium.org/219373004/

SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge

Affected files (+57, -11 lines):
  M src/arm/full-codegen-arm.cc
  M src/arm64/full-codegen-arm64.cc
  M src/ia32/full-codegen-ia32.cc
  M src/mips/full-codegen-mips.cc
  M src/x64/full-codegen-x64.cc
  M test/cctest/test-heap.cc
  A + test/mjsunit/regress/regress-crbug-357137.js


Index: src/arm/full-codegen-arm.cc
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 2ca80166ecb39c0fc325a5e87eca5a86582d4dee..3539bbc8dfb9d869c2d06b44cbac7893715448de 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -118,10 +118,14 @@ static void EmitStackCheck(MacroAssembler* masm_,
     Isolate* isolate = masm_->isolate();
   Label ok;
   ASSERT(scratch.is(sp) == (pointers == 0));
+  Heap::RootListIndex index;
   if (pointers != 0) {
     __ sub(scratch, sp, Operand(pointers * kPointerSize));
+    index = Heap::kRealStackLimitRootIndex;
+  } else {
+    index = Heap::kStackLimitRootIndex;
   }
-  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+  __ LoadRoot(stack_limit_scratch, index);
   __ cmp(scratch, Operand(stack_limit_scratch));
   __ b(hs, &ok);
   PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
Index: src/arm64/full-codegen-arm64.cc
diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc
index d8f140254e0de0eb92893580a275f604aac415ba..f2c69b32f2f3f6b65e3736e5703d536fb82f8fbc 100644
--- a/src/arm64/full-codegen-arm64.cc
+++ b/src/arm64/full-codegen-arm64.cc
@@ -117,10 +117,14 @@ static void EmitStackCheck(MacroAssembler* masm_,
   Label ok;
   ASSERT(jssp.Is(__ StackPointer()));
   ASSERT(scratch.Is(jssp) == (pointers == 0));
+  Heap::RootListIndex index;
   if (pointers != 0) {
     __ Sub(scratch, jssp, pointers * kPointerSize);
+    index = Heap::kRealStackLimitRootIndex;
+  } else {
+    index = Heap::kStackLimitRootIndex;
   }
-  __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+  __ CompareRoot(scratch, index);
   __ B(hs, &ok);
   PredictableCodeSizeScope predictable(masm_,
                                        Assembler::kCallSizeWithRelocation);
Index: src/ia32/full-codegen-ia32.cc
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 1a7d828c92d9e336bcad5de9fb54e49ac81cfb88..c3c1bda77185263bf16e184a56981240ca43eeda 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -106,12 +106,14 @@ static void EmitStackCheck(MacroAssembler* masm_,
                            Register scratch = esp) {
     Label ok;
     Isolate* isolate = masm_->isolate();
-    ExternalReference stack_limit =
-        ExternalReference::address_of_stack_limit(isolate);
     ASSERT(scratch.is(esp) == (pointers == 0));
+    ExternalReference stack_limit;
     if (pointers != 0) {
       __ mov(scratch, esp);
       __ sub(scratch, Immediate(pointers * kPointerSize));
+      stack_limit = ExternalReference::address_of_real_stack_limit(isolate);
+    } else {
+      stack_limit = ExternalReference::address_of_stack_limit(isolate);
     }
     __ cmp(scratch, Operand::StaticVariable(stack_limit));
     __ j(above_equal, &ok, Label::kNear);
Index: src/mips/full-codegen-mips.cc
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index a676fea898751900e8260a0b1af6b64dccf8e98e..8c92c94ea69a2f573ac380af40b364f1f6d4637d 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -127,10 +127,14 @@ static void EmitStackCheck(MacroAssembler* masm_,
   Isolate* isolate = masm_->isolate();
   Label ok;
   ASSERT(scratch.is(sp) == (pointers == 0));
+  Heap::RootListIndex index;
   if (pointers != 0) {
     __ Subu(scratch, sp, Operand(pointers * kPointerSize));
+    index = Heap::kRealStackLimitRootIndex;
+  } else {
+    index = Heap::kStackLimitRootIndex;
   }
-  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+  __ LoadRoot(stack_limit_scratch, index);
   __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
   PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize);
   __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
Index: src/x64/full-codegen-x64.cc
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 6d1e956aad1178698920737015b92080ab15430e..97cd460eeba0de2b8525c7866fa7f84a9d7090fe 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -107,11 +107,15 @@ static void EmitStackCheck(MacroAssembler* masm_,
     Isolate* isolate = masm_->isolate();
     Label ok;
     ASSERT(scratch.is(rsp) == (pointers == 0));
+    Heap::RootListIndex index;
     if (pointers != 0) {
       __ movp(scratch, rsp);
       __ subp(scratch, Immediate(pointers * kPointerSize));
+      index = Heap::kRealStackLimitRootIndex;
+    } else {
+      index = Heap::kStackLimitRootIndex;
     }
-    __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+    __ CompareRoot(scratch, index);
     __ j(above_equal, &ok, Label::kNear);
     __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index c1f20f1f06eb185ab91fd521589832304e0b9d57..25d19af76c73b41a75c56181c70a33a214588c53 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -3913,3 +3913,32 @@ TEST(CEntryStubOOM) {
 }

 #endif  // DEBUG
+
+
+static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
+
+
+static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
+}
+
+
+TEST(Regress357137) {
+  CcTest::InitializeVM();
+  v8::Isolate* isolate = CcTest::isolate();
+  v8::HandleScope hscope(isolate);
+  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
+  global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
+              v8::FunctionTemplate::New(isolate, RequestInterrupt));
+  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
+  ASSERT(!context.IsEmpty());
+  v8::Context::Scope cscope(context);
+
+  v8::Local<v8::Value> result = CompileRun(
+      "var locals = '';"
+      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
+      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
+      "interrupt();"  // This triggers a fake stack overflow in f.
+      "f()()");
+  CHECK_EQ(42.0, result->ToNumber()->Value());
+}
Index: test/mjsunit/regress/regress-crbug-357137.js
diff --git a/test/mjsunit/neuter-twice.js b/test/mjsunit/regress/regress-crbug-357137.js
similarity index 52%
copy from test/mjsunit/neuter-twice.js
copy to test/mjsunit/regress/regress-crbug-357137.js
index 3501cee4330f966c1eeaf6f5b7ebdc6418f2bcec..a780426f012510cfa085ab5404a5ad890054af8e 100644
--- a/test/mjsunit/neuter-twice.js
+++ b/test/mjsunit/regress/regress-crbug-357137.js
@@ -1,9 +1,8 @@
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-//
-// Flags: --allow-natives-syntax

-var ab = new ArrayBuffer(100);
-%ArrayBufferNeuter(ab);
-%ArrayBufferNeuter(ab);
+var locals = "";
+for (var i = 0; i < 1024; i++) locals += "var v" + i + ";";
+eval("function f() {" + locals + "f();}");
+assertThrows("f()", RangeError);


--
--
v8-dev mailing list
v8-dev@googlegroups.com
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to v8-dev+unsubscr...@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.

Reply via email to