Revision: 24925
Author: machenb...@chromium.org
Date: Tue Oct 28 09:48:49 2014 UTC
Log: Version 3.30.20 (based on bleeding_edge revision r24916)
Performance and stability improvements on all platforms.
https://code.google.com/p/v8/source/detail?r=24925
Modified:
/trunk/ChangeLog
/trunk/include/v8.h
/trunk/src/api.cc
/trunk/src/array.js
/trunk/src/compilation-statistics.cc
/trunk/src/compilation-statistics.h
/trunk/src/compiler/arm/code-generator-arm.cc
/trunk/src/compiler/arm64/code-generator-arm64.cc
/trunk/src/compiler/ast-graph-builder.cc
/trunk/src/compiler/code-generator.cc
/trunk/src/compiler/code-generator.h
/trunk/src/compiler/common-operator.h
/trunk/src/compiler/control-reducer.cc
/trunk/src/compiler/control-reducer.h
/trunk/src/compiler/generic-node-inl.h
/trunk/src/compiler/generic-node.h
/trunk/src/compiler/graph-builder.cc
/trunk/src/compiler/graph-builder.h
/trunk/src/compiler/graph-visualizer.cc
/trunk/src/compiler/graph.cc
/trunk/src/compiler/ia32/code-generator-ia32.cc
/trunk/src/compiler/instruction-selector.cc
/trunk/src/compiler/js-generic-lowering.cc
/trunk/src/compiler/js-graph.h
/trunk/src/compiler/js-operator.h
/trunk/src/compiler/linkage.cc
/trunk/src/compiler/linkage.h
/trunk/src/compiler/machine-operator.h
/trunk/src/compiler/mips/code-generator-mips.cc
/trunk/src/compiler/node.h
/trunk/src/compiler/operator-properties-inl.h
/trunk/src/compiler/pipeline-statistics.cc
/trunk/src/compiler/pipeline-statistics.h
/trunk/src/compiler/pipeline.cc
/trunk/src/compiler/pipeline.h
/trunk/src/compiler/raw-machine-assembler.cc
/trunk/src/compiler/raw-machine-assembler.h
/trunk/src/compiler/schedule.h
/trunk/src/compiler/scheduler.cc
/trunk/src/compiler/typer.h
/trunk/src/compiler/x64/code-generator-x64.cc
/trunk/src/d8-debug.cc
/trunk/src/d8.cc
/trunk/src/extensions/statistics-extension.cc
/trunk/src/heap/mark-compact.cc
/trunk/src/mips64/builtins-mips64.cc
/trunk/src/mips64/macro-assembler-mips64.cc
/trunk/src/objects-inl.h
/trunk/src/objects.cc
/trunk/src/objects.h
/trunk/src/transitions-inl.h
/trunk/src/transitions.cc
/trunk/src/transitions.h
/trunk/src/type-feedback-vector-inl.h
/trunk/src/type-feedback-vector.cc
/trunk/src/type-feedback-vector.h
/trunk/src/utils.h
/trunk/src/version.cc
/trunk/test/cctest/compiler/codegen-tester.h
/trunk/test/cctest/compiler/function-tester.h
/trunk/test/cctest/compiler/graph-builder-tester.cc
/trunk/test/cctest/compiler/graph-builder-tester.h
/trunk/test/cctest/compiler/simplified-graph-builder.cc
/trunk/test/cctest/compiler/simplified-graph-builder.h
/trunk/test/cctest/compiler/test-changes-lowering.cc
/trunk/test/cctest/compiler/test-codegen-deopt.cc
/trunk/test/cctest/compiler/test-control-reducer.cc
/trunk/test/cctest/compiler/test-instruction.cc
/trunk/test/cctest/compiler/test-linkage.cc
/trunk/test/cctest/compiler/test-simplified-lowering.cc
/trunk/test/cctest/test-feedback-vector.cc
/trunk/test/cctest/test-heap.cc
/trunk/test/cctest/test-serialize.cc
/trunk/test/cctest/test-utils.cc
/trunk/test/mjsunit/regress/regress-3643.js
/trunk/test/unittests/compiler/change-lowering-unittest.cc
/trunk/test/unittests/compiler/instruction-selector-unittest.cc
=======================================
--- /trunk/ChangeLog Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/ChangeLog Tue Oct 28 09:48:49 2014 UTC
@@ -1,3 +1,8 @@
+2014-10-28: Version 3.30.20
+
+ Performance and stability improvements on all platforms.
+
+
2014-10-27: Version 3.30.19
Check string literals with escapes in PreParserTraits::GetSymbol()
=======================================
--- /trunk/include/v8.h Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/include/v8.h Tue Oct 28 09:48:49 2014 UTC
@@ -1703,14 +1703,24 @@
*/
bool IsDataView() const;
- Local<Boolean> ToBoolean() const;
- Local<Number> ToNumber() const;
- Local<String> ToString() const;
- Local<String> ToDetailString() const;
- Local<Object> ToObject() const;
- Local<Integer> ToInteger() const;
- Local<Uint32> ToUint32() const;
- Local<Int32> ToInt32() const;
+ Local<Boolean> ToBoolean(Isolate* isolate) const;
+ Local<Number> ToNumber(Isolate* isolate) const;
+ Local<String> ToString(Isolate* isolate) const;
+ Local<String> ToDetailString(Isolate* isolate) const;
+ Local<Object> ToObject(Isolate* isolate) const;
+ Local<Integer> ToInteger(Isolate* isolate) const;
+ Local<Uint32> ToUint32(Isolate* isolate) const;
+ Local<Int32> ToInt32(Isolate* isolate) const;
+
+ // TODO(dcarney): deprecate all these.
+ inline Local<Boolean> ToBoolean() const;
+ inline Local<Number> ToNumber() const;
+ inline Local<String> ToString() const;
+ inline Local<String> ToDetailString() const;
+ inline Local<Object> ToObject() const;
+ inline Local<Integer> ToInteger() const;
+ inline Local<Uint32> ToUint32() const;
+ inline Local<Int32> ToInt32() const;
/**
* Attempts to convert a string to an array index.
@@ -6636,6 +6646,44 @@
template <class T> Value* Value::Cast(T* value) {
return static_cast<Value*>(value);
}
+
+
+Local<Boolean> Value::ToBoolean() const {
+ return ToBoolean(Isolate::GetCurrent());
+}
+
+
+Local<Number> Value::ToNumber() const {
+ return ToNumber(Isolate::GetCurrent());
+}
+
+
+Local<String> Value::ToString() const {
+ return ToString(Isolate::GetCurrent());
+}
+
+
+Local<String> Value::ToDetailString() const {
+ return ToDetailString(Isolate::GetCurrent());
+}
+
+
+Local<Object> Value::ToObject() const {
+ return ToObject(Isolate::GetCurrent());
+}
+
+
+Local<Integer> Value::ToInteger() const {
+ return ToInteger(Isolate::GetCurrent());
+}
+
+
+Local<Uint32> Value::ToUint32() const {
+ return ToUint32(Isolate::GetCurrent());
+}
+
+
+Local<Int32> Value::ToInt32() const { return ToInt32(Isolate::GetCurrent()); }
Name* Name::Cast(v8::Value* value) {
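For embedders, the practical effect of the v8.h change above is that the value-conversion helpers now take the target isolate explicitly, while the parameterless overloads remain as inline wrappers around Isolate::GetCurrent() and are marked for later deprecation in the TODO. A minimal usage sketch (not part of this revision), assuming the embedder already has a v8::Isolate* named isolate and a Local<Value> named value inside a HandleScope:

  // Hypothetical embedder snippet; `isolate` and `value` are assumed to exist.
  v8::Local<v8::String> str = value->ToString(isolate);  // new explicit form
  v8::Local<v8::Int32> num = value->ToInt32(isolate);    // new explicit form
  v8::Local<v8::String> old = value->ToString();  // still works via Isolate::GetCurrent()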
=======================================
--- /trunk/src/api.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/api.cc Tue Oct 28 09:48:49 2014 UTC
@@ -2580,13 +2580,13 @@
}
-Local<String> Value::ToString() const {
+Local<String> Value::ToString(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> str;
if (obj->IsString()) {
str = obj;
} else {
- i::Isolate* isolate = i::Isolate::Current();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToString");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2598,13 +2598,13 @@
}
-Local<String> Value::ToDetailString() const {
+Local<String> Value::ToDetailString(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> str;
if (obj->IsString()) {
str = obj;
} else {
- i::Isolate* isolate = i::Isolate::Current();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToDetailString");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2616,13 +2616,13 @@
}
-Local<v8::Object> Value::ToObject() const {
+Local<v8::Object> Value::ToObject(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> val;
if (obj->IsJSObject()) {
val = obj;
} else {
- i::Isolate* isolate = i::Isolate::Current();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToObject");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2634,12 +2634,12 @@
}
-Local<Boolean> Value::ToBoolean() const {
+Local<Boolean> Value::ToBoolean(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
if (obj->IsBoolean()) {
return ToApiHandle<Boolean>(obj);
} else {
- i::Isolate* isolate = i::Isolate::Current();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToBoolean");
ENTER_V8(isolate);
i::Handle<i::Object> val =
@@ -2649,13 +2649,13 @@
}
-Local<Number> Value::ToNumber() const {
+Local<Number> Value::ToNumber(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> num;
if (obj->IsNumber()) {
num = obj;
} else {
- i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToNumber");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2667,13 +2667,13 @@
}
-Local<Integer> Value::ToInteger() const {
+Local<Integer> Value::ToInteger(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> num;
if (obj->IsSmi()) {
num = obj;
} else {
- i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToInteger");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2935,13 +2935,13 @@
}
-Local<Int32> Value::ToInt32() const {
+Local<Int32> Value::ToInt32(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> num;
if (obj->IsSmi()) {
num = obj;
} else {
- i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToInt32");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -2952,13 +2952,13 @@
}
-Local<Uint32> Value::ToUint32() const {
+Local<Uint32> Value::ToUint32(Isolate* v8_isolate) const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> num;
if (obj->IsSmi()) {
num = obj;
} else {
- i::Isolate* isolate = i::HeapObject::cast(*obj)->GetIsolate();
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
LOG_API(isolate, "ToUInt32");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -6916,7 +6916,7 @@
ENTER_V8(isolate);
i::HandleScope scope(isolate);
TryCatch try_catch;
- Handle<String> str = obj->ToString();
+  Handle<String> str = obj->ToString(reinterpret_cast<v8::Isolate*>(isolate));
if (str.IsEmpty()) return;
i::Handle<i::String> i_str = Utils::OpenHandle(*str);
length_ = v8::Utf8Length(*i_str, isolate);
@@ -6937,7 +6937,7 @@
ENTER_V8(isolate);
i::HandleScope scope(isolate);
TryCatch try_catch;
- Handle<String> str = obj->ToString();
+  Handle<String> str = obj->ToString(reinterpret_cast<v8::Isolate*>(isolate));
if (str.IsEmpty()) return;
length_ = str->Length();
str_ = i::NewArray<uint16_t>(length_ + 1);
=======================================
--- /trunk/src/array.js Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/array.js Tue Oct 28 09:48:49 2014 UTC
@@ -302,12 +302,8 @@
for (var i = len - del_count; i > start_i; i--) {
var from_index = i + del_count - 1;
var to_index = i + num_additional_args - 1;
- // The spec could also be interpreted such that
- // %HasOwnProperty would be the appropriate test. We follow
- // KJS in consulting the prototype.
- var current = array[from_index];
- if (!IS_UNDEFINED(current) || from_index in array) {
- array[to_index] = current;
+ if (from_index in array) {
+ array[to_index] = array[from_index];
} else {
delete array[to_index];
}
@@ -316,12 +312,8 @@
for (var i = start_i; i < len - del_count; i++) {
var from_index = i + del_count;
var to_index = i + num_additional_args;
- // The spec could also be interpreted such that
- // %HasOwnProperty would be the appropriate test. We follow
- // KJS in consulting the prototype.
- var current = array[from_index];
- if (!IS_UNDEFINED(current) || from_index in array) {
- array[to_index] = current;
+ if (from_index in array) {
+ array[to_index] = array[from_index];
} else {
delete array[to_index];
}
=======================================
--- /trunk/src/compilation-statistics.cc Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compilation-statistics.cc Tue Oct 28 09:48:49 2014 UTC
@@ -47,7 +47,8 @@
void CompilationStatistics::BasicStats::Accumulate(const BasicStats& stats) {
delta_ += stats.delta_;
total_allocated_bytes_ += stats.total_allocated_bytes_;
- if (stats.max_allocated_bytes_ > max_allocated_bytes_) {
+  if (stats.absolute_max_allocated_bytes_ > absolute_max_allocated_bytes_) {
+ absolute_max_allocated_bytes_ = stats.absolute_max_allocated_bytes_;
max_allocated_bytes_ = stats.max_allocated_bytes_;
function_name_ = stats.function_name_;
}
@@ -66,9 +67,12 @@
static_cast<double>(stats.total_allocated_bytes_ * 100) /
static_cast<double>(total_stats.total_allocated_bytes_);
base::OS::SNPrintF(buffer, kBufferSize,
-                     "%28s %10.3f ms / %5.1f %% %10u total / %5.1f %% %10u max",
+ "%28s %10.3f ms / %5.1f %%"
+ "%10u total / %5.1f %% "
+ "%10u max %10u abs_max",
name, ms, percent, stats.total_allocated_bytes_,
- size_percent, stats.max_allocated_bytes_);
+ size_percent, stats.max_allocated_bytes_,
+ stats.absolute_max_allocated_bytes_);
os << buffer;
if (stats.function_name_.size() > 0) {
@@ -79,8 +83,8 @@
static void WriteFullLine(std::ostream& os) {
- os << "-----------------------------------------------"
- "-----------------------------------------------\n";
+ os << "--------------------------------------------------------"
+ "--------------------------------------------------------\n";
}
@@ -92,8 +96,8 @@
static void WritePhaseKindBreak(std::ostream& os) {
- os << " ------------------"
- "-----------------------------------------------\n";
+ os << " ---------------------------"
+ "--------------------------------------------------------\n";
}
=======================================
--- /trunk/src/compilation-statistics.h Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compilation-statistics.h Tue Oct 28 09:48:49 2014 UTC
@@ -22,13 +22,17 @@
class BasicStats {
public:
- BasicStats() : total_allocated_bytes_(0), max_allocated_bytes_(0) {}
+ BasicStats()
+ : total_allocated_bytes_(0),
+ max_allocated_bytes_(0),
+ absolute_max_allocated_bytes_(0) {}
void Accumulate(const BasicStats& stats);
base::TimeDelta delta_;
size_t total_allocated_bytes_;
size_t max_allocated_bytes_;
+ size_t absolute_max_allocated_bytes_;
std::string function_name_;
};
=======================================
--- /trunk/src/compiler/arm/code-generator-arm.cc Wed Oct 15 13:35:30 2014 UTC
+++ /trunk/src/compiler/arm/code-generator-arm.cc Tue Oct 28 09:48:49 2014 UTC
@@ -682,7 +682,7 @@
__ stm(db_w, sp, saves);
}
} else if (descriptor->IsJSFunctionCall()) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
frame()->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -914,7 +914,7 @@
void CodeGenerator::EnsureSpaceForLazyDeopt() {
int space_needed = Deoptimizer::patch_size();
- if (!linkage()->info()->IsStub()) {
+ if (!info()->IsStub()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
int current_pc = masm()->pc_offset();
=======================================
--- /trunk/src/compiler/arm64/code-generator-arm64.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/arm64/code-generator-arm64.cc Tue Oct 28 09:48:49 2014 UTC
@@ -752,7 +752,7 @@
__ PushCalleeSavedRegisters();
frame()->SetRegisterSaveAreaSize(20 * kPointerSize);
} else if (descriptor->IsJSFunctionCall()) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
__ SetStackPointer(jssp);
__ Prologue(info->IsCodePreAgingActive());
frame()->SetRegisterSaveAreaSize(
@@ -967,7 +967,7 @@
void CodeGenerator::EnsureSpaceForLazyDeopt() {
int space_needed = Deoptimizer::patch_size();
- if (!linkage()->info()->IsStub()) {
+ if (!info()->IsStub()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
intptr_t current_pc = masm()->pc_offset();
=======================================
--- /trunk/src/compiler/ast-graph-builder.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/ast-graph-builder.cc Tue Oct 28 09:48:49 2014 UTC
@@ -22,10 +22,10 @@
      : StructuredGraphBuilder(local_zone, jsgraph->graph(), jsgraph->common()),
info_(info),
jsgraph_(jsgraph),
- globals_(0, info->zone()),
+ globals_(0, local_zone),
breakable_(NULL),
execution_context_(NULL) {
- InitializeAstVisitor(info->zone());
+ InitializeAstVisitor(local_zone);
}
=======================================
--- /trunk/src/compiler/code-generator.cc Thu Oct 23 08:44:45 2014 UTC
+++ /trunk/src/compiler/code-generator.cc Tue Oct 28 09:48:49 2014 UTC
@@ -13,10 +13,11 @@
namespace compiler {
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
- InstructionSequence* code)
+                             InstructionSequence* code, CompilationInfo* info)
: frame_(frame),
linkage_(linkage),
code_(code),
+ info_(info),
current_block_(BasicBlock::RpoNumber::Invalid()),
current_source_position_(SourcePosition::Invalid()),
masm_(code->zone()->isolate(), NULL, 0),
@@ -29,7 +30,7 @@
Handle<Code> CodeGenerator::GenerateCode() {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
// Emit a code line info recording start event.
PositionsRecorder* recorder = masm()->positions_recorder();
@@ -166,7 +167,7 @@
masm()->positions_recorder()->WriteRecordedPositions();
if (FLAG_code_comments) {
Vector<char> buffer = Vector<char>::New(256);
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
int ln = Script::GetLineNumber(info->script(), code_pos);
int cn = Script::GetColumnNumber(info->script(), code_pos);
if (info->script()->name()->IsString()) {
@@ -196,7 +197,7 @@
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
int deopt_count = static_cast<int>(deoptimization_states_.size());
if (deopt_count == 0) return;
Handle<DeoptimizationInputData> data =
=======================================
--- /trunk/src/compiler/code-generator.h Thu Oct 23 08:44:45 2014 UTC
+++ /trunk/src/compiler/code-generator.h Tue Oct 28 09:48:49 2014 UTC
@@ -21,7 +21,7 @@
class CodeGenerator FINAL : public GapResolver::Assembler {
public:
explicit CodeGenerator(Frame* frame, Linkage* linkage,
- InstructionSequence* code);
+ InstructionSequence* code, CompilationInfo* info);
// Generate native code.
Handle<Code> GenerateCode();
@@ -36,6 +36,7 @@
GapResolver* resolver() { return &resolver_; }
SafepointTableBuilder* safepoints() { return &safepoints_; }
Zone* zone() const { return code()->zone(); }
+ CompilationInfo* info() const { return info_; }
// Checks if {block} will appear directly after {current_block_} when
// assembling code, in which case, a fall-through can be used.
@@ -118,6 +119,7 @@
Frame* const frame_;
Linkage* const linkage_;
InstructionSequence* const code_;
+ CompilationInfo* const info_;
BasicBlock::RpoNumber current_block_;
SourcePosition current_source_position_;
MacroAssembler masm_;
=======================================
--- /trunk/src/compiler/common-operator.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/common-operator.h Tue Oct 28 09:48:49 2014 UTC
@@ -131,7 +131,7 @@
// Interface for building common operators that can be used at any level of IR,
// including JavaScript, mid-level, and low-level.
-class CommonOperatorBuilder FINAL {
+class CommonOperatorBuilder FINAL : public ZoneObject {
public:
explicit CommonOperatorBuilder(Zone* zone);
@@ -174,6 +174,8 @@
const CommonOperatorBuilderImpl& impl_;
Zone* const zone_;
+
+ DISALLOW_COPY_AND_ASSIGN(CommonOperatorBuilder);
};
} // namespace compiler
=======================================
--- /trunk/src/compiler/control-reducer.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/control-reducer.cc Tue Oct 28 09:48:49 2014 UTC
@@ -14,7 +14,8 @@
namespace internal {
namespace compiler {
-enum VisitState { kUnvisited, kOnStack, kRevisit, kVisited };
+enum VisitState { kUnvisited = 0, kOnStack = 1, kRevisit = 2, kVisited = 3 };
+enum Reachability { kFromStart = 8 };
#define TRACE(x) \
if (FLAG_trace_turbo) PrintF x
@@ -39,23 +40,169 @@
ZoneDeque<Node*> revisit_;
Node* dead_;
- void Trim() {
- // Mark all nodes reachable from end.
+ void Reduce() {
+ Push(graph()->end());
+ do {
+ // Process the node on the top of the stack, potentially pushing more
+ // or popping the node off the stack.
+ ReduceTop();
+      // If the stack becomes empty, revisit any nodes in the revisit queue.
+ // If no nodes in the revisit queue, try removing dead loops.
+ // If no dead loops, then finish.
+ } while (!stack_.empty() || TryRevisit() || RepairAndRemoveLoops());
+ }
+
+ bool TryRevisit() {
+ while (!revisit_.empty()) {
+ Node* n = revisit_.back();
+ revisit_.pop_back();
+      if (state_[n->id()] == kRevisit) {  // state can change while in queue.
+ Push(n);
+ return true;
+ }
+ }
+ return false;
+ }
+
+  // Repair the graph after the possible creation of non-terminating or dead
+  // loops. Removing dead loops can produce more opportunities for reduction.
+ bool RepairAndRemoveLoops() {
+ // TODO(turbofan): we can skip this if the graph has no loops, but
+ // we have to be careful about proper loop detection during reduction.
+
+ // Gather all nodes backwards-reachable from end (through inputs).
+ state_.assign(graph()->NodeCount(), kUnvisited);
NodeVector nodes(zone_);
- state_.assign(jsgraph_->graph()->NodeCount(), kUnvisited);
- Push(jsgraph_->graph()->end());
+ AddNodesReachableFromEnd(nodes);
+
+ // Walk forward through control nodes, looking for back edges to nodes
+    // that are not connected to end. Those are non-terminating loops (NTLs).
+ Node* start = graph()->start();
+ ZoneVector<byte> fw_reachability(graph()->NodeCount(), 0, zone_);
+ fw_reachability[start->id()] = kFromStart | kOnStack;
+ stack_.push_back(start);
+
while (!stack_.empty()) {
- Node* node = stack_[stack_.size() - 1];
- stack_.pop_back();
- state_[node->id()] = kVisited;
- nodes.push_back(node);
- for (InputIter i = node->inputs().begin(); i != node->inputs().end();
- ++i) {
- Recurse(*i); // pushes node onto the stack if necessary.
+ Node* node = stack_.back();
+ TRACE(("ControlFw: #%d:%s\n", node->id(), node->op()->mnemonic()));
+ bool pop = true;
+ for (Node* const succ : node->uses()) {
+ byte reach = fw_reachability[succ->id()];
+ if ((reach & kOnStack) != 0 && state_[succ->id()] != kVisited) {
+ // {succ} is on stack and not reachable from end.
+ ConnectNTL(nodes, succ);
+ fw_reachability.resize(graph()->NodeCount(), 0);
+ pop = false; // continue traversing inputs to this node.
+ break;
+ }
+ if ((reach & kFromStart) == 0 &&
+ IrOpcode::IsControlOpcode(succ->opcode())) {
+ // {succ} is a control node and not yet reached from start.
+ fw_reachability[succ->id()] |= kFromStart | kOnStack;
+ stack_.push_back(succ);
+ pop = false; // "recurse" into successor control node.
+ break;
+ }
+ }
+ if (pop) {
+ fw_reachability[node->id()] &= ~kOnStack;
+ stack_.pop_back();
}
}
+
+ // Trim references from dead nodes to live nodes first.
+ jsgraph_->GetCachedNodes(&nodes);
+ TrimNodes(nodes);
+
+ // Any control nodes not reachable from start are dead, even loops.
+ for (size_t i = 0; i < nodes.size(); i++) {
+ Node* node = nodes[i];
+ byte reach = fw_reachability[node->id()];
+ if ((reach & kFromStart) == 0 &&
+ IrOpcode::IsControlOpcode(node->opcode())) {
+ ReplaceNode(node, dead()); // uses will be added to revisit queue.
+ }
+ }
+ return TryRevisit(); // try to push a node onto the stack.
+ }
+
+ // Connect {loop}, the header of a non-terminating loop, to the end node.
+ void ConnectNTL(NodeVector& nodes, Node* loop) {
+ TRACE(("ConnectNTL: #%d:%s\n", loop->id(), loop->op()->mnemonic()));
+
+ if (loop->opcode() != IrOpcode::kTerminate) {
+ // Insert a {Terminate} node if the loop has effects.
+ ZoneDeque<Node*> effects(zone_);
+ for (Node* const use : loop->uses()) {
+ if (use->opcode() == IrOpcode::kEffectPhi) effects.push_back(use);
+ }
+ int count = static_cast<int>(effects.size());
+ if (count > 0) {
+ Node** inputs = zone_->NewArray<Node*>(1 + count);
+ for (int i = 0; i < count; i++) inputs[i] = effects[i];
+ inputs[count] = loop;
+        loop = graph()->NewNode(common_->Terminate(count), 1 + count, inputs);
+        TRACE(("AddTerminate: #%d:%s[%d]\n", loop->id(), loop->op()->mnemonic(),
+ count));
+ }
+ }
+
+ Node* to_add = loop;
+ Node* end = graph()->end();
+ CHECK_EQ(IrOpcode::kEnd, end->opcode());
+ Node* merge = end->InputAt(0);
+ if (merge == NULL || merge->opcode() == IrOpcode::kDead) {
+ // The end node died; just connect end to {loop}.
+ end->ReplaceInput(0, loop);
+ } else if (merge->opcode() != IrOpcode::kMerge) {
+ // Introduce a final merge node for {end->InputAt(0)} and {loop}.
+ merge = graph()->NewNode(common_->Merge(2), merge, loop);
+ end->ReplaceInput(0, merge);
+ to_add = merge;
+ } else {
+ // Append a new input to the final merge at the end.
+ merge->AppendInput(graph()->zone(), loop);
+ merge->set_op(common_->Merge(merge->InputCount()));
+ }
+ nodes.push_back(to_add);
+ state_.resize(graph()->NodeCount(), kUnvisited);
+ state_[to_add->id()] = kVisited;
+ AddBackwardsReachableNodes(nodes, nodes.size() - 1);
+ }
+
+ void AddNodesReachableFromEnd(NodeVector& nodes) {
+ Node* end = graph()->end();
+ state_[end->id()] = kVisited;
+ if (!end->IsDead()) {
+ nodes.push_back(end);
+ AddBackwardsReachableNodes(nodes, nodes.size() - 1);
+ }
+ }
+
+ void AddBackwardsReachableNodes(NodeVector& nodes, size_t cursor) {
+ while (cursor < nodes.size()) {
+ Node* node = nodes[cursor++];
+ for (Node* const input : node->inputs()) {
+ if (state_[input->id()] != kVisited) {
+ state_[input->id()] = kVisited;
+ nodes.push_back(input);
+ }
+ }
+ }
+ }
+
+ void Trim() {
+ // Gather all nodes backwards-reachable from end through inputs.
+ state_.assign(graph()->NodeCount(), kUnvisited);
+ NodeVector nodes(zone_);
+ AddNodesReachableFromEnd(nodes);
+
// Process cached nodes in the JSGraph too.
jsgraph_->GetCachedNodes(&nodes);
+ TrimNodes(nodes);
+ }
+
+ void TrimNodes(NodeVector& nodes) {
// Remove dead->live edges.
for (size_t j = 0; j < nodes.size(); j++) {
Node* node = nodes[j];
@@ -75,17 +222,45 @@
// Verify that no inputs to live nodes are NULL.
for (size_t j = 0; j < nodes.size(); j++) {
Node* node = nodes[j];
- for (InputIter i = node->inputs().begin(); i != node->inputs().end();
- ++i) {
- CHECK_NE(NULL, *i);
+ for (Node* const input : node->inputs()) {
+ CHECK_NE(NULL, input);
}
-    for (UseIter i = node->uses().begin(); i != node->uses().end(); ++i) {
- size_t id = static_cast<size_t>((*i)->id());
+ for (Node* const use : node->uses()) {
+ size_t id = static_cast<size_t>(use->id());
CHECK_EQ(kVisited, state_[id]);
}
}
#endif
}
+
+ // Reduce the node on the top of the stack.
+  // If an input {i} is not yet visited or needs to be revisited, push {i} onto
+ // the stack and return. Otherwise, all inputs are visited, so apply
+ // reductions for {node} and pop it off the stack.
+ void ReduceTop() {
+ size_t height = stack_.size();
+ Node* node = stack_.back();
+
+ if (node->IsDead()) return Pop(); // Node was killed while on stack.
+
+ TRACE(("ControlReduce: #%d:%s\n", node->id(), node->op()->mnemonic()));
+
+ // Recurse on an input if necessary.
+ for (Node* const input : node->inputs()) {
+ if (Recurse(input)) return;
+ }
+
+ // All inputs should be visited or on stack. Apply reductions to node.
+ Node* replacement = ReduceNode(node);
+ if (replacement != node) ReplaceNode(node, replacement);
+
+ // After reducing the node, pop it off the stack.
+ CHECK_EQ(static_cast<int>(height), static_cast<int>(stack_.size()));
+ Pop();
+
+ // If there was a replacement, reduce it after popping {node}.
+ if (replacement != node) Recurse(replacement);
+ }
// Push a node onto the stack if its state is {kUnvisited} or {kRevisit}.
bool Recurse(Node* node) {
@@ -103,13 +278,223 @@
state_[node->id()] = kOnStack;
stack_.push_back(node);
}
+
+ void Pop() {
+ int pos = static_cast<int>(stack_.size()) - 1;
+ DCHECK_GE(pos, 0);
+ DCHECK_EQ(kOnStack, state_[stack_[pos]->id()]);
+ state_[stack_[pos]->id()] = kVisited;
+ stack_.pop_back();
+ }
+
+ // Queue a node to be revisited if it has been visited once already.
+ void Revisit(Node* node) {
+ size_t id = static_cast<size_t>(node->id());
+ if (id < state_.size() && state_[id] == kVisited) {
+ TRACE((" Revisit #%d:%s\n", node->id(), node->op()->mnemonic()));
+ state_[id] = kRevisit;
+ revisit_.push_back(node);
+ }
+ }
+
+ Node* dead() {
+ if (dead_ == NULL) dead_ = graph()->NewNode(common_->Dead());
+ return dead_;
+ }
+
+  //===========================================================================
+ // Reducer implementation: perform reductions on a node.
+  //===========================================================================
+ Node* ReduceNode(Node* node) {
+ if (OperatorProperties::GetControlInputCount(node->op()) == 1) {
+      // If a node has only one control input and it is dead, replace with dead.
+ Node* control = NodeProperties::GetControlInput(node);
+ if (control->opcode() == IrOpcode::kDead) {
+        TRACE(("ControlDead: #%d:%s\n", node->id(), node->op()->mnemonic()));
+ return control;
+ }
+ }
+
+ // Reduce branches, phis, and merges.
+ switch (node->opcode()) {
+ case IrOpcode::kBranch:
+ return ReduceBranch(node);
+ case IrOpcode::kLoop:
+ case IrOpcode::kMerge:
+ return ReduceMerge(node);
+ case IrOpcode::kPhi:
+ case IrOpcode::kEffectPhi:
+ return ReducePhi(node);
+ default:
+ return node;
+ }
+ }
+
+ // Reduce redundant phis.
+ Node* ReducePhi(Node* node) {
+ int n = node->InputCount();
+ if (n <= 1) return dead(); // No non-control inputs.
+ if (n == 2) return node->InputAt(0); // Only one non-control input.
+
+ Node* replacement = NULL;
+ Node::Inputs inputs = node->inputs();
+ for (InputIter it = inputs.begin(); n > 1; --n, ++it) {
+ Node* input = *it;
+      if (input->opcode() == IrOpcode::kDead) continue;  // ignore dead inputs.
+      if (input != node && input != replacement) {  // non-redundant input.
+ if (replacement != NULL) return node;
+ replacement = input;
+ }
+ }
+ return replacement == NULL ? dead() : replacement;
+ }
+
+ // Reduce merges by trimming away dead inputs from the merge and phis.
+ Node* ReduceMerge(Node* node) {
+ // Count the number of live inputs.
+ int live = 0;
+ int index = 0;
+ int live_index = 0;
+ for (Node* const input : node->inputs()) {
+ if (input->opcode() != IrOpcode::kDead) {
+ live++;
+ live_index = index;
+ }
+ index++;
+ }
+
+    if (live > 1 && live == node->InputCount()) return node;  // nothing to do.
+
+ TRACE(("ReduceMerge: #%d:%s (%d live)\n", node->id(),
+ node->op()->mnemonic(), live));
+
+ if (live == 0) return dead(); // no remaining inputs.
+
+ // Gather phis and effect phis to be edited.
+ ZoneVector<Node*> phis(zone_);
+ for (Node* const use : node->uses()) {
+ if (use->opcode() == IrOpcode::kPhi ||
+ use->opcode() == IrOpcode::kEffectPhi) {
+ phis.push_back(use);
+ }
+ }
+
+ if (live == 1) {
+ // All phis are redundant. Replace them with their live input.
+      for (Node* const phi : phis) ReplaceNode(phi, phi->InputAt(live_index));
+ // The merge itself is redundant.
+ return node->InputAt(live_index);
+ }
+
+ // Edit phis in place, removing dead inputs and revisiting them.
+ for (Node* const phi : phis) {
+ TRACE((" PhiInMerge: #%d:%s (%d live)\n", phi->id(),
+ phi->op()->mnemonic(), live));
+ RemoveDeadInputs(node, phi);
+ Revisit(phi);
+ }
+ // Edit the merge in place, removing dead inputs.
+ RemoveDeadInputs(node, node);
+ return node;
+ }
+
+ // Reduce branches if they have constant inputs.
+ Node* ReduceBranch(Node* node) {
+ Node* cond = node->InputAt(0);
+ bool is_true;
+ switch (cond->opcode()) {
+ case IrOpcode::kInt32Constant:
+ is_true = !Int32Matcher(cond).Is(0);
+ break;
+ case IrOpcode::kNumberConstant:
+ is_true = !NumberMatcher(cond).Is(0);
+ break;
+ case IrOpcode::kHeapConstant: {
+ Handle<Object> object =
+ HeapObjectMatcher<Object>(cond).Value().handle();
+ if (object->IsTrue())
+ is_true = true;
+ else if (object->IsFalse())
+ is_true = false;
+ else
+          return node;  // TODO(turbofan): fold branches on strings, objects.
+ break;
+ }
+ default:
+ return node;
+ }
+
+    TRACE(("BranchReduce: #%d:%s = %s\n", node->id(), node->op()->mnemonic(),
+ is_true ? "true" : "false"));
+
+ // Replace IfTrue and IfFalse projections from this branch.
+ Node* control = NodeProperties::GetControlInput(node);
+ for (UseIter i = node->uses().begin(); i != node->uses().end();) {
+ Node* to = *i;
+ if (to->opcode() == IrOpcode::kIfTrue) {
+ TRACE((" IfTrue: #%d:%s\n", to->id(), to->op()->mnemonic()));
+ i.UpdateToAndIncrement(NULL);
+ ReplaceNode(to, is_true ? control : dead());
+ } else if (to->opcode() == IrOpcode::kIfFalse) {
+ TRACE((" IfFalse: #%d:%s\n", to->id(), to->op()->mnemonic()));
+ i.UpdateToAndIncrement(NULL);
+ ReplaceNode(to, is_true ? dead() : control);
+ } else {
+ ++i;
+ }
+ }
+ return control;
+ }
+
+ // Remove inputs to {node} corresponding to the dead inputs to {merge}
+ // and compact the remaining inputs, updating the operator.
+ void RemoveDeadInputs(Node* merge, Node* node) {
+ int pos = 0;
+ for (int i = 0; i < node->InputCount(); i++) {
+ // skip dead inputs.
+ if (i < merge->InputCount() &&
+ merge->InputAt(i)->opcode() == IrOpcode::kDead)
+ continue;
+ // compact live inputs.
+ if (pos != i) node->ReplaceInput(pos, node->InputAt(i));
+ pos++;
+ }
+ node->TrimInputCount(pos);
+ if (node->opcode() == IrOpcode::kPhi) {
+      node->set_op(common_->Phi(OpParameter<MachineType>(node->op()), pos - 1));
+ } else if (node->opcode() == IrOpcode::kEffectPhi) {
+ node->set_op(common_->EffectPhi(pos - 1));
+ } else if (node->opcode() == IrOpcode::kMerge) {
+ node->set_op(common_->Merge(pos));
+ } else if (node->opcode() == IrOpcode::kLoop) {
+ node->set_op(common_->Loop(pos));
+ } else {
+ UNREACHABLE();
+ }
+ }
+
+ // Replace uses of {node} with {replacement} and revisit the uses.
+ void ReplaceNode(Node* node, Node* replacement) {
+ if (node == replacement) return;
+ TRACE((" Replace: #%d:%s with #%d:%s\n", node->id(),
+ node->op()->mnemonic(), replacement->id(),
+ replacement->op()->mnemonic()));
+ for (Node* const use : node->uses()) {
+ // Don't revisit this node if it refers to itself.
+ if (use != node) Revisit(use);
+ }
+ node->ReplaceUses(replacement);
+ node->Kill();
+ }
+
+ Graph* graph() { return jsgraph_->graph(); }
};
+
void ControlReducer::ReduceGraph(Zone* zone, JSGraph* jsgraph,
CommonOperatorBuilder* common) {
- ControlReducerImpl impl(zone, jsgraph, NULL);
-  // Only trim the graph for now. Control reduction can reduce non-terminating
- // loops to graphs that are unschedulable at the moment.
+ ControlReducerImpl impl(zone, jsgraph, common);
+ impl.Reduce();
impl.Trim();
}
@@ -118,6 +503,33 @@
ControlReducerImpl impl(zone, jsgraph, NULL);
impl.Trim();
}
+
+
+Node* ControlReducer::ReducePhiForTesting(JSGraph* jsgraph,
+ CommonOperatorBuilder* common,
+ Node* node) {
+ Zone zone(jsgraph->graph()->zone()->isolate());
+ ControlReducerImpl impl(&zone, jsgraph, common);
+ return impl.ReducePhi(node);
+}
+
+
+Node* ControlReducer::ReduceMergeForTesting(JSGraph* jsgraph,
+ CommonOperatorBuilder* common,
+ Node* node) {
+ Zone zone(jsgraph->graph()->zone()->isolate());
+ ControlReducerImpl impl(&zone, jsgraph, common);
+ return impl.ReduceMerge(node);
+}
+
+
+Node* ControlReducer::ReduceBranchForTesting(JSGraph* jsgraph,
+ CommonOperatorBuilder* common,
+ Node* node) {
+ Zone zone(jsgraph->graph()->zone()->isolate());
+ ControlReducerImpl impl(&zone, jsgraph, common);
+ return impl.ReduceBranch(node);
+}
}
}
} // namespace v8::internal::compiler
=======================================
--- /trunk/src/compiler/control-reducer.h Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/control-reducer.h Tue Oct 28 09:48:49 2014 UTC
@@ -11,6 +11,7 @@
class JSGraph;
class CommonOperatorBuilder;
+class Node;
class ControlReducer {
public:
@@ -20,6 +21,16 @@
// Trim nodes in the graph that are not reachable from end.
static void TrimGraph(Zone* zone, JSGraph* graph);
+
+ // Testing interface.
+ static Node* ReducePhiForTesting(JSGraph* graph,
+                                   CommonOperatorBuilder* builder, Node* node);
+ static Node* ReduceBranchForTesting(JSGraph* graph,
+ CommonOperatorBuilder* builder,
+ Node* node);
+ static Node* ReduceMergeForTesting(JSGraph* graph,
+ CommonOperatorBuilder* builder,
+ Node* node);
};
}
}
=======================================
--- /trunk/src/compiler/generic-node-inl.h Fri Oct 17 11:41:30 2014 UTC
+++ /trunk/src/compiler/generic-node-inl.h Tue Oct 28 09:48:49 2014 UTC
@@ -16,13 +16,16 @@
namespace compiler {
template <class B, class S>
-GenericNode<B, S>::GenericNode(GenericGraphBase* graph, int input_count)
+GenericNode<B, S>::GenericNode(GenericGraphBase* graph, int input_count,
+ int reserve_input_count)
: BaseClass(graph->zone()),
input_count_(input_count),
+ reserve_input_count_(reserve_input_count),
has_appendable_inputs_(false),
use_count_(0),
first_use_(NULL),
last_use_(NULL) {
+ DCHECK(reserve_input_count <= kMaxReservedInputs);
inputs_.static_ = reinterpret_cast<Input*>(this + 1);
AssignUniqueID(graph);
}
@@ -154,12 +157,18 @@
template <class B, class S>
void GenericNode<B, S>::AppendInput(Zone* zone, GenericNode<B, S>* to_append) {
- EnsureAppendableInputs(zone);
Use* new_use = new (zone) Use;
Input new_input;
new_input.to = to_append;
new_input.use = new_use;
- inputs_.appendable_->push_back(new_input);
+ if (reserve_input_count_ > 0) {
+ DCHECK(!has_appendable_inputs_);
+ reserve_input_count_--;
+ inputs_.static_[input_count_] = new_input;
+ } else {
+ EnsureAppendableInputs(zone);
+ inputs_.appendable_->push_back(new_input);
+ }
new_use->input_index = input_count_;
new_use->from = this;
to_append->AppendUse(new_use);
@@ -224,15 +233,16 @@
}
template <class B, class S>
-S* GenericNode<B, S>::New(GenericGraphBase* graph, int input_count,
- S** inputs) {
+S* GenericNode<B, S>::New(GenericGraphBase* graph, int input_count, S** inputs,
+ bool has_extensible_inputs) {
size_t node_size = sizeof(GenericNode);
- size_t inputs_size = input_count * sizeof(Input);
+  int reserve_input_count = has_extensible_inputs ? kDefaultReservedInputs : 0;
+ size_t inputs_size = (input_count + reserve_input_count) * sizeof(Input);
size_t uses_size = input_count * sizeof(Use);
int size = static_cast<int>(node_size + inputs_size + uses_size);
Zone* zone = graph->zone();
void* buffer = zone->New(size);
- S* result = new (buffer) S(graph, input_count);
+ S* result = new (buffer) S(graph, input_count, reserve_input_count);
Input* input =
      reinterpret_cast<Input*>(reinterpret_cast<char*>(buffer) + node_size);
Use* use =
=======================================
--- /trunk/src/compiler/generic-node.h Thu Sep 25 00:05:09 2014 UTC
+++ /trunk/src/compiler/generic-node.h Tue Oct 28 09:48:49 2014 UTC
@@ -92,7 +92,8 @@
bool OwnedBy(GenericNode* owner) const;
- static S* New(GenericGraphBase* graph, int input_count, S** inputs);
+ static S* New(GenericGraphBase* graph, int input_count, S** inputs,
+ bool has_extensible_inputs);
protected:
friend class GenericGraphBase;
@@ -128,15 +129,21 @@
void* operator new(size_t, void* location) { return location; }
- GenericNode(GenericGraphBase* graph, int input_count);
+ GenericNode(GenericGraphBase* graph, int input_count,
+ int reserved_input_count);
private:
void AssignUniqueID(GenericGraphBase* graph);
typedef ZoneDeque<Input> InputDeque;
+ static const int kReservedInputCountBits = 2;
+ static const int kMaxReservedInputs = (1 << kReservedInputCountBits) - 1;
+ static const int kDefaultReservedInputs = kMaxReservedInputs;
+
NodeId id_;
- int input_count_ : 31;
+ int input_count_ : 29;
+ unsigned int reserve_input_count_ : kReservedInputCountBits;
bool has_appendable_inputs_ : 1;
union {
    // When a node is initially allocated, it uses a static buffer to hold its
=======================================
--- /trunk/src/compiler/graph-builder.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/graph-builder.cc Tue Oct 28 09:48:49 2014 UTC
@@ -25,13 +25,26 @@
common_(common),
environment_(NULL),
local_zone_(local_zone),
+ input_buffer_size_(0),
+ input_buffer_(NULL),
current_context_(NULL),
- exit_control_(NULL) {}
+ exit_control_(NULL) {
+ EnsureInputBufferSize(kInputBufferSizeIncrement);
+}
+
+
+Node** StructuredGraphBuilder::EnsureInputBufferSize(int size) {
+ if (size > input_buffer_size_) {
+ size += kInputBufferSizeIncrement;
+ input_buffer_ = local_zone()->NewArray<Node*>(size);
+ }
+ return input_buffer_;
+}
Node* StructuredGraphBuilder::MakeNode(const Operator* op,
int value_input_count,
- Node** value_inputs) {
+                                       Node** value_inputs, bool incomplete) {
DCHECK(op->InputCount() == value_input_count);
bool has_context = OperatorProperties::HasContextInput(op);
@@ -44,14 +57,14 @@
Node* result = NULL;
if (!has_context && !has_framestate && !has_control && !has_effect) {
- result = graph()->NewNode(op, value_input_count, value_inputs);
+    result = graph()->NewNode(op, value_input_count, value_inputs, incomplete);
} else {
int input_count_with_deps = value_input_count;
if (has_context) ++input_count_with_deps;
if (has_framestate) ++input_count_with_deps;
if (has_control) ++input_count_with_deps;
if (has_effect) ++input_count_with_deps;
- Node** buffer = local_zone()->NewArray<Node*>(input_count_with_deps);
+ Node** buffer = EnsureInputBufferSize(input_count_with_deps);
memcpy(buffer, value_inputs, kPointerSize * value_input_count);
Node** current_input = buffer + value_input_count;
if (has_context) {
@@ -69,7 +82,7 @@
if (has_control) {
*current_input++ = environment_->GetControlDependency();
}
- result = graph()->NewNode(op, input_count_with_deps, buffer);
+    result = graph()->NewNode(op, input_count_with_deps, buffer, incomplete);
if (has_effect) {
environment_->UpdateEffectDependency(result);
}
@@ -125,7 +138,9 @@
// placing a singleton merge as the new control dependency.
if (this->IsMarkedAsUnreachable()) {
Node* other_control = other->control_dependency_;
-    control_dependency_ = graph()->NewNode(common()->Merge(1), other_control);
+ Node* inputs[] = {other_control};
+ control_dependency_ =
+        graph()->NewNode(common()->Merge(1), arraysize(inputs), inputs, true);
effect_dependency_ = other->effect_dependency_;
values_ = other->values_;
return;
@@ -164,7 +179,7 @@
Node* StructuredGraphBuilder::NewPhi(int count, Node* input, Node* control) {
const Operator* phi_op = common()->Phi(kMachAnyTagged, count);
- Node** buffer = local_zone()->NewArray<Node*>(count + 1);
+ Node** buffer = EnsureInputBufferSize(count + 1);
MemsetPointer(buffer, input, count);
buffer[count] = control;
return graph()->NewNode(phi_op, count + 1, buffer, true);
@@ -175,7 +190,7 @@
Node* StructuredGraphBuilder::NewEffectPhi(int count, Node* input,
Node* control) {
const Operator* phi_op = common()->EffectPhi(count);
- Node** buffer = local_zone()->NewArray<Node*>(count + 1);
+ Node** buffer = EnsureInputBufferSize(count + 1);
MemsetPointer(buffer, input, count);
buffer[count] = control;
return graph()->NewNode(phi_op, count + 1, buffer, true);
@@ -197,7 +212,8 @@
} else {
// Control node is a singleton, introduce a merge.
const Operator* op = common()->Merge(inputs);
- control = graph()->NewNode(op, control, other);
+ Node* inputs[] = {control, other};
+ control = graph()->NewNode(op, arraysize(inputs), inputs, true);
}
return control;
}
=======================================
--- /trunk/src/compiler/graph-builder.h Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/graph-builder.h Tue Oct 28 09:48:49 2014 UTC
@@ -24,42 +24,44 @@
explicit GraphBuilder(Graph* graph) : graph_(graph) {}
virtual ~GraphBuilder() {}
- Node* NewNode(const Operator* op) {
- return MakeNode(op, 0, static_cast<Node**>(NULL));
+ Node* NewNode(const Operator* op, bool incomplete = false) {
+ return MakeNode(op, 0, static_cast<Node**>(NULL), incomplete);
}
-  Node* NewNode(const Operator* op, Node* n1) { return MakeNode(op, 1, &n1); }
+ Node* NewNode(const Operator* op, Node* n1) {
+ return MakeNode(op, 1, &n1, false);
+ }
Node* NewNode(const Operator* op, Node* n1, Node* n2) {
Node* buffer[] = {n1, n2};
- return MakeNode(op, arraysize(buffer), buffer);
+ return MakeNode(op, arraysize(buffer), buffer, false);
}
Node* NewNode(const Operator* op, Node* n1, Node* n2, Node* n3) {
Node* buffer[] = {n1, n2, n3};
- return MakeNode(op, arraysize(buffer), buffer);
+ return MakeNode(op, arraysize(buffer), buffer, false);
}
  Node* NewNode(const Operator* op, Node* n1, Node* n2, Node* n3, Node* n4) {
Node* buffer[] = {n1, n2, n3, n4};
- return MakeNode(op, arraysize(buffer), buffer);
+ return MakeNode(op, arraysize(buffer), buffer, false);
}
Node* NewNode(const Operator* op, Node* n1, Node* n2, Node* n3, Node* n4,
Node* n5) {
Node* buffer[] = {n1, n2, n3, n4, n5};
- return MakeNode(op, arraysize(buffer), buffer);
+ return MakeNode(op, arraysize(buffer), buffer, false);
}
Node* NewNode(const Operator* op, Node* n1, Node* n2, Node* n3, Node* n4,
Node* n5, Node* n6) {
Node* nodes[] = {n1, n2, n3, n4, n5, n6};
- return MakeNode(op, arraysize(nodes), nodes);
+ return MakeNode(op, arraysize(nodes), nodes, false);
}
- Node* NewNode(const Operator* op, int value_input_count,
- Node** value_inputs) {
- return MakeNode(op, value_input_count, value_inputs);
+  Node* NewNode(const Operator* op, int value_input_count, Node** value_inputs,
+ bool incomplete = false) {
+ return MakeNode(op, value_input_count, value_inputs, incomplete);
}
Graph* graph() const { return graph_; }
@@ -67,7 +69,7 @@
protected:
// Base implementation used by all factory methods.
virtual Node* MakeNode(const Operator* op, int value_input_count,
- Node** value_inputs) = 0;
+ Node** value_inputs, bool incomplete) = 0;
private:
Graph* graph_;
@@ -95,8 +97,8 @@
// Helpers to create new control nodes.
Node* NewIfTrue() { return NewNode(common()->IfTrue()); }
Node* NewIfFalse() { return NewNode(common()->IfFalse()); }
- Node* NewMerge() { return NewNode(common()->Merge(1)); }
- Node* NewLoop() { return NewNode(common()->Loop(1)); }
+ Node* NewMerge() { return NewNode(common()->Merge(1), true); }
+ Node* NewLoop() { return NewNode(common()->Loop(1), true); }
Node* NewBranch(Node* condition) {
return NewNode(common()->Branch(), condition);
}
@@ -110,7 +112,7 @@
// ensures effect and control dependencies are wired up. The dependencies
// tracked by the environment might be mutated.
virtual Node* MakeNode(const Operator* op, int value_input_count,
- Node** value_inputs) FINAL;
+ Node** value_inputs, bool incomplete) FINAL;
Environment* environment() const { return environment_; }
void set_environment(Environment* env) { environment_ = env; }
@@ -148,6 +150,10 @@
// Zone local to the builder for data not leaking into the graph.
Zone* local_zone_;
+ // Temporary storage for building node input lists.
+ int input_buffer_size_;
+ Node** input_buffer_;
+
// Node representing the control dependency for dead code.
SetOncePointer<Node> dead_control_;
@@ -157,6 +163,12 @@
// Merge of all control nodes that exit the function body.
Node* exit_control_;
+  // Growth increment for the temporary buffer used to construct input lists to
+ // new nodes.
+ static const int kInputBufferSizeIncrement = 64;
+
+ Node** EnsureInputBufferSize(int size);
+
DISALLOW_COPY_AND_ASSIGN(StructuredGraphBuilder);
};
=======================================
--- /trunk/src/compiler/graph-visualizer.cc Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compiler/graph-visualizer.cc Tue Oct 28 09:48:49 2014 UTC
@@ -118,7 +118,7 @@
void Print() { const_cast<Graph*>(graph_)->VisitNodeInputsFromEnd(this);
}
- GenericGraphVisit::Control PreEdge(Node* from, int index, Node* to);
+ void PreEdge(Node* from, int index, Node* to);
private:
std::ostream& os_;
@@ -129,8 +129,7 @@
};
-GenericGraphVisit::Control JSONGraphEdgeWriter::PreEdge(Node* from, int
index,
- Node* to) {
+void JSONGraphEdgeWriter::PreEdge(Node* from, int index, Node* to) {
if (first_edge_) {
first_edge_ = false;
} else {
@@ -152,7 +151,6 @@
}
os_ << "{\"source\":" << to->id() << ",\"target\":" << from->id()
<< ",\"index\":" << index << ",\"type\":\"" << edge_type << "\"}";
- return GenericGraphVisit::CONTINUE;
}
@@ -174,7 +172,6 @@
void Print();
GenericGraphVisit::Control Pre(Node* node);
- GenericGraphVisit::Control PreEdge(Node* from, int index, Node* to);
private:
void AnnotateNode(Node* node);
@@ -219,17 +216,6 @@
}
return GenericGraphVisit::CONTINUE;
}
-
-
-GenericGraphVisit::Control GraphVisualizer::PreEdge(Node* from, int index,
- Node* to) {
- if (use_to_def_) return GenericGraphVisit::CONTINUE;
-  // When going from def to use, only consider white -> other edges, which are
- // the dead nodes that use live nodes. We're probably not interested in
- // dead nodes that only use other dead nodes.
- if (white_nodes_.count(from) > 0) return GenericGraphVisit::CONTINUE;
- return GenericGraphVisit::SKIP;
-}
static bool IsLikelyBackEdge(Node* from, int index, Node* to) {
=======================================
--- /trunk/src/compiler/graph.cc Fri Oct 17 11:41:30 2014 UTC
+++ /trunk/src/compiler/graph.cc Tue Oct 28 09:48:49 2014 UTC
@@ -11,6 +11,7 @@
#include "src/compiler/node-aux-data-inl.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/node-properties-inl.h"
+#include "src/compiler/opcodes.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/operator-properties-inl.h"
@@ -29,12 +30,14 @@
}
-Node* Graph::NewNode(
- const Operator* op, int input_count, Node** inputs, bool incomplete) {
+Node* Graph::NewNode(const Operator* op, int input_count, Node** inputs,
+ bool incomplete) {
DCHECK_LE(op->InputCount(), input_count);
- Node* result = Node::New(this, input_count, inputs);
+ Node* result = Node::New(this, input_count, inputs, incomplete);
result->Initialize(op);
- if (!incomplete) Decorate(result);
+ if (!incomplete) {
+ Decorate(result);
+ }
return result;
}
=======================================
--- /trunk/src/compiler/ia32/code-generator-ia32.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/ia32/code-generator-ia32.cc Tue Oct 28 09:48:49 2014 UTC
@@ -793,7 +793,7 @@
frame->SetRegisterSaveAreaSize(register_save_area_size);
}
} else if (descriptor->IsJSFunctionCall()) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
frame->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -1023,7 +1023,7 @@
void CodeGenerator::EnsureSpaceForLazyDeopt() {
int space_needed = Deoptimizer::patch_size();
- if (!linkage()->info()->IsStub()) {
+ if (!info()->IsStub()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
int current_pc = masm()->pc_offset();
=======================================
--- /trunk/src/compiler/instruction-selector.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/instruction-selector.cc Tue Oct 28 09:48:49 2014 UTC
@@ -1070,6 +1070,13 @@
BasicBlock* fbranch) {
UNIMPLEMENTED();
}
+
+
+// static
+MachineOperatorBuilder::Flags
+InstructionSelector::SupportedMachineOperatorFlags() {
+ return MachineOperatorBuilder::Flag::kNoFlags;
+}
#endif // !V8_TURBOFAN_BACKEND
=======================================
--- /trunk/src/compiler/js-generic-lowering.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/js-generic-lowering.cc Tue Oct 28 09:48:49 2014 UTC
@@ -19,7 +19,7 @@
JSGenericLowering::JSGenericLowering(CompilationInfo* info, JSGraph* jsgraph)
: info_(info),
jsgraph_(jsgraph),
- linkage_(new (jsgraph->zone()) Linkage(info)) {}
+ linkage_(new (jsgraph->zone()) Linkage(jsgraph->zone(), info)) {}
void JSGenericLowering::PatchOperator(Node* node, const Operator* op) {
=======================================
--- /trunk/src/compiler/js-graph.h Fri Oct 17 11:41:30 2014 UTC
+++ /trunk/src/compiler/js-graph.h Tue Oct 28 09:48:49 2014 UTC
@@ -128,6 +128,8 @@
Node* NumberConstant(double value);
Factory* factory() { return isolate()->factory(); }
+
+ DISALLOW_COPY_AND_ASSIGN(JSGraph);
};
} // namespace compiler
=======================================
--- /trunk/src/compiler/js-operator.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/js-operator.h Tue Oct 28 09:48:49 2014 UTC
@@ -195,7 +195,7 @@
// Interface for building JavaScript-level operators, e.g. directly from the
// AST. Most operators have no parameters, thus can be globally shared for all
// graphs.
-class JSOperatorBuilder FINAL {
+class JSOperatorBuilder FINAL : public ZoneObject {
public:
explicit JSOperatorBuilder(Zone* zone);
@@ -266,6 +266,8 @@
const JSOperatorBuilderImpl& impl_;
Zone* const zone_;
+
+ DISALLOW_COPY_AND_ASSIGN(JSOperatorBuilder);
};
} // namespace compiler
=======================================
--- /trunk/src/compiler/linkage.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/linkage.cc Tue Oct 28 09:48:49 2014 UTC
@@ -39,28 +39,30 @@
}
-Linkage::Linkage(CompilationInfo* info) : info_(info) {
+CallDescriptor* Linkage::ComputeIncoming(Zone* zone, CompilationInfo* info) {
if (info->function() != NULL) {
    // If we already have the function literal, use the number of parameters
// plus the receiver.
-    incoming_ = GetJSCallDescriptor(1 + info->function()->parameter_count());
- } else if (!info->closure().is_null()) {
+    return GetJSCallDescriptor(1 + info->function()->parameter_count(), zone);
+ }
+ if (!info->closure().is_null()) {
// If we are compiling a JS function, use a JS call descriptor,
// plus the receiver.
SharedFunctionInfo* shared = info->closure()->shared();
- incoming_ = GetJSCallDescriptor(1 + shared->formal_parameter_count());
- } else if (info->code_stub() != NULL) {
+ return GetJSCallDescriptor(1 + shared->formal_parameter_count(), zone);
+ }
+ if (info->code_stub() != NULL) {
// Use the code stub interface descriptor.
CallInterfaceDescriptor descriptor =
info->code_stub()->GetCallInterfaceDescriptor();
- incoming_ = GetStubCallDescriptor(descriptor);
- } else {
- incoming_ = NULL; // TODO(titzer): ?
+    return GetStubCallDescriptor(descriptor, 0, CallDescriptor::kNoFlags, zone);
}
+ return NULL; // TODO(titzer): ?
}
-FrameOffset Linkage::GetFrameOffset(int spill_slot, Frame* frame, int extra) {
+FrameOffset Linkage::GetFrameOffset(int spill_slot, Frame* frame,
+ int extra) const {
if (frame->GetSpillSlotCount() > 0 || incoming_->IsJSFunctionCall() ||
incoming_->kind() == CallDescriptor::kCallAddress) {
int offset;
@@ -87,24 +89,22 @@
}
-CallDescriptor* Linkage::GetJSCallDescriptor(int parameter_count) {
- return GetJSCallDescriptor(parameter_count, this->info_->zone());
+CallDescriptor* Linkage::GetJSCallDescriptor(int parameter_count) const {
+ return GetJSCallDescriptor(parameter_count, zone_);
}
CallDescriptor* Linkage::GetRuntimeCallDescriptor(
Runtime::FunctionId function, int parameter_count,
- Operator::Properties properties) {
- return GetRuntimeCallDescriptor(function, parameter_count, properties,
- this->info_->zone());
+ Operator::Properties properties) const {
+  return GetRuntimeCallDescriptor(function, parameter_count, properties, zone_);
}
CallDescriptor* Linkage::GetStubCallDescriptor(
CallInterfaceDescriptor descriptor, int stack_parameter_count,
- CallDescriptor::Flags flags) {
- return GetStubCallDescriptor(descriptor, stack_parameter_count, flags,
- this->info_->zone());
+ CallDescriptor::Flags flags) const {
+  return GetStubCallDescriptor(descriptor, stack_parameter_count, flags, zone_);
}
=======================================
--- /trunk/src/compiler/linkage.h Wed Oct 1 00:05:35 2014 UTC
+++ /trunk/src/compiler/linkage.h Tue Oct 28 09:48:49 2014 UTC
@@ -129,16 +129,18 @@
private:
friend class Linkage;
- Kind kind_;
- MachineType target_type_;
- LinkageLocation target_loc_;
- MachineSignature* machine_sig_;
- LocationSignature* location_sig_;
- size_t js_param_count_;
- Operator::Properties properties_;
- RegList callee_saved_registers_;
- Flags flags_;
- const char* debug_name_;
+ const Kind kind_;
+ const MachineType target_type_;
+ const LinkageLocation target_loc_;
+ const MachineSignature* const machine_sig_;
+ const LocationSignature* const location_sig_;
+ const size_t js_param_count_;
+ const Operator::Properties properties_;
+ const RegList callee_saved_registers_;
+ const Flags flags_;
+ const char* const debug_name_;
+
+ DISALLOW_COPY_AND_ASSIGN(CallDescriptor);
};
DEFINE_OPERATORS_FOR_FLAGS(CallDescriptor::Flags)
@@ -161,25 +163,28 @@
// Call[Runtime] CEntryStub, arg 1, arg 2, arg 3, [...], fun, #arg, context
class Linkage : public ZoneObject {
public:
- explicit Linkage(CompilationInfo* info);
- explicit Linkage(CompilationInfo* info, CallDescriptor* incoming)
- : info_(info), incoming_(incoming) {}
+ Linkage(Zone* zone, CompilationInfo* info)
+ : zone_(zone), incoming_(ComputeIncoming(zone, info)) {}
+ Linkage(Zone* zone, CallDescriptor* incoming)
+ : zone_(zone), incoming_(incoming) {}
+
+  static CallDescriptor* ComputeIncoming(Zone* zone, CompilationInfo* info);
// The call descriptor for this compilation unit describes the locations
// of incoming parameters and the outgoing return value(s).
- CallDescriptor* GetIncomingDescriptor() { return incoming_; }
- CallDescriptor* GetJSCallDescriptor(int parameter_count);
+ CallDescriptor* GetIncomingDescriptor() const { return incoming_; }
+ CallDescriptor* GetJSCallDescriptor(int parameter_count) const;
  static CallDescriptor* GetJSCallDescriptor(int parameter_count, Zone* zone);
- CallDescriptor* GetRuntimeCallDescriptor(Runtime::FunctionId function,
- int parameter_count,
- Operator::Properties properties);
+ CallDescriptor* GetRuntimeCallDescriptor(
+ Runtime::FunctionId function, int parameter_count,
+ Operator::Properties properties) const;
static CallDescriptor* GetRuntimeCallDescriptor(
Runtime::FunctionId function, int parameter_count,
Operator::Properties properties, Zone* zone);
CallDescriptor* GetStubCallDescriptor(
CallInterfaceDescriptor descriptor, int stack_parameter_count = 0,
- CallDescriptor::Flags flags = CallDescriptor::kNoFlags);
+ CallDescriptor::Flags flags = CallDescriptor::kNoFlags) const;
static CallDescriptor* GetStubCallDescriptor(
CallInterfaceDescriptor descriptor, int stack_parameter_count,
CallDescriptor::Flags flags, Zone* zone);
@@ -192,37 +197,37 @@
MachineSignature* sig);
// Get the location of an (incoming) parameter to this function.
- LinkageLocation GetParameterLocation(int index) {
+ LinkageLocation GetParameterLocation(int index) const {
return incoming_->GetInputLocation(index + 1); // + 1 to skip target.
}
// Get the machine type of an (incoming) parameter to this function.
- MachineType GetParameterType(int index) {
+ MachineType GetParameterType(int index) const {
return incoming_->GetInputType(index + 1); // + 1 to skip target.
}
// Get the location where this function should place its return value.
- LinkageLocation GetReturnLocation() {
+ LinkageLocation GetReturnLocation() const {
return incoming_->GetReturnLocation(0);
}
// Get the machine type of this function's return value.
- MachineType GetReturnType() { return incoming_->GetReturnType(0); }
+ MachineType GetReturnType() const { return incoming_->GetReturnType(0); }
// Get the frame offset for a given spill slot. The location depends on the
// calling convention and the specific frame layout, and may thus be
// architecture-specific. Negative spill slots indicate arguments on the
// caller's frame. The {extra} parameter indicates an additional offset from
// the frame offset, e.g. to index into part of a double slot.
- FrameOffset GetFrameOffset(int spill_slot, Frame* frame, int extra = 0);
-
- CompilationInfo* info() const { return info_; }
+ FrameOffset GetFrameOffset(int spill_slot, Frame* frame, int extra = 0) const;
static bool NeedsFrameState(Runtime::FunctionId function);
private:
- CompilationInfo* info_;
- CallDescriptor* incoming_;
+ Zone* const zone_;
+ CallDescriptor* const incoming_;
+
+ DISALLOW_COPY_AND_ASSIGN(Linkage);
};
} // namespace compiler
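Note on the macro newly applied to CallDescriptor and Linkage here (and to several
more classes later in this change): DISALLOW_COPY_AND_ASSIGN is the usual V8/Chromium
idiom for making a type non-copyable. A minimal, self-contained sketch of the typical
expansion follows; it is illustrative only, not the exact definition from V8's macros
header.

  #define DISALLOW_COPY_AND_ASSIGN(TypeName) \
    TypeName(const TypeName&) = delete;      \
    void operator=(const TypeName&) = delete

  class Example {
   public:
    Example() {}

   private:
    DISALLOW_COPY_AND_ASSIGN(Example);  // copying an Example no longer compiles
  };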
=======================================
--- /trunk/src/compiler/machine-operator.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/machine-operator.h Tue Oct 28 09:48:49 2014 UTC
@@ -56,7 +56,7 @@
// Interface for building machine-level operators. These operators are
// machine-level but machine-independent and thus define a language suitable
// for generating code to run on architectures such as ia32, x64, arm, etc.
-class MachineOperatorBuilder FINAL {
+class MachineOperatorBuilder FINAL : public ZoneObject {
public:
// Flags that specify which operations are available. This is useful
// for operations that are unsupported by some back-ends.
@@ -209,6 +209,7 @@
const MachineOperatorBuilderImpl& impl_;
const MachineType word_;
const Flags flags_;
+ DISALLOW_COPY_AND_ASSIGN(MachineOperatorBuilder);
};
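Making MachineOperatorBuilder a ZoneObject is what allows the new PipelineData (in
pipeline.cc below) to allocate it with new (graph_zone()) MachineOperatorBuilder(...),
tying the builder's lifetime to the graph zone. A toy sketch of that
placement-new-into-a-zone idiom, with a made-up Zone type standing in for V8's:

  #include <cstddef>
  #include <vector>

  struct Zone {                              // toy arena, not V8's Zone
    std::vector<char*> chunks;
    void* New(std::size_t size) {
      chunks.push_back(new char[size]);
      return chunks.back();
    }
    ~Zone() {
      for (char* c : chunks) delete[] c;     // everything dies with the zone
    }
  };

  struct ZoneObject {
    void* operator new(std::size_t size, Zone* zone) { return zone->New(size); }
  };

  struct BuilderSketch : public ZoneObject {
    int flags = 0;
  };

  int main() {
    Zone graph_zone;
    BuilderSketch* b = new (&graph_zone) BuilderSketch();  // freed with the zone
    return b->flags;
  }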
=======================================
--- /trunk/src/compiler/mips/code-generator-mips.cc Wed Oct 15 13:35:30 2014 UTC
+++ /trunk/src/compiler/mips/code-generator-mips.cc Tue Oct 28 09:48:49 2014 UTC
@@ -709,7 +709,7 @@
__ MultiPush(saves);
}
} else if (descriptor->IsJSFunctionCall()) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
frame()->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -942,7 +942,7 @@
void CodeGenerator::EnsureSpaceForLazyDeopt() {
int space_needed = Deoptimizer::patch_size();
- if (!linkage()->info()->IsStub()) {
+ if (!info()->IsStub()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
int current_pc = masm()->pc_offset();
=======================================
--- /trunk/src/compiler/node.h Fri Oct 17 11:41:30 2014 UTC
+++ /trunk/src/compiler/node.h Tue Oct 28 09:48:49 2014 UTC
@@ -48,8 +48,8 @@
// out-of-line indexed by the Node's id.
class Node FINAL : public GenericNode<NodeData, Node> {
public:
- Node(GenericGraphBase* graph, int input_count)
- : GenericNode<NodeData, Node>(graph, input_count) {}
+ Node(GenericGraphBase* graph, int input_count, int reserve_input_count)
+ : GenericNode<NodeData, Node>(graph, input_count, reserve_input_count) {}
void Initialize(const Operator* op) { set_op(op); }
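The reserve_input_count parameter threaded through GenericNode and Node here (and
through Graph::NewNode and RawMachineAssembler::MakeNode below via the incomplete
flag) lets a node be created with capacity reserved for inputs that are appended only
later, for example when the back edges of a loop are wired up. A rough stand-alone
analogy, purely illustrative and not V8's actual node representation:

  #include <vector>

  struct NodeSketch {
    NodeSketch(int input_count, int reserve_input_count) {
      inputs_.reserve(input_count + reserve_input_count);  // room for later edges
      inputs_.resize(input_count, nullptr);                // current inputs
    }
    void AppendInput(NodeSketch* input) { inputs_.push_back(input); }
    std::vector<NodeSketch*> inputs_;
  };

  // A loop phi, say, might start with one value input and one reserved slot:
  //   NodeSketch phi(1, 1);
  //   ... later, once the loop body is built ...
  //   phi.AppendInput(&back_edge_value);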
=======================================
--- /trunk/src/compiler/operator-properties-inl.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/operator-properties-inl.h Tue Oct 28 09:48:49 2014 UTC
@@ -117,6 +117,7 @@
}
inline int OperatorProperties::GetControlInputCount(const Operator* op) {
+ // TODO(titzer): fix this mess; just make them a count on the operator.
switch (op->opcode()) {
case IrOpcode::kPhi:
case IrOpcode::kEffectPhi:
@@ -127,8 +128,8 @@
#define OPCODE_CASE(x) case IrOpcode::k##x:
CONTROL_OP_LIST(OPCODE_CASE)
#undef OPCODE_CASE
- // Branch operator is special
if (op->opcode() == IrOpcode::kBranch) return 1;
+ if (op->opcode() == IrOpcode::kTerminate) return 1;
// Control operators are Operator1<int>.
return OpParameter<int>(op);
default:
=======================================
--- /trunk/src/compiler/pipeline-statistics.cc Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compiler/pipeline-statistics.cc Tue Oct 28 09:48:49 2014 UTC
@@ -16,6 +16,10 @@
scope_.Reset(new ZonePool::StatsScope(pipeline_stats->zone_pool_));
timer_.Start();
outer_zone_initial_size_ = pipeline_stats->OuterZoneSize();
+ allocated_bytes_at_start_ =
+ outer_zone_initial_size_ -
+ pipeline_stats->total_stats_.outer_zone_initial_size_ +
+ pipeline_stats->zone_pool_->GetCurrentAllocatedBytes();
}
@@ -28,6 +32,8 @@
size_t outer_zone_diff =
pipeline_stats->OuterZoneSize() - outer_zone_initial_size_;
diff->max_allocated_bytes_ = outer_zone_diff + scope_->GetMaxAllocatedBytes();
+ diff->absolute_max_allocated_bytes_ =
+ diff->max_allocated_bytes_ + allocated_bytes_at_start_;
diff->total_allocated_bytes_ =
outer_zone_diff + scope_->GetTotalAllocatedBytes();
scope_.Reset(NULL);
=======================================
--- /trunk/src/compiler/pipeline-statistics.h Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compiler/pipeline-statistics.h Tue Oct 28 09:48:49 2014 UTC
@@ -39,6 +39,7 @@
SmartPointer<ZonePool::StatsScope> scope_;
base::ElapsedTimer timer_;
size_t outer_zone_initial_size_;
+ size_t allocated_bytes_at_start_;
};
bool InPhaseKind() { return !phase_kind_stats_.scope_.is_empty(); }
=======================================
--- /trunk/src/compiler/pipeline.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/pipeline.cc Tue Oct 28 09:48:49 2014 UTC
@@ -40,6 +40,128 @@
namespace internal {
namespace compiler {
+class PipelineData {
+ public:
+ explicit PipelineData(CompilationInfo* info, ZonePool* zone_pool,
+ PipelineStatistics* pipeline_statistics)
+ : isolate_(info->zone()->isolate()),
+ outer_zone_(info->zone()),
+ zone_pool_(zone_pool),
+ pipeline_statistics_(pipeline_statistics),
+ graph_zone_scope_(zone_pool_),
+ graph_zone_(graph_zone_scope_.zone()),
+ graph_(new (graph_zone()) Graph(graph_zone())),
+ source_positions_(new SourcePositionTable(graph())),
+ machine_(new (graph_zone()) MachineOperatorBuilder(
+ kMachPtr, InstructionSelector::SupportedMachineOperatorFlags())),
+ common_(new (graph_zone()) CommonOperatorBuilder(graph_zone())),
+ javascript_(new (graph_zone()) JSOperatorBuilder(graph_zone())),
+ jsgraph_(new (graph_zone())
+ JSGraph(graph(), common(), javascript(), machine())),
+ typer_(new Typer(graph(), info->context())),
+ schedule_(NULL),
+ instruction_zone_scope_(zone_pool_),
+ instruction_zone_(instruction_zone_scope_.zone()) {}
+
+ // For machine graph testing only.
+ PipelineData(Graph* graph, Schedule* schedule, ZonePool* zone_pool)
+ : isolate_(graph->zone()->isolate()),
+ outer_zone_(NULL),
+ zone_pool_(zone_pool),
+ pipeline_statistics_(NULL),
+ graph_zone_scope_(zone_pool_),
+ graph_zone_(NULL),
+ graph_(graph),
+ source_positions_(new SourcePositionTable(graph)),
+ machine_(NULL),
+ common_(NULL),
+ javascript_(NULL),
+ jsgraph_(NULL),
+ typer_(NULL),
+ schedule_(schedule),
+ instruction_zone_scope_(zone_pool_),
+ instruction_zone_(instruction_zone_scope_.zone()) {}
+
+ ~PipelineData() {
+ DeleteInstructionZone();
+ DeleteGraphZone();
+ }
+
+ Isolate* isolate() const { return isolate_; }
+ ZonePool* zone_pool() const { return zone_pool_; }
+ PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
+
+ Zone* graph_zone() const { return graph_zone_; }
+ Graph* graph() const { return graph_; }
+ SourcePositionTable* source_positions() const {
+ return source_positions_.get();
+ }
+ MachineOperatorBuilder* machine() const { return machine_; }
+ CommonOperatorBuilder* common() const { return common_; }
+ JSOperatorBuilder* javascript() const { return javascript_; }
+ JSGraph* jsgraph() const { return jsgraph_; }
+ Typer* typer() const { return typer_.get(); }
+ Schedule* schedule() const { return schedule_; }
+ void set_schedule(Schedule* schedule) {
+ DCHECK_EQ(NULL, schedule_);
+ schedule_ = schedule;
+ }
+
+ Zone* instruction_zone() const { return instruction_zone_; }
+
+ void DeleteGraphZone() {
+ // Destroy objects with destructors first.
+ source_positions_.Reset(NULL);
+ typer_.Reset(NULL);
+ if (graph_zone_ == NULL) return;
+ // Destroy zone and clear pointers.
+ graph_zone_scope_.Destroy();
+ graph_zone_ = NULL;
+ graph_ = NULL;
+ machine_ = NULL;
+ common_ = NULL;
+ javascript_ = NULL;
+ jsgraph_ = NULL;
+ schedule_ = NULL;
+ }
+
+ void DeleteInstructionZone() {
+ if (instruction_zone_ == NULL) return;
+ instruction_zone_scope_.Destroy();
+ instruction_zone_ = NULL;
+ }
+
+ private:
+ Isolate* isolate_;
+ Zone* outer_zone_;
+ ZonePool* zone_pool_;
+ PipelineStatistics* pipeline_statistics_;
+
+ ZonePool::Scope graph_zone_scope_;
+ Zone* graph_zone_;
+ // All objects in the following group of fields are allocated in graph_zone_.
+ // They are all set to NULL when the graph_zone_ is destroyed.
+ Graph* graph_;
+ // TODO(dcarney): make this into a ZoneObject.
+ SmartPointer<SourcePositionTable> source_positions_;
+ MachineOperatorBuilder* machine_;
+ CommonOperatorBuilder* common_;
+ JSOperatorBuilder* javascript_;
+ JSGraph* jsgraph_;
+ // TODO(dcarney): make this into a ZoneObject.
+ SmartPointer<Typer> typer_;
+ Schedule* schedule_;
+
+ // All objects in the following group of fields are allocated in
+ // instruction_zone_. They are all set to NULL when the instruction_zone_ is
+ // destroyed.
+ ZonePool::Scope instruction_zone_scope_;
+ Zone* instruction_zone_;
+
+ DISALLOW_COPY_AND_ASSIGN(PipelineData);
+};
+
+
static inline bool VerifyGraphs() {
#ifdef DEBUG
return true;
@@ -157,7 +279,7 @@
SmartPointer<PipelineStatistics> pipeline_statistics;
if (FLAG_turbo_stats) {
pipeline_statistics.Reset(new PipelineStatistics(info(), &zone_pool));
- pipeline_statistics->BeginPhaseKind("create graph");
+ pipeline_statistics->BeginPhaseKind("graph creation");
}
if (FLAG_trace_turbo) {
@@ -170,32 +292,24 @@
tcf << AsC1VCompilation(info());
}
- // Build the graph.
- Graph graph(zone());
- SourcePositionTable source_positions(&graph);
- source_positions.AddDecorator();
- // TODO(turbofan): there is no need to type anything during initial graph
- // construction. This is currently only needed for the node cache, which the
- // typer could sweep over later.
- Typer typer(&graph, info()->context());
- MachineOperatorBuilder machine(
- kMachPtr, InstructionSelector::SupportedMachineOperatorFlags());
- CommonOperatorBuilder common(zone());
- JSOperatorBuilder javascript(zone());
- JSGraph jsgraph(&graph, &common, &javascript, &machine);
+ // Initialize the graph and builders.
+ PipelineData data(info(), &zone_pool, pipeline_statistics.get());
+
+ data.source_positions()->AddDecorator();
+
Node* context_node;
{
PhaseScope phase_scope(pipeline_statistics.get(), "graph builder");
- ZonePool::Scope zone_scope(&zone_pool);
- AstGraphBuilderWithPositions graph_builder(zone_scope.zone(), info(),
- &jsgraph, &source_positions);
+ ZonePool::Scope zone_scope(data.zone_pool());
+ AstGraphBuilderWithPositions graph_builder(
+ zone_scope.zone(), info(), data.jsgraph(), data.source_positions());
graph_builder.CreateGraph();
context_node = graph_builder.GetFunctionContext();
}
{
PhaseScope phase_scope(pipeline_statistics.get(), "phi reduction");
PhiReducer phi_reducer;
- GraphReducer graph_reducer(&graph);
+ GraphReducer graph_reducer(data.graph());
graph_reducer.AddReducer(&phi_reducer);
graph_reducer.ReduceGraph();
// TODO(mstarzinger): Running reducer once ought to be enough for everyone.
@@ -203,30 +317,30 @@
graph_reducer.ReduceGraph();
}
- VerifyAndPrintGraph(&graph, "Initial untyped", true);
+ VerifyAndPrintGraph(data.graph(), "Initial untyped", true);
if (info()->is_context_specializing()) {
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
// Specialize the code to the context as aggressively as possible.
- JSContextSpecializer spec(info(), &jsgraph, context_node);
+ JSContextSpecializer spec(info(), data.jsgraph(), context_node);
spec.SpecializeToContext();
- VerifyAndPrintGraph(&graph, "Context specialized", true);
+ VerifyAndPrintGraph(data.graph(), "Context specialized", true);
}
if (info()->is_inlining_enabled()) {
PhaseScope phase_scope(pipeline_statistics.get(), "inlining");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- ZonePool::Scope zone_scope(&zone_pool);
- JSInliner inliner(zone_scope.zone(), info(), &jsgraph);
+ ZonePool::Scope zone_scope(data.zone_pool());
+ JSInliner inliner(zone_scope.zone(), info(), data.jsgraph());
inliner.Inline();
- VerifyAndPrintGraph(&graph, "Inlined", true);
+ VerifyAndPrintGraph(data.graph(), "Inlined", true);
}
// Print a replay of the initial graph.
if (FLAG_print_turbo_replay) {
- GraphReplayPrinter::PrintReplay(&graph);
+ GraphReplayPrinter::PrintReplay(data.graph());
}
// Bailout here in case target architecture is not supported.
@@ -236,8 +350,8 @@
{
// Type the graph.
PhaseScope phase_scope(pipeline_statistics.get(), "typer");
- typer.Run();
- VerifyAndPrintGraph(&graph, "Typed");
+ data.typer()->Run();
+ VerifyAndPrintGraph(data.graph(), "Typed");
}
}
@@ -249,46 +363,46 @@
{
// Lower JSOperators where we can determine types.
PhaseScope phase_scope(pipeline_statistics.get(), "typed lowering");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- ValueNumberingReducer vn_reducer(zone());
- JSTypedLowering lowering(&jsgraph);
- SimplifiedOperatorReducer simple_reducer(&jsgraph);
- GraphReducer graph_reducer(&graph);
+ ValueNumberingReducer vn_reducer(data.graph_zone());
+ JSTypedLowering lowering(data.jsgraph());
+ SimplifiedOperatorReducer simple_reducer(data.jsgraph());
+ GraphReducer graph_reducer(data.graph());
graph_reducer.AddReducer(&vn_reducer);
graph_reducer.AddReducer(&lowering);
graph_reducer.AddReducer(&simple_reducer);
graph_reducer.ReduceGraph();
- VerifyAndPrintGraph(&graph, "Lowered typed");
+ VerifyAndPrintGraph(data.graph(), "Lowered typed");
}
{
// Lower simplified operators and insert changes.
PhaseScope phase_scope(pipeline_statistics.get(), "simplified
lowering");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- SimplifiedLowering lowering(&jsgraph);
+ SimplifiedLowering lowering(data.jsgraph());
lowering.LowerAllNodes();
- ValueNumberingReducer vn_reducer(zone());
- SimplifiedOperatorReducer simple_reducer(&jsgraph);
- GraphReducer graph_reducer(&graph);
+ ValueNumberingReducer vn_reducer(data.graph_zone());
+ SimplifiedOperatorReducer simple_reducer(data.jsgraph());
+ GraphReducer graph_reducer(data.graph());
graph_reducer.AddReducer(&vn_reducer);
graph_reducer.AddReducer(&simple_reducer);
graph_reducer.ReduceGraph();
- VerifyAndPrintGraph(&graph, "Lowered simplified");
+ VerifyAndPrintGraph(data.graph(), "Lowered simplified");
}
{
// Lower changes that have been inserted before.
PhaseScope phase_scope(pipeline_statistics.get(), "change lowering");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- Linkage linkage(info());
- ValueNumberingReducer vn_reducer(zone());
- SimplifiedOperatorReducer simple_reducer(&jsgraph);
- ChangeLowering lowering(&jsgraph, &linkage);
- MachineOperatorReducer mach_reducer(&jsgraph);
- GraphReducer graph_reducer(&graph);
+ Linkage linkage(data.graph_zone(), info());
+ ValueNumberingReducer vn_reducer(data.graph_zone());
+ SimplifiedOperatorReducer simple_reducer(data.jsgraph());
+ ChangeLowering lowering(data.jsgraph(), &linkage);
+ MachineOperatorReducer mach_reducer(data.jsgraph());
+ GraphReducer graph_reducer(data.graph());
// TODO(titzer): Figure out if we should run all reducers at once here.
graph_reducer.AddReducer(&vn_reducer);
graph_reducer.AddReducer(&simple_reducer);
@@ -297,53 +411,49 @@
graph_reducer.ReduceGraph();
// TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
- VerifyAndPrintGraph(&graph, "Lowered changes", true);
+ VerifyAndPrintGraph(data.graph(), "Lowered changes", true);
}
{
PhaseScope phase_scope(pipeline_statistics.get(), "control
reduction");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- ZonePool::Scope zone_scope(&zone_pool);
- ControlReducer::ReduceGraph(zone_scope.zone(), &jsgraph, &common);
+ ZonePool::Scope zone_scope(data.zone_pool());
+ ControlReducer::ReduceGraph(zone_scope.zone(), data.jsgraph(),
+ data.common());
- VerifyAndPrintGraph(&graph, "Control reduced");
+ VerifyAndPrintGraph(data.graph(), "Control reduced");
}
}
{
// Lower any remaining generic JSOperators.
PhaseScope phase_scope(pipeline_statistics.get(), "generic lowering");
- SourcePositionTable::Scope pos(&source_positions,
+ SourcePositionTable::Scope pos(data.source_positions(),
SourcePosition::Unknown());
- JSGenericLowering lowering(info(), &jsgraph);
- GraphReducer graph_reducer(&graph);
+ JSGenericLowering lowering(info(), data.jsgraph());
+ GraphReducer graph_reducer(data.graph());
graph_reducer.AddReducer(&lowering);
graph_reducer.ReduceGraph();
// TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
- VerifyAndPrintGraph(&graph, "Lowered generic", true);
+ VerifyAndPrintGraph(data.graph(), "Lowered generic", true);
}
if (!pipeline_statistics.is_empty()) {
- pipeline_statistics->BeginPhaseKind("code generation");
+ pipeline_statistics->BeginPhaseKind("block building");
}
- source_positions.RemoveDecorator();
+ data.source_positions()->RemoveDecorator();
- Schedule* schedule;
- {
- PhaseScope phase_scope(pipeline_statistics.get(), "scheduling");
- // Compute a schedule.
- schedule = ComputeSchedule(&zone_pool, &graph);
- }
+ // Compute a schedule.
+ ComputeSchedule(&data);
Handle<Code> code = Handle<Code>::null();
{
// Generate optimized code.
- Linkage linkage(info());
- code = GenerateCode(pipeline_statistics.get(), &zone_pool, &linkage, &graph,
- schedule, &source_positions);
+ Linkage linkage(data.instruction_zone(), info());
+ code = GenerateCode(&linkage, &data);
info()->SetCode(code);
}
@@ -362,11 +472,13 @@
}
-Schedule* Pipeline::ComputeSchedule(ZonePool* zone_pool, Graph* graph) {
- Schedule* schedule = Scheduler::ComputeSchedule(zone_pool, graph);
+void Pipeline::ComputeSchedule(PipelineData* data) {
+ PhaseScope phase_scope(data->pipeline_statistics(), "scheduling");
+ Schedule* schedule =
+ Scheduler::ComputeSchedule(data->zone_pool(), data->graph());
TraceSchedule(schedule);
if (VerifyGraphs()) ScheduleVerifier::Run(schedule);
- return schedule;
+ data->set_schedule(schedule);
}
@@ -375,16 +487,16 @@
Schedule* schedule) {
ZonePool zone_pool(isolate());
CHECK(SupportedBackend());
+ PipelineData data(graph, schedule, &zone_pool);
if (schedule == NULL) {
// TODO(rossberg): Should this really be untyped?
VerifyAndPrintGraph(graph, "Machine", true);
- schedule = ComputeSchedule(&zone_pool, graph);
+ ComputeSchedule(&data);
+ } else {
+ TraceSchedule(schedule);
}
- TraceSchedule(schedule);
- SourcePositionTable source_positions(graph);
- Handle<Code> code = GenerateCode(NULL, &zone_pool, linkage, graph, schedule,
- &source_positions);
+ Handle<Code> code = GenerateCode(linkage, &data);
#if ENABLE_DISASSEMBLER
if (!code.is_null() && FLAG_print_opt_code) {
CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
@@ -396,29 +508,27 @@
}
-Handle<Code> Pipeline::GenerateCode(PipelineStatistics* pipeline_statistics,
- ZonePool* zone_pool, Linkage* linkage,
- Graph* graph, Schedule* schedule,
- SourcePositionTable* source_positions) {
- DCHECK_NOT_NULL(graph);
+Handle<Code> Pipeline::GenerateCode(Linkage* linkage, PipelineData* data) {
DCHECK_NOT_NULL(linkage);
- DCHECK_NOT_NULL(schedule);
+ DCHECK_NOT_NULL(data->graph());
+ DCHECK_NOT_NULL(data->schedule());
CHECK(SupportedBackend());
BasicBlockProfiler::Data* profiler_data = NULL;
if (FLAG_turbo_profiling) {
- profiler_data = BasicBlockInstrumentor::Instrument(info_, graph, schedule);
+ profiler_data = BasicBlockInstrumentor::Instrument(info(), data->graph(),
+ data->schedule());
}
- Zone* instruction_zone = schedule->zone();
- InstructionSequence sequence(instruction_zone, graph, schedule);
+ InstructionSequence sequence(data->instruction_zone(), data->graph(),
+ data->schedule());
// Select and schedule instructions covering the scheduled graph.
{
- PhaseScope phase_scope(pipeline_statistics, "select instructions");
- ZonePool::Scope zone_scope(zone_pool);
+ PhaseScope phase_scope(data->pipeline_statistics(), "select
instructions");
+ ZonePool::Scope zone_scope(data->zone_pool());
InstructionSelector selector(zone_scope.zone(), linkage, &sequence,
- schedule, source_positions);
+ data->schedule(), data->source_positions());
selector.SelectInstructions();
}
@@ -427,22 +537,28 @@
os << "----- Instruction sequence before register allocation -----\n"
<< sequence;
TurboCfgFile tcf(isolate());
- tcf << AsC1V("CodeGen", schedule, source_positions, &sequence);
+ tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
+ &sequence);
+ }
+
+ data->DeleteGraphZone();
+
+ if (data->pipeline_statistics() != NULL) {
+ data->pipeline_statistics()->BeginPhaseKind("register allocation");
}
// Allocate registers.
Frame frame;
{
- int node_count = graph->NodeCount();
+ int node_count = sequence.VirtualRegisterCount();
if (node_count > UnallocatedOperand::kMaxVirtualRegisters) {
- linkage->info()->AbortOptimization(kNotEnoughVirtualRegistersForValues);
+ info()->AbortOptimization(kNotEnoughVirtualRegistersForValues);
return Handle<Code>::null();
}
- ZonePool::Scope zone_scope(zone_pool);
- RegisterAllocator allocator(zone_scope.zone(), &frame, linkage->info(),
- &sequence);
- if (!allocator.Allocate(pipeline_statistics)) {
- linkage->info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
+ ZonePool::Scope zone_scope(data->zone_pool());
+ RegisterAllocator allocator(zone_scope.zone(), &frame, info(), &sequence);
+ if (!allocator.Allocate(data->pipeline_statistics())) {
+ info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
return Handle<Code>::null();
}
if (FLAG_trace_turbo) {
@@ -456,12 +572,16 @@
os << "----- Instruction sequence after register allocation -----\n"
<< sequence;
}
+
+ if (data->pipeline_statistics() != NULL) {
+ data->pipeline_statistics()->BeginPhaseKind("code generation");
+ }
// Generate native sequence.
Handle<Code> code;
{
- PhaseScope phase_scope(pipeline_statistics, "generate code");
- CodeGenerator generator(&frame, linkage, &sequence);
+ PhaseScope phase_scope(data->pipeline_statistics(), "generate code");
+ CodeGenerator generator(&frame, linkage, &sequence, info());
code = generator.GenerateCode();
}
if (profiler_data != NULL) {
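The point of the new PipelineData above is lifetime management: everything allocated
in the graph zone (graph, operator builders, JSGraph) can be dropped wholesale via
DeleteGraphZone() once instruction selection is done, while the instruction zone
survives through register allocation and code generation. A toy sketch of that
two-arena pattern, independent of V8's ZonePool and Zone types:

  #include <cstddef>
  #include <memory>
  #include <vector>

  struct Arena {                                      // stand-in for a zone
    std::vector<std::unique_ptr<char[]>> blocks;
    void* Allocate(std::size_t n) {
      blocks.emplace_back(new char[n]);
      return blocks.back().get();
    }
  };

  struct PipelineDataSketch {
    std::unique_ptr<Arena> graph_arena{new Arena};        // graph-phase objects
    std::unique_ptr<Arena> instruction_arena{new Arena};  // codegen-phase objects

    void DeleteGraphArena() { graph_arena.reset(); }      // free graph memory
  };                                                      // before regalloc

  // Usage mirrors GenerateCode() above: build the graph and select instructions,
  // then call DeleteGraphArena() before register allocation begins.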
=======================================
--- /trunk/src/compiler/pipeline.h Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compiler/pipeline.h Tue Oct 28 09:48:49 2014 UTC
@@ -18,13 +18,9 @@
// Clients of this interface shouldn't depend on lots of compiler internals.
class Graph;
-class InstructionSequence;
class Linkage;
-class PipelineStatistics;
-class RegisterAllocator;
+class PipelineData;
class Schedule;
-class SourcePositionTable;
-class ZonePool;
class Pipeline {
public:
@@ -49,15 +45,11 @@
CompilationInfo* info() const { return info_; }
Isolate* isolate() { return info_->isolate(); }
- Zone* zone() { return info_->zone(); }
- Schedule* ComputeSchedule(ZonePool* zone_pool, Graph* graph);
+ void ComputeSchedule(PipelineData* data);
void VerifyAndPrintGraph(Graph* graph, const char* phase,
bool untyped = false);
- Handle<Code> GenerateCode(PipelineStatistics* pipeline_statistics,
- ZonePool* zone_pool, Linkage* linkage, Graph* graph,
- Schedule* schedule,
- SourcePositionTable* source_positions);
+ Handle<Code> GenerateCode(Linkage* linkage, PipelineData* data);
};
}
}
=======================================
--- /trunk/src/compiler/raw-machine-assembler.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/raw-machine-assembler.cc Tue Oct 28 09:48:49 2014 UTC
@@ -151,10 +151,10 @@
Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
- Node** inputs) {
+ Node** inputs, bool incomplete) {
DCHECK(ScheduleValid());
DCHECK(current_block_ != NULL);
- Node* node = graph()->NewNode(op, input_count, inputs);
+ Node* node = graph()->NewNode(op, input_count, inputs, incomplete);
BasicBlock* block = op->opcode() == IrOpcode::kParameter ? schedule()->start()
: CurrentBlock();
schedule()->AddNode(block, node);
=======================================
--- /trunk/src/compiler/raw-machine-assembler.h Wed Oct 15 13:35:30 2014 UTC
+++ /trunk/src/compiler/raw-machine-assembler.h Tue Oct 28 09:48:49 2014 UTC
@@ -416,8 +416,8 @@
Schedule* Export();
protected:
- virtual Node* MakeNode(const Operator* op, int input_count,
- Node** inputs) FINAL;
+ virtual Node* MakeNode(const Operator* op, int input_count, Node** inputs,
+ bool incomplete) FINAL;
bool ScheduleValid() { return schedule_ != NULL; }
=======================================
--- /trunk/src/compiler/schedule.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/schedule.h Tue Oct 28 09:48:49 2014 UTC
@@ -277,6 +277,8 @@
BasicBlockVector rpo_order_; // Reverse-post-order block list.
BasicBlock* start_;
BasicBlock* end_;
+
+ DISALLOW_COPY_AND_ASSIGN(Schedule);
};
std::ostream& operator<<(std::ostream& os, const Schedule& s);
=======================================
--- /trunk/src/compiler/scheduler.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/compiler/scheduler.cc Tue Oct 28 09:48:49 2014 UTC
@@ -382,7 +382,8 @@
}
bool IsFinalMerge(Node* node) {
- return (node == scheduler_->graph_->end()->InputAt(0));
+ return (node->opcode() == IrOpcode::kMerge &&
+ node == scheduler_->graph_->end()->InputAt(0));
}
};
=======================================
--- /trunk/src/compiler/typer.h Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/compiler/typer.h Tue Oct 28 09:48:49 2014 UTC
@@ -71,6 +71,7 @@
ZoneVector<Handle<Object> > weaken_min_limits_;
ZoneVector<Handle<Object> > weaken_max_limits_;
+ DISALLOW_COPY_AND_ASSIGN(Typer);
};
}
}
=======================================
--- /trunk/src/compiler/x64/code-generator-x64.cc Wed Oct 22 07:27:53 2014 UTC
+++ /trunk/src/compiler/x64/code-generator-x64.cc Tue Oct 28 09:48:49 2014 UTC
@@ -766,7 +766,7 @@
frame()->SetRegisterSaveAreaSize(register_save_area_size);
}
} else if (descriptor->IsJSFunctionCall()) {
- CompilationInfo* info = linkage()->info();
+ CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
frame()->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -992,7 +992,7 @@
void CodeGenerator::EnsureSpaceForLazyDeopt() {
int space_needed = Deoptimizer::patch_size();
- if (!linkage()->info()->IsStub()) {
+ if (!info()->IsStub()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
int current_pc = masm()->pc_offset();
=======================================
--- /trunk/src/d8-debug.cc Wed Jun 4 00:06:13 2014 UTC
+++ /trunk/src/d8-debug.cc Tue Oct 28 09:48:49 2014 UTC
@@ -124,7 +124,7 @@
printf("%s\n", *text_str);
}
running =
response_details->Get(String::NewFromUtf8(isolate, "running"))
- ->ToBoolean()
+ ->ToBoolean(isolate)
->Value();
}
}
=======================================
--- /trunk/src/d8.cc Thu Oct 23 08:44:45 2014 UTC
+++ /trunk/src/d8.cc Tue Oct 28 09:48:49 2014 UTC
@@ -186,7 +186,7 @@
int name_length = 0;
uint16_t* name_buffer = NULL;
if (name->IsString()) {
- Local<String> name_string = name->ToString();
+ Local<String> name_string = Local<String>::Cast(name);
name_length = name_string->Length();
name_buffer = new uint16_t[name_length];
name_string->Write(name_buffer, 0, name_length);
@@ -410,7 +410,7 @@
Throw(args.GetIsolate(), "Invalid argument");
return;
}
- int index = data->RealmFind(args[0]->ToObject()->CreationContext());
+ int index = data->RealmFind(args[0]->ToObject(isolate)->CreationContext());
if (index == -1) return;
args.GetReturnValue().Set(index);
}
@@ -480,7 +480,7 @@
Throw(args.GetIsolate(), "Invalid argument");
return;
}
- ScriptCompiler::Source script_source(args[1]->ToString());
+ ScriptCompiler::Source script_source(args[1]->ToString(isolate));
Handle<UnboundScript> script = ScriptCompiler::CompileUnbound(
isolate, &script_source);
if (script.IsEmpty()) return;
@@ -526,7 +526,7 @@
// Explicitly catch potential exceptions in toString().
v8::TryCatch try_catch;
- Handle<String> str_obj = args[i]->ToString();
+ Handle<String> str_obj = args[i]->ToString(args.GetIsolate());
if (try_catch.HasCaught()) {
try_catch.ReThrow();
return;
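The d8-debug.cc and d8.cc hunks above (and the statistics-extension.cc hunk that
follows) all make the same mechanical change: the Value::To* conversions are now
called with the Isolate passed explicitly. A hedged sketch of the migrated call
pattern in an embedder callback; the callback itself is hypothetical, only the
To*(isolate) calls mirror the diff:

  #include "include/v8.h"

  void ExampleCallback(const v8::FunctionCallbackInfo<v8::Value>& args) {
    v8::Isolate* isolate = args.GetIsolate();
    v8::Local<v8::String> name = args[0]->ToString(isolate);   // was ToString()
    bool verbose = args[1]->ToBoolean(isolate)->Value();       // was ToBoolean()
    if (verbose) args.GetReturnValue().Set(name);
  }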
=======================================
--- /trunk/src/extensions/statistics-extension.cc Tue Aug 5 00:05:55 2014 UTC
+++ /trunk/src/extensions/statistics-extension.cc Tue Oct 28 09:48:49 2014 UTC
@@ -53,7 +53,8 @@
Heap* heap = isolate->heap();
if (args.Length() > 0) { // GC if first argument evaluates to true.
- if (args[0]->IsBoolean() && args[0]->ToBoolean()->Value()) {
+ if (args[0]->IsBoolean() &&
+ args[0]->ToBoolean(args.GetIsolate())->Value()) {
heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
}
}
=======================================
--- /trunk/src/heap/mark-compact.cc Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/heap/mark-compact.cc Tue Oct 28 09:48:49 2014 UTC
@@ -2521,14 +2521,13 @@
// Note that we never eliminate a transition array, though we might right-trim
// such that number_of_transitions() == 0. If this assumption changes,
- // TransitionArray::Insert() will need to deal with the case that a transition
- // array disappeared during GC.
- int trim = t->number_of_transitions_storage() - transition_index;
+ // TransitionArray::CopyInsert() will need to deal with the case that a
+ // transition array disappeared during GC.
+ int trim = t->number_of_transitions() - transition_index;
if (trim > 0) {
heap_->RightTrimFixedArray<Heap::FROM_GC>(
t, t->IsSimpleTransition() ? trim
: trim * TransitionArray::kTransitionSize);
- t->SetNumberOfTransitions(transition_index);
}
DCHECK(map->HasTransitionArray());
}
=======================================
--- /trunk/src/mips64/builtins-mips64.cc Fri Sep 26 00:05:23 2014 UTC
+++ /trunk/src/mips64/builtins-mips64.cc Tue Oct 28 09:48:49 2014 UTC
@@ -1044,7 +1044,7 @@
// Load deoptimization data from the code object.
// <deopt_data> = <code>[#deoptimization_data_offset]
- __ Uld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
+ __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
=======================================
--- /trunk/src/mips64/macro-assembler-mips64.cc Tue Oct 14 07:51:07 2014 UTC
+++ /trunk/src/mips64/macro-assembler-mips64.cc Tue Oct 28 09:48:49 2014 UTC
@@ -3091,7 +3091,7 @@
// Compute the handler entry address and jump to it. The handler table is
// a fixed array of (smi-tagged) code offsets.
// v0 = exception, a1 = code object, a2 = state.
- Uld(a3, FieldMemOperand(a1, Code::kHandlerTableOffset));
+ ld(a3, FieldMemOperand(a1, Code::kHandlerTableOffset));
Daddu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
dsrl(a2, a2, StackHandler::kKindWidth); // Handler index.
dsll(a2, a2, kPointerSizeLog2);
=======================================
--- /trunk/src/objects-inl.h Fri Oct 24 14:44:48 2014 UTC
+++ /trunk/src/objects-inl.h Tue Oct 28 09:48:49 2014 UTC
@@ -2205,7 +2205,7 @@
}
-Object* FixedArray::get(int index) {
+Object* FixedArray::get(int index) const {
SLOW_DCHECK(index >= 0 && index < this->length());
return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
@@ -5203,8 +5203,9 @@
bool Map::CanHaveMoreTransitions() {
if (!HasTransitionArray()) return true;
- return transitions()->number_of_transitions() <=
- TransitionArray::kMaxNumberOfTransitions;
+ return FixedArray::SizeFor(transitions()->length() +
+ TransitionArray::kTransitionSize)
+ <= Page::kMaxRegularHeapObjectSize;
}
@@ -6992,14 +6993,6 @@
DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
-
-
-int Map::SlackForArraySize(int old_size, int size_limit) {
- const int max_slack = size_limit - old_size;
- DCHECK(max_slack >= 0);
- if (old_size < 4) return Min(max_slack, 1);
- return Min(max_slack, old_size / 2);
-}
void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
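The new Map::CanHaveMoreTransitions() above replaces a fixed transition count with a
size check: after adding one more transition, the backing FixedArray must still be a
regular-sized heap object. A self-contained sketch of that arithmetic; the constants
are illustrative assumptions (64-bit pointers, a two-word FixedArray header, two slots
per transition, a 512 KB regular-object limit), not values taken from this changeset:

  #include <cstdio>

  const int kPointerSize = 8;                          // assumed 64-bit build
  const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length words
  const int kTransitionSize = 2;                       // assumed slots/transition
  const int kMaxRegularHeapObjectSize = 512 * 1024;    // assumed size limit

  int FixedArraySizeFor(int length) {
    return kFixedArrayHeaderSize + length * kPointerSize;
  }

  // Same shape as the new Map::CanHaveMoreTransitions() check above.
  bool CanHaveMoreTransitions(int transition_array_length) {
    return FixedArraySizeFor(transition_array_length + kTransitionSize) <=
           kMaxRegularHeapObjectSize;
  }

  int main() {
    std::printf("%d\n", CanHaveMoreTransitions(1000));  // prints 1: still fits
    return 0;
  }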
=======================================
--- /trunk/src/objects.cc Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/objects.cc Tue Oct 28 09:48:49 2014 UTC
@@ -6584,8 +6584,7 @@
if (old_size == 0) {
descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
} else {
- EnsureDescriptorSlack(
- map, SlackForArraySize(old_size, kMaxNumberOfDescriptors));
+ EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
descriptors = handle(map->instance_descriptors());
}
}
@@ -6610,11 +6609,8 @@
DCHECK(child->is_prototype_map());
} else {
Handle<TransitionArray> transitions =
- TransitionArray::Insert(parent, name, child, flag);
- if (!parent->HasTransitionArray() ||
- *transitions != parent->transitions()) {
- parent->set_transitions(*transitions);
- }
+ TransitionArray::CopyInsert(parent, name, child, flag);
+ parent->set_transitions(*transitions);
child->SetBackPointer(*parent);
}
}
=======================================
--- /trunk/src/objects.h Mon Oct 27 07:54:22 2014 UTC
+++ /trunk/src/objects.h Tue Oct 28 09:48:49 2014 UTC
@@ -2415,7 +2415,7 @@
class FixedArray: public FixedArrayBase {
public:
// Setter and getter for elements.
- inline Object* get(int index);
+ inline Object* get(int index) const;
static inline Handle<Object> get(Handle<FixedArray> array, int index);
// Setter that uses write barrier.
inline void set(int index, Object* value);
@@ -5380,8 +5380,7 @@
static const int kMaxLoopNestingMarker = 6;
// Layout description.
- static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
- static const int kRelocationInfoOffset = kInstructionSizeOffset + kIntSize;
+ static const int kRelocationInfoOffset = HeapObject::kHeaderSize;
static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize;
static const int kDeoptimizationDataOffset =
kHandlerTableOffset + kPointerSize;
@@ -5390,8 +5389,8 @@
kDeoptimizationDataOffset + kPointerSize;
static const int kNextCodeLinkOffset = kTypeFeedbackInfoOffset + kPointerSize;
static const int kGCMetadataOffset = kNextCodeLinkOffset + kPointerSize;
- static const int kICAgeOffset =
- kGCMetadataOffset + kPointerSize;
+ static const int kInstructionSizeOffset = kGCMetadataOffset + kPointerSize;
+ static const int kICAgeOffset = kInstructionSizeOffset + kIntSize;
static const int kFlagsOffset = kICAgeOffset + kIntSize;
static const int kKindSpecificFlags1Offset = kFlagsOffset + kIntSize;
static const int kKindSpecificFlags2Offset =
@@ -6119,8 +6118,6 @@
static void AppendCallbackDescriptors(Handle<Map> map,
Handle<Object> descriptors);
- static inline int SlackForArraySize(int old_size, int size_limit);
-
static void EnsureDescriptorSlack(Handle<Map> map, int slack);
// Returns the found code or undefined if absent.
=======================================
***Additional files exist in this changeset.***