[turbofan] use ZonePool in most places in the compiler pipeline where a temp zone is used.

R=jarin@chromium.org, bmeurer@chromium.org

BUG=

Review URL: https://codereview.chromium.org/663333003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24779 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: dcarney@chromium.org
Date:   2014-10-21 14:44:50 +00:00
Parent: 2642058760
Commit: 1c5fafe890
17 changed files with 126 additions and 92 deletions
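The recurring pattern in this CL: rather than each compiler phase embedding its own Zone (constructed against the isolate), the caller borrows a temporary zone from a shared ZonePool and threads a Zone* down. A minimal sketch of that RAII pattern, assuming only the ZonePool::Scope and ControlReducer calls that appear in the diffs below (the RunControlReduction wrapper itself is hypothetical):

// Hypothetical wrapper illustrating the pooled temp-zone pattern below.
void RunControlReduction(ZonePool* zone_pool, JSGraph* jsgraph,
                         CommonOperatorBuilder* common) {
  ZonePool::Scope zone_scope(zone_pool);  // borrow a zone from the pool
  ControlReducer::ReduceGraph(zone_scope.zone(), jsgraph, common);
}  // zone_scope's destructor returns the zone to the pool for reuse

Pooling lets the pipeline recycle zone memory across phases instead of repeatedly growing and tearing down a per-phase Zone.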


@@ -17,8 +17,9 @@ namespace v8 {
 namespace internal {
 namespace compiler {
 
-AstGraphBuilder::AstGraphBuilder(CompilationInfo* info, JSGraph* jsgraph)
-    : StructuredGraphBuilder(jsgraph->graph(), jsgraph->common()),
+AstGraphBuilder::AstGraphBuilder(Zone* local_zone, CompilationInfo* info,
+                                 JSGraph* jsgraph)
+    : StructuredGraphBuilder(local_zone, jsgraph->graph(), jsgraph->common()),
       info_(info),
       jsgraph_(jsgraph),
       globals_(0, info->zone()),


@@ -25,7 +25,7 @@ class Graph;
 // of function inlining.
 class AstGraphBuilder : public StructuredGraphBuilder, public AstVisitor {
  public:
-  AstGraphBuilder(CompilationInfo* info, JSGraph* jsgraph);
+  AstGraphBuilder(Zone* local_zone, CompilationInfo* info, JSGraph* jsgraph);
 
   // Creates a graph by visiting the entire AST.
   bool CreateGraph();


@@ -21,16 +21,17 @@ enum VisitState { kUnvisited, kOnStack, kRevisit, kVisited };
 
 class ControlReducerImpl {
  public:
-  ControlReducerImpl(JSGraph* jsgraph, CommonOperatorBuilder* common)
-      : zone_(jsgraph->zone()->isolate()),
+  ControlReducerImpl(Zone* zone, JSGraph* jsgraph,
+                     CommonOperatorBuilder* common)
+      : zone_(zone),
         jsgraph_(jsgraph),
         common_(common),
-        state_(jsgraph->graph()->NodeCount(), kUnvisited, &zone_),
-        stack_(&zone_),
-        revisit_(&zone_),
+        state_(jsgraph->graph()->NodeCount(), kUnvisited, zone_),
+        stack_(zone_),
+        revisit_(zone_),
         dead_(NULL) {}
 
-  Zone zone_;
+  Zone* zone_;
   JSGraph* jsgraph_;
   CommonOperatorBuilder* common_;
   ZoneVector<VisitState> state_;
@@ -40,7 +41,7 @@ class ControlReducerImpl {
 
   void Trim() {
     // Mark all nodes reachable from end.
-    NodeVector nodes(&zone_);
+    NodeVector nodes(zone_);
     state_.assign(jsgraph_->graph()->NodeCount(), kUnvisited);
     Push(jsgraph_->graph()->end());
     while (!stack_.empty()) {
@@ -104,17 +105,17 @@ class ControlReducerImpl {
   }
 };
 
-void ControlReducer::ReduceGraph(JSGraph* jsgraph,
+void ControlReducer::ReduceGraph(Zone* zone, JSGraph* jsgraph,
                                  CommonOperatorBuilder* common) {
-  ControlReducerImpl impl(jsgraph, NULL);
+  ControlReducerImpl impl(zone, jsgraph, NULL);
   // Only trim the graph for now. Control reduction can reduce non-terminating
   // loops to graphs that are unschedulable at the moment.
   impl.Trim();
 }
 
-void ControlReducer::TrimGraph(JSGraph* jsgraph) {
-  ControlReducerImpl impl(jsgraph, NULL);
+void ControlReducer::TrimGraph(Zone* zone, JSGraph* jsgraph) {
+  ControlReducerImpl impl(zone, jsgraph, NULL);
   impl.Trim();
 }
 }


@@ -15,10 +15,11 @@ class CommonOperatorBuilder;
 class ControlReducer {
  public:
   // Perform branch folding and dead code elimination on the graph.
-  static void ReduceGraph(JSGraph* graph, CommonOperatorBuilder* builder);
+  static void ReduceGraph(Zone* zone, JSGraph* graph,
+                          CommonOperatorBuilder* builder);
 
   // Trim nodes in the graph that are not reachable from end.
-  static void TrimGraph(JSGraph* graph);
+  static void TrimGraph(Zone* zone, JSGraph* graph);
 };
 }
 }


@@ -19,12 +19,12 @@ namespace internal {
 namespace compiler {
 
-StructuredGraphBuilder::StructuredGraphBuilder(Graph* graph,
+StructuredGraphBuilder::StructuredGraphBuilder(Zone* local_zone, Graph* graph,
                                                CommonOperatorBuilder* common)
     : GraphBuilder(graph),
       common_(common),
       environment_(NULL),
-      local_zone_(isolate()),
+      local_zone_(local_zone),
       current_context_(NULL),
       exit_control_(NULL) {}


@@ -79,7 +79,8 @@ class GraphBuilder {
 // StubGraphBuilder).
 class StructuredGraphBuilder : public GraphBuilder {
  public:
-  StructuredGraphBuilder(Graph* graph, CommonOperatorBuilder* common);
+  StructuredGraphBuilder(Zone* zone, Graph* graph,
+                         CommonOperatorBuilder* common);
   virtual ~StructuredGraphBuilder() {}
 
   // Creates a new Phi node having {count} input values.
@@ -123,7 +124,7 @@ class StructuredGraphBuilder : public GraphBuilder {
   Node* dead_control();
 
   Zone* graph_zone() const { return graph()->zone(); }
-  Zone* local_zone() { return &local_zone_; }
+  Zone* local_zone() const { return local_zone_; }
   Isolate* isolate() const { return graph_zone()->isolate(); }
   CommonOperatorBuilder* common() const { return common_; }
 
@@ -145,7 +146,7 @@ class StructuredGraphBuilder : public GraphBuilder {
   Environment* environment_;
 
   // Zone local to the builder for data not leaking into the graph.
-  Zone local_zone_;
+  Zone* local_zone_;
 
   // Node representing the control dependency for dead code.
   SetOncePointer<Node> dead_control_;


@@ -13,12 +13,12 @@ namespace v8 {
 namespace internal {
 namespace compiler {
 
-InstructionSelector::InstructionSelector(Linkage* linkage,
+InstructionSelector::InstructionSelector(Zone* local_zone, Linkage* linkage,
                                          InstructionSequence* sequence,
                                          Schedule* schedule,
                                          SourcePositionTable* source_positions,
                                          Features features)
-    : zone_(sequence->isolate()),
+    : zone_(local_zone),
       linkage_(linkage),
       sequence_(sequence),
       source_positions_(source_positions),


@@ -25,8 +25,9 @@ class InstructionSelector FINAL {
   // Forward declarations.
   class Features;
 
-  InstructionSelector(Linkage* linkage, InstructionSequence* sequence,
-                      Schedule* schedule, SourcePositionTable* source_positions,
+  InstructionSelector(Zone* local_zone, Linkage* linkage,
+                      InstructionSequence* sequence, Schedule* schedule,
+                      SourcePositionTable* source_positions,
                       Features features = SupportedFeatures());
 
   // Visit code for the entire graph with the included schedule.
@@ -187,11 +188,11 @@ class InstructionSelector FINAL {
 
   Linkage* linkage() const { return linkage_; }
   InstructionSequence* sequence() const { return sequence_; }
   Zone* instruction_zone() const { return sequence()->zone(); }
-  Zone* zone() { return &zone_; }
+  Zone* zone() const { return zone_; }
 
   // ===========================================================================
 
-  Zone zone_;
+  Zone* const zone_;
   Linkage* const linkage_;
   InstructionSequence* const sequence_;
   SourcePositionTable* const source_positions_;


@@ -409,7 +409,7 @@ void JSInliner::TryInlineJSCall(Node* call_node) {
   JSGraph jsgraph(&graph, jsgraph_->common(), jsgraph_->javascript(),
                   jsgraph_->machine());
 
-  AstGraphBuilder graph_builder(&info, &jsgraph);
+  AstGraphBuilder graph_builder(local_zone_, &info, &jsgraph);
   graph_builder.CreateGraph();
   Inlinee::UnifyReturn(&jsgraph);


@@ -16,8 +16,8 @@ class JSCallFunctionAccessor;
 
 class JSInliner {
  public:
-  JSInliner(CompilationInfo* info, JSGraph* jsgraph)
-      : info_(info), jsgraph_(jsgraph) {}
+  JSInliner(Zone* local_zone, CompilationInfo* info, JSGraph* jsgraph)
+      : local_zone_(local_zone), info_(info), jsgraph_(jsgraph) {}
 
   void Inline();
   void TryInlineJSCall(Node* node);
@@ -25,6 +25,7 @@ class JSInliner {
  private:
   friend class InlinerVisitor;
 
+  Zone* local_zone_;
   CompilationInfo* info_;
   JSGraph* jsgraph_;


@@ -178,9 +178,11 @@ void Pipeline::PrintAllocator(const char* phase,
 
 class AstGraphBuilderWithPositions : public AstGraphBuilder {
  public:
-  explicit AstGraphBuilderWithPositions(CompilationInfo* info, JSGraph* jsgraph,
+  explicit AstGraphBuilderWithPositions(Zone* local_zone, CompilationInfo* info,
+                                        JSGraph* jsgraph,
                                         SourcePositionTable* source_positions)
-      : AstGraphBuilder(info, jsgraph), source_positions_(source_positions) {}
+      : AstGraphBuilder(local_zone, info, jsgraph),
+        source_positions_(source_positions) {}
 
   bool CreateGraph() {
     SourcePositionTable::Scope pos(source_positions_,
@@ -253,8 +255,9 @@ Handle<Code> Pipeline::GenerateCode() {
   {
     PhaseStats graph_builder_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
                                    "graph builder");
-    AstGraphBuilderWithPositions graph_builder(info(), &jsgraph,
-                                               &source_positions);
+    ZonePool::Scope zone_scope(&zone_pool);
+    AstGraphBuilderWithPositions graph_builder(zone_scope.zone(), info(),
+                                               &jsgraph, &source_positions);
     graph_builder.CreateGraph();
     context_node = graph_builder.GetFunctionContext();
   }
@@ -284,7 +287,8 @@ Handle<Code> Pipeline::GenerateCode() {
   if (info()->is_inlining_enabled()) {
     SourcePositionTable::Scope pos(&source_positions,
                                    SourcePosition::Unknown());
-    JSInliner inliner(info(), &jsgraph);
+    ZonePool::Scope zone_scope(&zone_pool);
+    JSInliner inliner(zone_scope.zone(), info(), &jsgraph);
     inliner.Inline();
     VerifyAndPrintGraph(&graph, "Inlined", true);
   }
@@ -367,7 +371,8 @@ Handle<Code> Pipeline::GenerateCode() {
                                    SourcePosition::Unknown());
     PhaseStats control_reducer_stats(
         info(), &zone_pool, PhaseStats::CREATE_GRAPH, "control reduction");
-    ControlReducer::ReduceGraph(&jsgraph, &common);
+    ZonePool::Scope zone_scope(&zone_pool);
+    ControlReducer::ReduceGraph(zone_scope.zone(), &jsgraph, &common);
     VerifyAndPrintGraph(&graph, "Control reduced");
   }
@@ -398,7 +403,8 @@ Handle<Code> Pipeline::GenerateCode() {
     PhaseStats codegen_stats(info(), &zone_pool, PhaseStats::CODEGEN,
                              "codegen");
     Linkage linkage(info());
-    code = GenerateCode(&linkage, &graph, schedule, &source_positions);
+    code =
+        GenerateCode(&zone_pool, &linkage, &graph, schedule, &source_positions);
     info()->SetCode(code);
   }
@@ -430,17 +436,18 @@ Schedule* Pipeline::ComputeSchedule(ZonePool* zone_pool, Graph* graph) {
 Handle<Code> Pipeline::GenerateCodeForMachineGraph(Linkage* linkage,
                                                    Graph* graph,
                                                    Schedule* schedule) {
+  ZonePool zone_pool(isolate());
   CHECK(SupportedBackend());
   if (schedule == NULL) {
     // TODO(rossberg): Should this really be untyped?
     VerifyAndPrintGraph(graph, "Machine", true);
-    ZonePool zone_pool(isolate());
     schedule = ComputeSchedule(&zone_pool, graph);
   }
   TraceSchedule(schedule);
 
   SourcePositionTable source_positions(graph);
-  Handle<Code> code = GenerateCode(linkage, graph, schedule, &source_positions);
+  Handle<Code> code =
+      GenerateCode(&zone_pool, linkage, graph, schedule, &source_positions);
 #if ENABLE_DISASSEMBLER
   if (!code.is_null() && FLAG_print_opt_code) {
     CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
@@ -452,8 +459,8 @@ Handle<Code> Pipeline::GenerateCodeForMachineGraph(Linkage* linkage,
 }
 
-Handle<Code> Pipeline::GenerateCode(Linkage* linkage, Graph* graph,
-                                    Schedule* schedule,
+Handle<Code> Pipeline::GenerateCode(ZonePool* zone_pool, Linkage* linkage,
+                                    Graph* graph, Schedule* schedule,
                                     SourcePositionTable* source_positions) {
   DCHECK_NOT_NULL(graph);
   DCHECK_NOT_NULL(linkage);
@@ -470,8 +477,9 @@ Handle<Code> Pipeline::GenerateCode(Linkage* linkage, Graph* graph,
 
   // Select and schedule instructions covering the scheduled graph.
   {
-    InstructionSelector selector(linkage, &sequence, schedule,
-                                 source_positions);
+    ZonePool::Scope zone_scope(zone_pool);
+    InstructionSelector selector(zone_scope.zone(), linkage, &sequence,
+                                 schedule, source_positions);
     selector.SelectInstructions();
   }
@@ -491,8 +499,10 @@ Handle<Code> Pipeline::GenerateCode(Linkage* linkage, Graph* graph,
       linkage->info()->AbortOptimization(kNotEnoughVirtualRegistersForValues);
       return Handle<Code>::null();
     }
-    RegisterAllocator allocator(&frame, linkage->info(), &sequence);
-    if (!allocator.Allocate()) {
+    ZonePool::Scope zone_scope(zone_pool);
+    RegisterAllocator allocator(zone_scope.zone(), &frame, linkage->info(),
+                                &sequence);
+    if (!allocator.Allocate(zone_pool)) {
       linkage->info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
       return Handle<Code>::null();
     }


@@ -61,7 +61,8 @@ class Pipeline {
   void PrintAllocator(const char* phase, const RegisterAllocator* allocator);
   void VerifyAndPrintGraph(Graph* graph, const char* phase,
                            bool untyped = false);
-  Handle<Code> GenerateCode(Linkage* linkage, Graph* graph, Schedule* schedule,
+  Handle<Code> GenerateCode(ZonePool* zone_pool, Linkage* linkage, Graph* graph,
+                            Schedule* schedule,
                             SourcePositionTable* source_positions);
 };
 }


@@ -499,9 +499,11 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
 }
 
-RegisterAllocator::RegisterAllocator(Frame* frame, CompilationInfo* info,
+RegisterAllocator::RegisterAllocator(Zone* local_zone, Frame* frame,
+                                     CompilationInfo* info,
                                      InstructionSequence* code)
-    : zone_(code->isolate()),
+    : zone_(local_zone),
+      zone_pool_(NULL),
       frame_(frame),
       info_(info),
       code_(code),
@@ -1094,7 +1096,9 @@ void RegisterAllocator::ResolvePhis(const InstructionBlock* block) {
 }
 
-bool RegisterAllocator::Allocate() {
+bool RegisterAllocator::Allocate(ZonePool* zone_pool) {
+  DCHECK_EQ(NULL, zone_pool_);
+  zone_pool_ = zone_pool;
   assigned_registers_ = new (code_zone())
       BitVector(Register::NumAllocatableRegisters(), code_zone());
   assigned_double_registers_ = new (code_zone())
@@ -1116,6 +1120,46 @@ bool RegisterAllocator::Allocate() {
 }
 
+class RegisterAllocatorPhase : public CompilationPhase {
+ public:
+  RegisterAllocatorPhase(const char* name, RegisterAllocator* allocator)
+      : CompilationPhase(name, allocator->info()),
+        allocator_(allocator),
+        allocator_zone_start_allocation_size_(0),
+        stats_(NULL) {
+    if (FLAG_turbo_stats) {
+      allocator_zone_start_allocation_size_ =
+          allocator->info()->zone()->allocation_size();
+      if (allocator->zone_pool() != NULL) {
+        stats_ = new ZonePool::StatsScope(allocator->zone_pool());
+      }
+    }
+  }
+
+  ~RegisterAllocatorPhase() {
+    if (FLAG_turbo_stats) {
+      unsigned size = allocator_->info()->zone()->allocation_size() -
+                      allocator_zone_start_allocation_size_;
+      if (stats_ != NULL) {
+        size += static_cast<unsigned>(stats_->GetMaxAllocatedBytes());
+      }
+      isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
+    }
+    delete stats_;
+#ifdef DEBUG
+    if (allocator_ != NULL) allocator_->Verify();
+#endif
+  }
+
+ private:
+  RegisterAllocator* allocator_;
+  unsigned allocator_zone_start_allocation_size_;
+  ZonePool::StatsScope* stats_;
+
+  DISALLOW_COPY_AND_ASSIGN(RegisterAllocatorPhase);
+};
+
+
 void RegisterAllocator::MeetRegisterConstraints() {
   RegisterAllocatorPhase phase("L_Register constraints", this);
   for (int i = 0; i < code()->InstructionBlockCount(); ++i) {
@@ -2205,27 +2249,6 @@ void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
   range->set_assigned_register(reg, code_zone());
 }
 
-RegisterAllocatorPhase::RegisterAllocatorPhase(const char* name,
-                                               RegisterAllocator* allocator)
-    : CompilationPhase(name, allocator->info()), allocator_(allocator) {
-  if (FLAG_turbo_stats) {
-    allocator_zone_start_allocation_size_ =
-        allocator->zone()->allocation_size();
-  }
-}
-
-RegisterAllocatorPhase::~RegisterAllocatorPhase() {
-  if (FLAG_turbo_stats) {
-    unsigned size = allocator_->zone()->allocation_size() -
-                    allocator_zone_start_allocation_size_;
-    isolate()->GetTStatistics()->SaveTiming(name(), base::TimeDelta(), size);
-  }
-#ifdef DEBUG
-  if (allocator_ != NULL) allocator_->Verify();
-#endif
-}
-
 }
 }
 }  // namespace v8::internal::compiler
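The inline RegisterAllocatorPhase above wires ZonePool::StatsScope into the --turbo-stats accounting, so pooled temp-zone usage is still reported per phase. A hedged sketch of that measurement pattern, reusing only the StatsScope calls shown above (MeasureAllocatorPhase is hypothetical, and the return type of GetMaxAllocatedBytes is assumed; the code above casts it to unsigned):

// Hypothetical helper: report peak pool allocation during one allocator
// phase, mirroring what RegisterAllocatorPhase does above.
size_t MeasureAllocatorPhase(ZonePool* zone_pool,
                             RegisterAllocator* allocator) {
  ZonePool::StatsScope stats(zone_pool);  // start tracking pool allocation
  allocator->MeetRegisterConstraints();   // run one allocator phase
  return stats.GetMaxAllocatedBytes();    // peak bytes allocated meanwhile
}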


@@ -7,6 +7,7 @@
 
 #include "src/allocation.h"
 #include "src/compiler/instruction.h"
+#include "src/compiler/zone-pool.h"
 #include "src/macro-assembler.h"
 #include "src/zone.h"
 
@@ -318,8 +319,8 @@ class LiveRange : public ZoneObject {
 class RegisterAllocator BASE_EMBEDDED {
  public:
   // TODO(dcarney): remove info
-  explicit RegisterAllocator(Frame* frame, CompilationInfo* info,
-                             InstructionSequence* code);
+  explicit RegisterAllocator(Zone* local_zone, Frame* frame,
+                             CompilationInfo* info, InstructionSequence* code);
 
   static void TraceAlloc(const char* msg, ...);
 
@@ -330,7 +331,8 @@ class RegisterAllocator BASE_EMBEDDED {
   // Returns the register kind required by the given virtual register.
   RegisterKind RequiredRegisterKind(int virtual_register) const;
 
-  bool Allocate();
+  // TODO(dcarney): fix compilation phase stats to not require this.
+  bool Allocate(ZonePool* zone_pool = NULL);
 
   const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
   const Vector<LiveRange*>* fixed_live_ranges() const {
@@ -342,13 +344,14 @@ class RegisterAllocator BASE_EMBEDDED {
   CompilationInfo* info() const { return info_; }
   inline InstructionSequence* code() const { return code_; }
+  ZonePool* zone_pool() const { return zone_pool_; }
 
   // This zone is for datastructures only needed during register allocation.
-  inline Zone* zone() { return &zone_; }
+  inline Zone* zone() const { return zone_; }
 
   // This zone is for InstructionOperands and moves that live beyond register
   // allocation.
-  inline Zone* code_zone() { return code()->zone(); }
+  inline Zone* code_zone() const { return code()->zone(); }
 
   int GetVirtualRegister() {
     int vreg = code()->NextVirtualRegister();
@@ -497,7 +500,9 @@ class RegisterAllocator BASE_EMBEDDED {
   Frame* frame() const { return frame_; }
 
-  Zone zone_;
+  Zone* const zone_;
+  // TODO(dcarney): remove this.
+  ZonePool* zone_pool_;
   Frame* const frame_;
   CompilationInfo* const info_;
   InstructionSequence* const code_;
@@ -535,18 +540,6 @@ class RegisterAllocator BASE_EMBEDDED {
   DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
 };
 
-class RegisterAllocatorPhase : public CompilationPhase {
- public:
-  RegisterAllocatorPhase(const char* name, RegisterAllocator* allocator);
-  ~RegisterAllocatorPhase();
-
- private:
-  RegisterAllocator* allocator_;
-  unsigned allocator_zone_start_allocation_size_;
-  DISALLOW_COPY_AND_ASSIGN(RegisterAllocatorPhase);
-};
-
 }
 }
 }  // namespace v8::internal::compiler


@@ -70,7 +70,8 @@ class DeoptCodegenTester {
     code = new v8::internal::compiler::InstructionSequence(scope_->main_zone(),
                                                            graph, schedule);
     SourcePositionTable source_positions(graph);
-    InstructionSelector selector(linkage, code, schedule, &source_positions);
+    InstructionSelector selector(scope_->main_zone(), linkage, code, schedule,
+                                 &source_positions);
     selector.SelectInstructions();
 
     if (FLAG_trace_turbo) {
@@ -79,7 +80,7 @@ class DeoptCodegenTester {
     }
 
     Frame frame;
-    RegisterAllocator allocator(&frame, &info, code);
+    RegisterAllocator allocator(scope_->main_zone(), &frame, &info, code);
     CHECK(allocator.Allocate());
 
     if (FLAG_trace_turbo) {


@@ -37,7 +37,7 @@ class CTrimTester : HandleAndZoneScope {
   Node* one;
   Node* half;
 
-  void Trim() { ControlReducer::TrimGraph(&jsgraph); }
+  void Trim() { ControlReducer::TrimGraph(main_zone(), &jsgraph); }
 };
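The tests take a simpler route: since the reworked entry points accept any caller-owned Zone*, a test can pass its scope's long-lived zone instead of a pooled one. A minimal sketch of that usage, assuming main_zone() is accessible on the scope object (TrimForTest is hypothetical; the TrimGraph signature is taken from the diff above):

// Hypothetical test helper: trim a graph using the test's long-lived zone.
void TrimForTest(HandleAndZoneScope* scope, JSGraph* jsgraph) {
  ControlReducer::TrimGraph(scope->main_zone(), jsgraph);
}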


@@ -40,7 +40,7 @@ InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build(
   Linkage linkage(&info, call_descriptor());
   InstructionSequence sequence(test_->zone(), graph(), schedule);
   SourcePositionTable source_position_table(graph());
-  InstructionSelector selector(&linkage, &sequence, schedule,
+  InstructionSelector selector(test_->zone(), &linkage, &sequence, schedule,
                                &source_position_table, features);
   selector.SelectInstructions();
   if (FLAG_trace_turbo) {