Refactor polymorphic load and inline function graph construction.

Change the way we construct the graph for polymorphic loads to match that of
polymorphic stores.

Introduce a stack-allocated helper for saving and restoring all the
function-specific graph builder state that needs to change when we begin
translating an inlined function.  Make this class authoritative by moving
redundant state out of the builder and deferring to the current function's
state.

Ensure that we always print a tracing message when abandoning an inlining
attempt.

Review URL: http://codereview.chromium.org/6628012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7074 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
kmillikin@chromium.org 2011-03-07 11:52:36 +00:00
parent 632e79bfe7
commit c3172a6b7e
16 changed files with 390 additions and 269 deletions

View File

@ -382,8 +382,9 @@ void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
}
LChunk::LChunk(HGraph* graph)
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
instructions_(32),
pointer_maps_(8),
@ -474,7 +475,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// shift all parameter indexes down by the number of parameters, and
// make sure they end up negative so they are distinguishable from
// spill slots.
int result = index - graph()->info()->scope()->num_parameters() - 1;
int result = index - info()->scope()->num_parameters() - 1;
ASSERT(result < 0);
return result;
}
@ -482,7 +483,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
ASSERT(-1 <= index); // -1 is the receiver.
return (1 + graph()->info()->scope()->num_parameters() - index) *
return (1 + info()->scope()->num_parameters() - index) *
kPointerSize;
}
@ -521,7 +522,7 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
chunk_ = new LChunk(graph());
chunk_ = new LChunk(info(), graph());
HPhase phase("Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@ -538,8 +539,8 @@ LChunk* LChunkBuilder::Build() {
void LChunkBuilder::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LChunk building in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);

View File

@ -1829,7 +1829,7 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
explicit LChunk(HGraph* graph);
explicit LChunk(CompilationInfo* info, HGraph* graph);
void AddInstruction(LInstruction* instruction, HBasicBlock* block);
LConstantOperand* DefineConstantOperand(HConstant* constant);
@ -1842,6 +1842,7 @@ class LChunk: public ZoneObject {
int ParameterAt(int index);
int GetParameterStackSlot(int index) const;
int spill_slot_count() const { return spill_slot_count_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
void AddGapMove(int index, LOperand* from, LOperand* to);
@ -1878,6 +1879,7 @@ class LChunk: public ZoneObject {
private:
int spill_slot_count_;
CompilationInfo* info_;
HGraph* const graph_;
ZoneList<LInstruction*> instructions_;
ZoneList<LPointerMap*> pointer_maps_;
@ -1887,8 +1889,9 @@ class LChunk: public ZoneObject {
class LChunkBuilder BASE_EMBEDDED {
public:
LChunkBuilder(HGraph* graph, LAllocator* allocator)
LChunkBuilder(CompilationInfo* info, HGraph* graph, LAllocator* allocator)
: chunk_(NULL),
info_(info),
graph_(graph),
status_(UNUSED),
current_instruction_(NULL),
@ -1917,6 +1920,7 @@ class LChunkBuilder BASE_EMBEDDED {
};
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
bool is_unused() const { return status_ == UNUSED; }
@ -2023,6 +2027,7 @@ class LChunkBuilder BASE_EMBEDDED {
HArithmeticBinaryOperation* instr);
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Status status_;
HInstruction* current_instruction_;

View File

@ -80,8 +80,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
void LCodeGen::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LCodeGen in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);
@ -2438,7 +2438,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
LInstruction* instr) {
// Change context if needed.
bool change_context =
(graph()->info()->closure()->context() != function->context()) ||
(info()->closure()->context() != function->context()) ||
scope()->contains_with() ||
(scope()->num_heap_slots() > 0);
if (change_context) {

View File

@ -53,7 +53,7 @@ class LCodeGen BASE_EMBEDDED {
deoptimizations_(4),
deoptimization_literals_(8),
inlined_function_count_(0),
scope_(chunk->graph()->info()->scope()),
scope_(info->scope()),
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
@ -64,6 +64,7 @@ class LCodeGen BASE_EMBEDDED {
// Simple accessors.
MacroAssembler* masm() const { return masm_; }
CompilationInfo* info() const { return info_; }
// Support for converting LOperands to assembler types.
// LOperand must be a register.
@ -132,7 +133,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
int strict_mode_flag() const {
return info_->is_strict() ? kStrictMode : kNonStrictMode;
return info()->is_strict() ? kStrictMode : kNonStrictMode;
}
LChunk* chunk() const { return chunk_; }

View File

@ -281,18 +281,18 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
HTracer::Instance()->TraceCompilation(info->function());
}
TypeFeedbackOracle oracle(
code, Handle<Context>(info->closure()->context()->global_context()));
HGraphBuilder builder(&oracle);
Handle<Context> global_context(info->closure()->context()->global_context());
TypeFeedbackOracle oracle(code, global_context);
HGraphBuilder builder(info, &oracle);
HPhase phase(HPhase::kTotal);
HGraph* graph = builder.CreateGraph(info);
HGraph* graph = builder.CreateGraph();
if (Top::has_pending_exception()) {
info->SetCode(Handle<Code>::null());
return false;
}
if (graph != NULL && FLAG_build_lithium) {
Handle<Code> optimized_code = graph->Compile();
Handle<Code> optimized_code = graph->Compile(info);
if (!optimized_code.is_null()) {
info->SetCode(optimized_code);
FinishOptimization(info->closure(), start);

View File

@ -536,7 +536,6 @@ void HBasicBlock::FinishExit(HControlInstruction* instruction) {
HGraph::HGraph(CompilationInfo* info)
: HSubgraph(this),
next_block_id_(0),
info_(info),
blocks_(8),
values_(16),
phi_list_(NULL) {
@ -545,12 +544,7 @@ HGraph::HGraph(CompilationInfo* info)
}
bool HGraph::AllowCodeMotion() const {
return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
}
Handle<Code> HGraph::Compile() {
Handle<Code> HGraph::Compile(CompilationInfo* info) {
int values = GetMaximumValueID();
if (values > LAllocator::max_initial_value_ids()) {
if (FLAG_trace_bailout) PrintF("Function is too big\n");
@ -558,7 +552,7 @@ Handle<Code> HGraph::Compile() {
}
LAllocator allocator(values, this);
LChunkBuilder builder(this, &allocator);
LChunkBuilder builder(info, this, &allocator);
LChunk* chunk = builder.Build();
if (chunk == NULL) return Handle<Code>::null();
@ -569,7 +563,7 @@ Handle<Code> HGraph::Compile() {
if (!FLAG_use_lithium) return Handle<Code>::null();
MacroAssembler assembler(NULL, 0);
LCodeGen generator(chunk, &assembler, info());
LCodeGen generator(chunk, &assembler, info);
if (FLAG_eliminate_empty_blocks) {
chunk->MarkEmptyBlocks();
@ -579,13 +573,13 @@ Handle<Code> HGraph::Compile() {
if (FLAG_trace_codegen) {
PrintF("Crankshaft Compiler - ");
}
CodeGenerator::MakeCodePrologue(info());
CodeGenerator::MakeCodePrologue(info);
Code::Flags flags =
Code::ComputeFlags(Code::OPTIMIZED_FUNCTION, NOT_IN_LOOP);
Handle<Code> code =
CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
CodeGenerator::MakeCodeEpilogue(&assembler, flags, info);
generator.FinishCode(code);
CodeGenerator::PrintCode(code, info());
CodeGenerator::PrintCode(code, info);
return code;
}
return Handle<Code>::null();
@ -1176,8 +1170,9 @@ void HStackCheckEliminator::RemoveStackCheck(HBasicBlock* block) {
class HGlobalValueNumberer BASE_EMBEDDED {
public:
explicit HGlobalValueNumberer(HGraph* graph)
explicit HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
: graph_(graph),
info_(info),
block_side_effects_(graph_->blocks()->length()),
loop_side_effects_(graph_->blocks()->length()) {
ASSERT(Heap::allow_allocation(false));
@ -1197,9 +1192,14 @@ class HGlobalValueNumberer BASE_EMBEDDED {
void ProcessLoopBlock(HBasicBlock* block,
HBasicBlock* before_loop,
int loop_kills);
bool AllowCodeMotion();
bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
HGraph* graph() { return graph_; }
CompilationInfo* info() { return info_; }
HGraph* graph_;
CompilationInfo* info_;
// A map of block IDs to their side effects.
ZoneList<int> block_side_effects_;
@ -1300,10 +1300,15 @@ void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
}
// Returns true if code motion (moving loop-invariant instructions out of
// loops) is still permitted for the function being compiled.  Motion is
// disallowed once the function has been optimized too many times, to avoid
// repeatedly paying for it in functions that keep deoptimizing.
bool HGlobalValueNumberer::AllowCodeMotion() {
// opt_count() + 1 counts the optimization attempt currently in progress.
return info()->shared_info()->opt_count() + 1 < Compiler::kDefaultMaxOptCount;
}
bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
HBasicBlock* loop_header) {
// If we've disabled code motion, don't move any instructions.
if (!graph_->AllowCodeMotion()) return false;
if (!AllowCodeMotion()) return false;
// If --aggressive-loop-invariant-motion, move everything except change
// instructions.
@ -1848,6 +1853,47 @@ void HGraph::ComputeMinusZeroChecks() {
}
// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
// Saves the graph builder's per-function translation state and installs
// fresh state for |info| (the function about to be translated).  For an
// inlined function (outer_ != NULL) this also allocates the return target
// block(s): a true/false pair when the call occurs in a test context, or a
// single return block otherwise.  The matching restore happens in the
// destructor, so instances are intended to be stack-allocated around the
// translation of an inlined function body.
FunctionState::FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle)
: owner_(owner),
compilation_info_(info),
oracle_(oracle),
call_context_(NULL),
function_return_(NULL),
test_context_(NULL),
outer_(owner->function_state()) {
if (outer_ != NULL) {
// State for an inline function.
if (owner->ast_context()->IsTest()) {
HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
if_true->MarkAsInlineReturnTarget();
if_false->MarkAsInlineReturnTarget();
// The AstContext constructor pushed on the context stack. This newed
// instance is the reason that AstContext can't be BASE_EMBEDDED.
test_context_ = new TestContext(owner, if_true, if_false);
} else {
function_return_ = owner->graph()->CreateBasicBlock();
function_return()->MarkAsInlineReturnTarget();
}
// Set this after possibly allocating a new TestContext above.
call_context_ = owner->ast_context();
}
// Push on the state stack.
owner->set_function_state(this);
}
// Pops this state off the builder's state stack, restoring the enclosing
// function's state.  Frees the TestContext allocated in the constructor, if
// any (test_context_ is NULL for non-test contexts, so delete is a no-op).
FunctionState::~FunctionState() {
delete test_context_;
owner_->set_function_state(outer_);
}
// Implementation of utility classes to represent an expression's context in
// the AST.
AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
@ -2018,8 +2064,8 @@ class HGraphBuilder::SubgraphScope BASE_EMBEDDED {
void HGraphBuilder::Bailout(const char* reason) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *debug_name, reason);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Bailout in HGraphBuilder: @\"%s\": %s\n", *name, reason);
}
SetStackOverflow();
}
@ -2066,16 +2112,16 @@ void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
}
HGraph* HGraphBuilder::CreateGraph(CompilationInfo* info) {
HGraph* HGraphBuilder::CreateGraph() {
ASSERT(subgraph() == NULL);
graph_ = new HGraph(info);
graph_ = new HGraph(info());
{
HPhase phase("Block building");
graph()->Initialize(CreateBasicBlock(graph()->start_environment()));
current_subgraph_ = graph();
Scope* scope = info->scope();
Scope* scope = info()->scope();
if (scope->HasIllegalRedeclaration()) {
Bailout("function with illegal redeclaration");
return NULL;
@ -2102,9 +2148,9 @@ HGraph* HGraphBuilder::CreateGraph(CompilationInfo* info) {
HEnvironment* initial_env = environment()->CopyWithoutHistory();
HBasicBlock* body_entry = CreateBasicBlock(initial_env);
current_block()->Goto(body_entry);
body_entry->SetJoinId(info->function()->id());
body_entry->SetJoinId(info()->function()->id());
set_current_block(body_entry);
VisitStatements(info->function()->body());
VisitStatements(info()->function()->body());
if (HasStackOverflow()) return NULL;
if (current_block() != NULL) {
@ -2142,7 +2188,7 @@ HGraph* HGraphBuilder::CreateGraph(CompilationInfo* info) {
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
HPhase phase("Global value numbering", graph());
HGlobalValueNumberer gvn(graph());
HGlobalValueNumberer gvn(graph(), info());
gvn.Analyze();
}
@ -2423,7 +2469,7 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
return_value = environment()->Pop();
}
current_block()->AddLeaveInlined(return_value,
function_return_);
function_return());
set_current_block(NULL);
}
}
@ -2607,13 +2653,13 @@ void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
}
}
bool HGraph::HasOsrEntryAt(IterationStatement* statement) {
// Returns true if |statement| is the loop selected for on-stack replacement
// in the function being compiled, i.e. its OSR entry AST id matches the one
// recorded in the compilation info.
bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
return statement->OsrEntryId() == info()->osr_ast_id();
}
void HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
if (!graph()->HasOsrEntryAt(statement)) return;
if (!HasOsrEntryAt(statement)) return;
HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
HBasicBlock* osr_entry = graph()->CreateBasicBlock();
@ -2780,7 +2826,7 @@ void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
Handle<SharedFunctionInfo> shared_info =
Compiler::BuildFunctionInfo(expr, graph_->info()->script());
Compiler::BuildFunctionInfo(expr, info()->script());
CHECK_BAILOUT;
HFunctionLiteral* instr =
new HFunctionLiteral(shared_info, expr->pretenure());
@ -2822,10 +2868,10 @@ void HGraphBuilder::LookupGlobalPropertyCell(Variable* var,
if (var->is_this()) {
BAILOUT("global this reference");
}
if (!graph()->info()->has_global_object()) {
if (!info()->has_global_object()) {
BAILOUT("no global object to optimize VariableProxy");
}
Handle<GlobalObject> global(graph()->info()->global_object());
Handle<GlobalObject> global(info()->global_object());
global->Lookup(*var->name(), lookup);
if (!lookup->IsProperty()) {
BAILOUT("global variable cell not yet introduced");
@ -2846,7 +2892,7 @@ HValue* HGraphBuilder::BuildContextChainWalk(Variable* var) {
ASSERT(var->IsContextSlot());
HInstruction* context = new HContext;
AddInstruction(context);
int length = graph()->info()->scope()->ContextChainLength(var->scope());
int length = info()->scope()->ContextChainLength(var->scope());
while (length-- > 0) {
context = new HOuterContext(context);
AddInstruction(context);
@ -2877,7 +2923,7 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
LookupGlobalPropertyCell(variable, &lookup, false);
CHECK_BAILOUT;
Handle<GlobalObject> global(graph()->info()->global_object());
Handle<GlobalObject> global(info()->global_object());
// TODO(3039103): Handle global property load through an IC call when access
// checks are enabled.
if (global->IsAccessCheckNeeded()) {
@ -3147,17 +3193,20 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* value,
ZoneMapList* types,
Handle<String> name) {
// TODO(ager): We should recognize when the prototype chains for different
// maps are identical. In that case we can avoid repeatedly generating the
// same prototype map checks.
int count = 0;
HBasicBlock* join = NULL;
for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
Handle<Map> map = types->at(i);
LookupResult lookup;
if (ComputeStoredField(map, name, &lookup)) {
++count;
if (join == NULL) {
if (count == 0) {
AddInstruction(new HCheckNonSmi(object)); // Only needed once.
join = graph()->CreateBasicBlock();
}
++count;
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
HCompareMap* compare = new HCompareMap(object, map, if_true, if_false);
@ -3176,14 +3225,20 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
}
}
// Finish up. We need a generic IC if there were types we couldn't
// resolve statically or if we want to handle maps we've never seen.
if (count < types->length() || !FLAG_deoptimize_uncommon_cases) {
// Finish up. Unconditionally deoptimize if we've handled all the maps we
// know about and do not want to handle ones we've never seen. Otherwise
// use a generic IC.
if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
current_block()->FinishExit(new HDeoptimize);
} else {
HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
instr->set_position(expr->position());
AddInstruction(instr);
if (join == NULL) {
if (join != NULL) {
if (!ast_context()->IsEffect()) Push(value);
current_block()->Goto(join);
} else {
// The HSimulate for the store should not see the stored value in
// effect contexts (it is not materialized at expr->id() in the
// unoptimized code).
@ -3197,22 +3252,14 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
}
}
ast_context()->ReturnValue(value);
} else {
if (!ast_context()->IsEffect()) Push(value);
current_block()->Goto(join);
join->SetJoinId(expr->id());
set_current_block(join);
if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}
} else {
current_block()->FinishExit(new HDeoptimize);
set_current_block(join);
if (join != NULL) {
join->SetJoinId(expr->id());
if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
return;
}
}
ASSERT(join != NULL);
join->SetJoinId(expr->id());
set_current_block(join);
if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}
@ -3293,7 +3340,7 @@ void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
CHECK_BAILOUT;
bool check_hole = !lookup.IsDontDelete() || lookup.IsReadOnly();
Handle<GlobalObject> global(graph()->info()->global_object());
Handle<GlobalObject> global(info()->global_object());
Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
HInstruction* instr = new HStoreGlobal(value, cell, check_hole);
instr->set_position(position);
@ -3491,64 +3538,62 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
HValue* object,
ZoneMapList* types,
Handle<String> name) {
int number_of_types = Min(types->length(), kMaxLoadPolymorphism);
ZoneMapList maps(number_of_types);
ZoneList<HSubgraph*> subgraphs(number_of_types);
bool needs_generic = (types->length() > kMaxLoadPolymorphism);
// Build subgraphs for each of the specific maps.
//
// TODO(ager): We should recognize when the prototype chains for
// different maps are identical. In that case we can avoid
// repeatedly generating the same prototype map checks.
for (int i = 0; i < number_of_types; ++i) {
// TODO(ager): We should recognize when the prototype chains for different
// maps are identical. In that case we can avoid repeatedly generating the
// same prototype map checks.
int count = 0;
HBasicBlock* join = NULL;
for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
Handle<Map> map = types->at(i);
LookupResult lookup;
map->LookupInDescriptors(NULL, *name, &lookup);
if (lookup.IsProperty() && lookup.type() == FIELD) {
HSubgraph* subgraph = CreateBranchSubgraph(environment());
SubgraphScope scope(this, subgraph);
if (count == 0) {
AddInstruction(new HCheckNonSmi(object)); // Only needed once.
join = graph()->CreateBasicBlock();
}
++count;
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
HCompareMap* compare = new HCompareMap(object, map, if_true, if_false);
current_block()->Finish(compare);
set_current_block(if_true);
HLoadNamedField* instr =
BuildLoadNamedField(object, expr, map, &lookup, false);
instr->set_position(expr->position());
instr->ClearFlag(HValue::kUseGVN); // Don't do GVN on polymorphic loads.
PushAndAdd(instr);
maps.Add(map);
subgraphs.Add(subgraph);
} else {
needs_generic = true;
instr->ClearFlag(HValue::kUseGVN);
AddInstruction(instr);
if (!ast_context()->IsEffect()) Push(instr);
current_block()->Goto(join);
set_current_block(if_false);
}
}
// If none of the properties were named fields we generate a
// generic load.
if (maps.length() == 0) {
// Finish up. Unconditionally deoptimize if we've handled all the maps we
// know about and do not want to handle ones we've never seen. Otherwise
// use a generic IC.
if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
current_block()->FinishExit(new HDeoptimize);
} else {
HInstruction* instr = BuildLoadNamedGeneric(object, expr);
instr->set_position(expr->position());
ast_context()->ReturnInstruction(instr, expr->id());
} else {
// Build subgraph for generic load through IC.
HSubgraph* default_graph = CreateBranchSubgraph(environment());
{ SubgraphScope scope(this, default_graph);
if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
default_graph->exit_block()->FinishExit(new HDeoptimize());
default_graph->set_exit_block(NULL);
} else {
HInstruction* instr = BuildLoadNamedGeneric(object, expr);
instr->set_position(expr->position());
PushAndAdd(instr);
}
}
HBasicBlock* new_exit_block =
BuildTypeSwitch(object, &maps, &subgraphs, default_graph, expr->id());
set_current_block(new_exit_block);
// In an effect context, we did not materialized the value in the
// predecessor environments so there's no need to handle it here.
if (current_block() != NULL && !ast_context()->IsEffect()) {
ast_context()->ReturnValue(Pop());
if (join != NULL) {
AddInstruction(instr);
if (!ast_context()->IsEffect()) Push(instr);
current_block()->Goto(join);
} else {
ast_context()->ReturnInstruction(instr, expr->id());
return;
}
}
ASSERT(join != NULL);
join->SetJoinId(expr->id());
set_current_block(join);
if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}
@ -3916,14 +3961,17 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
}
void HGraphBuilder::TraceInline(Handle<JSFunction> target, bool result) {
SmartPointer<char> callee = target->shared()->DebugName()->ToCString();
SmartPointer<char> caller =
graph()->info()->function()->debug_name()->ToCString();
if (result) {
PrintF("Inlined %s called from %s.\n", *callee, *caller);
} else {
PrintF("Do not inline %s called from %s.\n", *callee, *caller);
// Prints a tracing message for an inlining decision when --trace-inlining
// is enabled.  |reason| == NULL signals a successful inline of |target|;
// a non-NULL |reason| explains why the inlining attempt was abandoned.
void HGraphBuilder::TraceInline(Handle<JSFunction> target, const char* reason) {
if (FLAG_trace_inlining) {
SmartPointer<char> callee = target->shared()->DebugName()->ToCString();
SmartPointer<char> caller =
info()->function()->debug_name()->ToCString();
if (reason == NULL) {
PrintF("Inlined %s called from %s.\n", *callee, *caller);
} else {
PrintF("Did not inline %s called from %s (%s).\n",
*callee, *caller, reason);
}
}
}
@ -3938,123 +3986,122 @@ bool HGraphBuilder::TryInline(Call* expr) {
// Do a quick check on source code length to avoid parsing large
// inlining candidates.
if (FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize) {
if (FLAG_trace_inlining) TraceInline(target, false);
TraceInline(target, "target text too big");
return false;
}
// Target must be inlineable.
if (!target->IsInlineable()) return false;
if (!target->IsInlineable()) {
TraceInline(target, "target not inlineable");
return false;
}
// No context change required.
CompilationInfo* outer_info = graph()->info();
CompilationInfo* outer_info = info();
if (target->context() != outer_info->closure()->context() ||
outer_info->scope()->contains_with() ||
outer_info->scope()->num_heap_slots() > 0) {
TraceInline(target, "target requires context change");
return false;
}
// Don't inline deeper than two calls.
HEnvironment* env = environment();
if (env->outer() != NULL && env->outer()->outer() != NULL) return false;
if (env->outer() != NULL && env->outer()->outer() != NULL) {
TraceInline(target, "inline depth limit reached");
return false;
}
// Don't inline recursive functions.
if (target->shared() == outer_info->closure()->shared()) return false;
if (target->shared() == outer_info->closure()->shared()) {
TraceInline(target, "target is recursive");
return false;
}
// We don't want to add more than a certain number of nodes from inlining.
if (FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) {
if (FLAG_trace_inlining) TraceInline(target, false);
TraceInline(target, "cumulative AST node limit reached");
return false;
}
int count_before = AstNode::Count();
// Parse and allocate variables.
CompilationInfo inner_info(target);
if (!ParserApi::Parse(&inner_info) ||
!Scope::Analyze(&inner_info)) {
CompilationInfo target_info(target);
if (!ParserApi::Parse(&target_info) ||
!Scope::Analyze(&target_info)) {
if (Top::has_pending_exception()) {
// Parse or scope error, never optimize this function.
SetStackOverflow();
// Stop trying to optimize and inline this function.
target->shared()->set_optimization_disabled(true);
}
TraceInline(target, "parse failure");
return false;
}
if (inner_info.scope()->num_heap_slots() > 0) return false;
FunctionLiteral* function = inner_info.function();
if (target_info.scope()->num_heap_slots() > 0) {
TraceInline(target, "target has context-allocated variables");
return false;
}
FunctionLiteral* function = target_info.function();
// Count the number of AST nodes added by inlining this call.
int nodes_added = AstNode::Count() - count_before;
if (FLAG_limit_inlining && nodes_added > kMaxInlinedSize) {
if (FLAG_trace_inlining) TraceInline(target, false);
TraceInline(target, "target AST is too large");
return false;
}
// Check if we can handle all declarations in the inlined functions.
VisitDeclarations(inner_info.scope()->declarations());
VisitDeclarations(target_info.scope()->declarations());
if (HasStackOverflow()) {
TraceInline(target, "target has non-trivial declaration");
ClearStackOverflow();
return false;
}
// Don't inline functions that uses the arguments object or that
// have a mismatching number of parameters.
Handle<SharedFunctionInfo> shared(target->shared());
Handle<SharedFunctionInfo> target_shared(target->shared());
int arity = expr->arguments()->length();
if (function->scope()->arguments() != NULL ||
arity != shared->formal_parameter_count()) {
arity != target_shared->formal_parameter_count()) {
TraceInline(target, "target requires special argument handling");
return false;
}
// All statements in the body must be inlineable.
for (int i = 0, count = function->body()->length(); i < count; ++i) {
if (!function->body()->at(i)->IsInlineable()) return false;
if (!function->body()->at(i)->IsInlineable()) {
TraceInline(target, "target contains unsupported syntax");
return false;
}
}
// Generate the deoptimization data for the unoptimized version of
// the target function if we don't already have it.
if (!shared->has_deoptimization_support()) {
if (!target_shared->has_deoptimization_support()) {
// Note that we compile here using the same AST that we will use for
// generating the optimized inline code.
inner_info.EnableDeoptimizationSupport();
if (!FullCodeGenerator::MakeCode(&inner_info)) return false;
shared->EnableDeoptimizationSupport(*inner_info.code());
Compiler::RecordFunctionCompilation(
Logger::FUNCTION_TAG, &inner_info, shared);
target_info.EnableDeoptimizationSupport();
if (!FullCodeGenerator::MakeCode(&target_info)) {
TraceInline(target, "could not generate deoptimization info");
return false;
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
&target_info,
target_shared);
}
// ----------------------------------------------------------------
// Save the pending call context and type feedback oracle. Set up new ones
// for the inlined function.
ASSERT(shared->has_deoptimization_support());
AstContext* saved_call_context = call_context();
HBasicBlock* saved_function_return = function_return();
TypeFeedbackOracle* saved_oracle = oracle();
// On-stack replacement cannot target inlined functions. Since we don't
// use a separate CompilationInfo structure for the inlined function, we
// save and restore the AST ID in the original compilation info.
int saved_osr_ast_id = graph()->info()->osr_ast_id();
TestContext* test_context = NULL;
if (ast_context()->IsTest()) {
// Inlined body is treated as if it occurs in an 'inlined' call context
// with true and false blocks that will forward to the real ones.
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
if_true->MarkAsInlineReturnTarget();
if_false->MarkAsInlineReturnTarget();
// AstContext constructor pushes on the context stack.
test_context = new TestContext(this, if_true, if_false);
function_return_ = NULL;
} else {
// Inlined body is treated as if it occurs in the original call context.
function_return_ = graph()->CreateBasicBlock();
function_return_->MarkAsInlineReturnTarget();
}
call_context_ = ast_context();
TypeFeedbackOracle new_oracle(
Handle<Code>(shared->code()),
ASSERT(target_shared->has_deoptimization_support());
TypeFeedbackOracle target_oracle(
Handle<Code>(target_shared->code()),
Handle<Context>(target->context()->global_context()));
oracle_ = &new_oracle;
graph()->info()->SetOsrAstId(AstNode::kNoNumber);
FunctionState target_state(this, &target_info, &target_oracle);
HSubgraph* body = CreateInlinedSubgraph(env, target, function);
body->exit_block()->AddInstruction(new HEnterInlined(target, function));
@ -4062,26 +4109,22 @@ bool HGraphBuilder::TryInline(Call* expr) {
if (HasStackOverflow()) {
// Bail out if the inline function did, as we cannot residualize a call
// instead.
delete test_context;
call_context_ = saved_call_context;
function_return_ = saved_function_return;
oracle_ = saved_oracle;
graph()->info()->SetOsrAstId(saved_osr_ast_id);
TraceInline(target, "inline graph construction failed");
return false;
}
// Update inlined nodes count.
inlined_count_ += nodes_added;
if (FLAG_trace_inlining) TraceInline(target, true);
TraceInline(target, NULL);
if (body->exit_block() != NULL) {
// Add a return of undefined if control can fall off the body. In a
// test context, undefined is false.
HValue* return_value = graph()->GetConstantUndefined();
if (test_context == NULL) {
ASSERT(function_return_ != NULL);
body->exit_block()->AddLeaveInlined(return_value, function_return_);
if (inlined_test_context() == NULL) {
ASSERT(function_return() != NULL);
body->exit_block()->AddLeaveInlined(return_value, function_return());
} else {
// The graph builder assumes control can reach both branches of a
// test, so we materialize the undefined value and test it rather than
@ -4094,8 +4137,10 @@ bool HGraphBuilder::TryInline(Call* expr) {
body->exit_block()->Finish(test);
HValue* const no_return_value = NULL;
empty_true->AddLeaveInlined(no_return_value, test_context->if_true());
empty_false->AddLeaveInlined(no_return_value, test_context->if_false());
empty_true->AddLeaveInlined(no_return_value,
inlined_test_context()->if_true());
empty_false->AddLeaveInlined(no_return_value,
inlined_test_context()->if_false());
}
body->set_exit_block(NULL);
}
@ -4107,13 +4152,14 @@ bool HGraphBuilder::TryInline(Call* expr) {
current_block()->Finish(new HGoto(body->entry_block()));
// Fix up the function exits.
if (test_context != NULL) {
HBasicBlock* if_true = test_context->if_true();
HBasicBlock* if_false = test_context->if_false();
if (inlined_test_context() != NULL) {
HBasicBlock* if_true = inlined_test_context()->if_true();
HBasicBlock* if_false = inlined_test_context()->if_false();
if_true->SetJoinId(expr->id());
if_false->SetJoinId(expr->id());
ASSERT(ast_context() == test_context);
delete test_context; // Destructor pops from expression context stack.
ASSERT(ast_context() == inlined_test_context());
// Pop the return test context from the expression context stack.
ClearInlinedTestContext();
// Forward to the real test context.
HValue* const no_return_value = NULL;
@ -4137,15 +4183,10 @@ bool HGraphBuilder::TryInline(Call* expr) {
set_current_block(NULL);
} else {
function_return_->SetJoinId(expr->id());
set_current_block(function_return_);
function_return()->SetJoinId(expr->id());
set_current_block(function_return());
}
call_context_ = saved_call_context;
function_return_ = saved_function_return;
oracle_ = saved_oracle;
graph()->info()->SetOsrAstId(saved_osr_ast_id);
return true;
}
@ -4253,7 +4294,7 @@ bool HGraphBuilder::TryCallApply(Call* expr) {
Property* prop = callee->AsProperty();
ASSERT(prop != NULL);
if (graph()->info()->scope()->arguments() == NULL) return false;
if (info()->scope()->arguments() == NULL) return false;
Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
if (!name->IsEqualTo(CStrVector("apply"))) return false;
@ -4409,10 +4450,9 @@ void HGraphBuilder::VisitCall(Call* expr) {
// If there is a global property cell for the name at compile time and
// access check is not enabled we assume that the function will not change
// and generate optimized code for calling the function.
CompilationInfo* info = graph()->info();
bool known_global_function = info->has_global_object() &&
!info->global_object()->IsAccessCheckNeeded() &&
expr->ComputeGlobalTarget(Handle<GlobalObject>(info->global_object()),
bool known_global_function = info()->has_global_object() &&
!info()->global_object()->IsAccessCheckNeeded() &&
expr->ComputeGlobalTarget(Handle<GlobalObject>(info()->global_object()),
var->name());
if (known_global_function) {
// Push the global object instead of the global receiver because
@ -5042,7 +5082,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
HValue* left = Pop();
Token::Value op = expr->op();
TypeInfo info = oracle()->CompareType(expr);
TypeInfo type_info = oracle()->CompareType(expr);
HInstruction* instr = NULL;
if (op == Token::INSTANCEOF) {
// Check to see if the rhs of the instanceof is a global function not
@ -5051,12 +5091,11 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
Handle<JSFunction> target = Handle<JSFunction>::null();
Variable* var = expr->right()->AsVariableProxy()->AsVariable();
bool global_function = (var != NULL) && var->is_global() && !var->is_this();
CompilationInfo* info = graph()->info();
if (global_function &&
info->has_global_object() &&
!info->global_object()->IsAccessCheckNeeded()) {
info()->has_global_object() &&
!info()->global_object()->IsAccessCheckNeeded()) {
Handle<String> name = var->name();
Handle<GlobalObject> global(info->global_object());
Handle<GlobalObject> global(info()->global_object());
LookupResult lookup;
global->Lookup(*name, &lookup);
if (lookup.IsProperty() &&
@ -5083,7 +5122,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
}
} else if (op == Token::IN) {
BAILOUT("Unsupported comparison: in");
} else if (info.IsNonPrimitive()) {
} else if (type_info.IsNonPrimitive()) {
switch (op) {
case Token::EQ:
case Token::EQ_STRICT: {
@ -5100,7 +5139,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
}
} else {
HCompare* compare = new HCompare(left, right, op);
Representation r = ToRepresentation(info);
Representation r = ToRepresentation(type_info);
compare->SetInputRepresentation(r);
instr = compare;
}

View File

@ -224,13 +224,8 @@ class HGraph: public HSubgraph {
public:
explicit HGraph(CompilationInfo* info);
CompilationInfo* info() const { return info_; }
bool AllowCodeMotion() const;
const ZoneList<HBasicBlock*>* blocks() const { return &blocks_; }
const ZoneList<HPhi*>* phi_list() const { return phi_list_; }
Handle<String> debug_name() const { return info_->function()->debug_name(); }
HEnvironment* start_environment() const { return start_environment_; }
void InitializeInferredTypes();
@ -247,7 +242,7 @@ class HGraph: public HSubgraph {
// which are not supported by the optimizing compiler.
bool CollectPhis();
Handle<Code> Compile();
Handle<Code> Compile(CompilationInfo* info);
void set_undefined_constant(HConstant* constant) {
undefined_constant_.set(constant);
@ -268,9 +263,6 @@ class HGraph: public HSubgraph {
arguments_object_.set(object);
}
// True iff. we are compiling for OSR and the statement is the entry.
bool HasOsrEntryAt(IterationStatement* statement);
int GetMaximumValueID() const { return values_.length(); }
int GetNextBlockID() { return next_block_id_++; }
int GetNextValueID(HValue* value) {
@ -309,7 +301,6 @@ class HGraph: public HSubgraph {
void CheckForBackEdge(HBasicBlock* block, HBasicBlock* successor);
int next_block_id_;
CompilationInfo* info_;
HEnvironment* start_environment_;
ZoneList<HBasicBlock*> blocks_;
ZoneList<HValue*> values_;
@ -460,6 +451,8 @@ class HEnvironment: public ZoneObject {
class HGraphBuilder;
// This class is not BASE_EMBEDDED because our inlining implementation uses
// new and delete.
class AstContext {
public:
bool IsEffect() const { return kind_ == Expression::kEffect; }
@ -551,6 +544,47 @@ class TestContext: public AstContext {
};
class FunctionState BASE_EMBEDDED {
public:
FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle);
~FunctionState();
CompilationInfo* compilation_info() { return compilation_info_; }
TypeFeedbackOracle* oracle() { return oracle_; }
AstContext* call_context() { return call_context_; }
HBasicBlock* function_return() { return function_return_; }
TestContext* test_context() { return test_context_; }
void ClearInlinedTestContext() {
delete test_context_;
test_context_ = NULL;
}
private:
HGraphBuilder* owner_;
CompilationInfo* compilation_info_;
TypeFeedbackOracle* oracle_;
// During function inlining, expression context of the call being
// inlined. NULL when not inlining.
AstContext* call_context_;
// When inlining in an effect of value context, this is the return block.
// It is NULL otherwise. When inlining in a test context, there are a
// pair of return blocks in the context. When not inlining, there is no
// local return point.
HBasicBlock* function_return_;
// When inlining a call in a test context, a context containing a pair of
// return blocks. NULL in all other cases.
TestContext* test_context_;
FunctionState* outer_;
};
class HGraphBuilder: public AstVisitor {
public:
enum BreakType { BREAK, CONTINUE };
@ -600,18 +634,21 @@ class HGraphBuilder: public AstVisitor {
BreakAndContinueScope* next_;
};
explicit HGraphBuilder(TypeFeedbackOracle* oracle)
: oracle_(oracle),
HGraphBuilder(CompilationInfo* info, TypeFeedbackOracle* oracle)
: function_state_(NULL),
initial_function_state_(this, info, oracle),
ast_context_(NULL),
break_scope_(NULL),
graph_(NULL),
current_subgraph_(NULL),
ast_context_(NULL),
call_context_(NULL),
function_return_(NULL),
inlined_count_(0),
break_scope_(NULL) {
inlined_count_(0) {
// This is not initialized in the initializer list because the
// constructor for the initial state relies on function_state_ == NULL
// to know it's the initial state.
function_state_= &initial_function_state_;
}
HGraph* CreateGraph(CompilationInfo* info);
HGraph* CreateGraph();
// Simple accessors.
HGraph* graph() const { return graph_; }
@ -653,11 +690,30 @@ class HGraphBuilder: public AstVisitor {
static const int kMaxSourceSize = 600;
// Simple accessors.
TypeFeedbackOracle* oracle() const { return oracle_; }
FunctionState* function_state() const { return function_state_; }
void set_function_state(FunctionState* state) { function_state_ = state; }
AstContext* ast_context() const { return ast_context_; }
void set_ast_context(AstContext* context) { ast_context_ = context; }
AstContext* call_context() const { return call_context_; }
HBasicBlock* function_return() const { return function_return_; }
// Accessors forwarded to the function state.
CompilationInfo* info() const {
return function_state()->compilation_info();
}
TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
AstContext* call_context() const {
return function_state()->call_context();
}
HBasicBlock* function_return() const {
return function_state()->function_return();
}
TestContext* inlined_test_context() const {
return function_state()->test_context();
}
void ClearInlinedTestContext() {
function_state()->ClearInlinedTestContext();
}
// Generators for inline runtime functions.
#define INLINE_FUNCTION_GENERATOR_DECLARATION(Name, argc, ressize) \
@ -670,6 +726,8 @@ class HGraphBuilder: public AstVisitor {
void Bailout(const char* reason);
void PreProcessOsrEntry(IterationStatement* statement);
// True iff. we are compiling for OSR and the statement is the entry.
bool HasOsrEntryAt(IterationStatement* statement);
HBasicBlock* CreateJoin(HBasicBlock* first,
HBasicBlock* second,
@ -752,7 +810,11 @@ class HGraphBuilder: public AstVisitor {
HValue* receiver,
Handle<Map> receiver_map,
CheckType check_type);
void TraceInline(Handle<JSFunction> target, bool result);
// If --trace-inlining, print a line of the inlining trace. Inlining
// succeeded if the reason string is NULL and failed if there is a
// non-NULL reason string.
void TraceInline(Handle<JSFunction> target, const char* failure_reason);
void HandleGlobalVariableAssignment(Variable* var,
HValue* value,
@ -844,26 +906,25 @@ class HGraphBuilder: public AstVisitor {
HSubgraph* default_graph,
int join_id);
TypeFeedbackOracle* oracle_;
HGraph* graph_;
HSubgraph* current_subgraph_;
// The translation state of the currently-being-translated function.
FunctionState* function_state_;
// The base of the function state stack.
FunctionState initial_function_state_;
// Expression context of the currently visited subexpression. NULL when
// visiting statements.
AstContext* ast_context_;
// During function inlining, expression context of the call being
// inlined. NULL when not inlining.
AstContext* call_context_;
// A stack of breakable statements entered.
BreakAndContinueScope* break_scope_;
// When inlining a call in an effect or value context, the return
// block. NULL otherwise. When inlining a call in a test context, there
// are a pair of target blocks in the call context.
HBasicBlock* function_return_;
HGraph* graph_;
HSubgraph* current_subgraph_;
int inlined_count_;
BreakAndContinueScope* break_scope_;
friend class FunctionState; // Pushes and pops the state stack.
friend class AstContext; // Pushes and pops the AST context stack.
DISALLOW_COPY_AND_ASSIGN(HGraphBuilder);

View File

@ -93,8 +93,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
void LCodeGen::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LCodeGen in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);
@ -2361,7 +2361,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
LInstruction* instr) {
// Change context if needed.
bool change_context =
(graph()->info()->closure()->context() != function->context()) ||
(info()->closure()->context() != function->context()) ||
scope()->contains_with() ||
(scope()->num_heap_slots() > 0);
if (change_context) {
@ -2380,7 +2380,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
// Invoke function.
if (*function == *graph()->info()->closure()) {
if (*function == *info()->closure()) {
__ CallSelf();
} else {
__ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));

View File

@ -56,7 +56,7 @@ class LCodeGen BASE_EMBEDDED {
deoptimizations_(4),
deoptimization_literals_(8),
inlined_function_count_(0),
scope_(chunk->graph()->info()->scope()),
scope_(info->scope()),
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
@ -67,6 +67,7 @@ class LCodeGen BASE_EMBEDDED {
// Simple accessors.
MacroAssembler* masm() const { return masm_; }
CompilationInfo* info() const { return info_; }
// Support for converting LOperands to assembler types.
Operand ToOperand(LOperand* op) const;
@ -124,7 +125,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
int strict_mode_flag() const {
return info_->is_strict() ? kStrictMode : kNonStrictMode;
return info()->is_strict() ? kStrictMode : kNonStrictMode;
}
LChunk* chunk() const { return chunk_; }

View File

@ -469,7 +469,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// shift all parameter indexes down by the number of parameters, and
// make sure they end up negative so they are distinguishable from
// spill slots.
int result = index - graph()->info()->scope()->num_parameters() - 1;
int result = index - info()->scope()->num_parameters() - 1;
ASSERT(result < 0);
return result;
}
@ -477,7 +477,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
ASSERT(-1 <= index); // -1 is the receiver.
return (1 + graph()->info()->scope()->num_parameters() - index) *
return (1 + info()->scope()->num_parameters() - index) *
kPointerSize;
}
@ -516,7 +516,7 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
chunk_ = new LChunk(graph());
chunk_ = new LChunk(info(), graph());
HPhase phase("Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@ -533,8 +533,8 @@ LChunk* LChunkBuilder::Build() {
void LChunkBuilder::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LChunk building in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);

View File

@ -1917,8 +1917,9 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
explicit LChunk(HGraph* graph)
explicit LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
instructions_(32),
pointer_maps_(8),
@ -1935,6 +1936,7 @@ class LChunk: public ZoneObject {
int ParameterAt(int index);
int GetParameterStackSlot(int index) const;
int spill_slot_count() const { return spill_slot_count_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
void AddGapMove(int index, LOperand* from, LOperand* to);
@ -1971,6 +1973,7 @@ class LChunk: public ZoneObject {
private:
int spill_slot_count_;
CompilationInfo* info_;
HGraph* const graph_;
ZoneList<LInstruction*> instructions_;
ZoneList<LPointerMap*> pointer_maps_;
@ -1980,8 +1983,9 @@ class LChunk: public ZoneObject {
class LChunkBuilder BASE_EMBEDDED {
public:
LChunkBuilder(HGraph* graph, LAllocator* allocator)
LChunkBuilder(CompilationInfo* info, HGraph* graph, LAllocator* allocator)
: chunk_(NULL),
info_(info),
graph_(graph),
status_(UNUSED),
current_instruction_(NULL),
@ -2010,6 +2014,7 @@ class LChunkBuilder BASE_EMBEDDED {
};
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
bool is_unused() const { return status_ == UNUSED; }
@ -2116,6 +2121,7 @@ class LChunkBuilder BASE_EMBEDDED {
HArithmeticBinaryOperation* instr);
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Status status_;
HInstruction* current_instruction_;

View File

@ -1274,7 +1274,7 @@ void LAllocator::BuildLiveRanges() {
found = true;
int operand_index = iterator.Current();
PrintF("Function: %s\n",
*graph_->info()->function()->debug_name()->ToCString());
*chunk_->info()->function()->debug_name()->ToCString());
PrintF("Value %d used before first definition!\n", operand_index);
LiveRange* range = LiveRangeFor(operand_index);
PrintF("First use is at %d\n", range->first_pos()->pos().Value());

View File

@ -108,8 +108,8 @@ void LCodeGen::FinishCode(Handle<Code> code) {
void LCodeGen::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LCodeGen in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);
@ -2309,7 +2309,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
LInstruction* instr) {
// Change context if needed.
bool change_context =
(graph()->info()->closure()->context() != function->context()) ||
(info()->closure()->context() != function->context()) ||
scope()->contains_with() ||
(scope()->num_heap_slots() > 0);
if (change_context) {
@ -2326,7 +2326,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
// Invoke function.
if (*function == *graph()->info()->closure()) {
if (*function == *info()->closure()) {
__ CallSelf();
} else {
__ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));

View File

@ -56,7 +56,7 @@ class LCodeGen BASE_EMBEDDED {
jump_table_(4),
deoptimization_literals_(8),
inlined_function_count_(0),
scope_(chunk->graph()->info()->scope()),
scope_(info->scope()),
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
@ -66,6 +66,7 @@ class LCodeGen BASE_EMBEDDED {
// Simple accessors.
MacroAssembler* masm() const { return masm_; }
CompilationInfo* info() const { return info_; }
// Support for converting LOperands to assembler types.
Register ToRegister(LOperand* op) const;
@ -119,7 +120,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
int strict_mode_flag() const {
return info_->is_strict() ? kStrictMode : kNonStrictMode;
return info()->is_strict() ? kStrictMode : kNonStrictMode;
}
LChunk* chunk() const { return chunk_; }

View File

@ -470,7 +470,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// shift all parameter indexes down by the number of parameters, and
// make sure they end up negative so they are distinguishable from
// spill slots.
int result = index - graph()->info()->scope()->num_parameters() - 1;
int result = index - info()->scope()->num_parameters() - 1;
ASSERT(result < 0);
return result;
}
@ -478,7 +478,7 @@ int LChunk::GetParameterStackSlot(int index) const {
// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
ASSERT(-1 <= index); // -1 is the receiver.
return (1 + graph()->info()->scope()->num_parameters() - index) *
return (1 + info()->scope()->num_parameters() - index) *
kPointerSize;
}
@ -517,7 +517,7 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
chunk_ = new LChunk(graph());
chunk_ = new LChunk(info(), graph());
HPhase phase("Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
@ -534,8 +534,8 @@ LChunk* LChunkBuilder::Build() {
void LChunkBuilder::Abort(const char* format, ...) {
if (FLAG_trace_bailout) {
SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
PrintF("Aborting LChunk building in @\"%s\": ", *name);
va_list arguments;
va_start(arguments, format);
OS::VPrint(format, arguments);

View File

@ -1843,8 +1843,9 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
explicit LChunk(HGraph* graph)
explicit LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
instructions_(32),
pointer_maps_(8),
@ -1861,6 +1862,7 @@ class LChunk: public ZoneObject {
int ParameterAt(int index);
int GetParameterStackSlot(int index) const;
int spill_slot_count() const { return spill_slot_count_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
void AddGapMove(int index, LOperand* from, LOperand* to);
@ -1897,6 +1899,7 @@ class LChunk: public ZoneObject {
private:
int spill_slot_count_;
CompilationInfo* info_;
HGraph* const graph_;
ZoneList<LInstruction*> instructions_;
ZoneList<LPointerMap*> pointer_maps_;
@ -1906,8 +1909,9 @@ class LChunk: public ZoneObject {
class LChunkBuilder BASE_EMBEDDED {
public:
LChunkBuilder(HGraph* graph, LAllocator* allocator)
LChunkBuilder(CompilationInfo* info, HGraph* graph, LAllocator* allocator)
: chunk_(NULL),
info_(info),
graph_(graph),
status_(UNUSED),
current_instruction_(NULL),
@ -1936,6 +1940,7 @@ class LChunkBuilder BASE_EMBEDDED {
};
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
bool is_unused() const { return status_ == UNUSED; }
@ -2042,6 +2047,7 @@ class LChunkBuilder BASE_EMBEDDED {
HArithmeticBinaryOperation* instr);
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
Status status_;
HInstruction* current_instruction_;