2012-05-23 14:24:29 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2011-01-07 11:49:22 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#include "v8.h"
|
2011-01-07 11:49:22 +00:00
|
|
|
#include "lithium.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
#include "scopes.h"
|
2014-05-12 07:49:11 +00:00
|
|
|
#include "serialize.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
|
|
|
|
#if V8_TARGET_ARCH_IA32
|
|
|
|
#include "ia32/lithium-ia32.h"
|
2012-07-12 15:10:34 +00:00
|
|
|
#include "ia32/lithium-codegen-ia32.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
#elif V8_TARGET_ARCH_X64
|
|
|
|
#include "x64/lithium-x64.h"
|
2012-07-12 15:10:34 +00:00
|
|
|
#include "x64/lithium-codegen-x64.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
#elif V8_TARGET_ARCH_ARM
|
|
|
|
#include "arm/lithium-arm.h"
|
2012-07-12 15:10:34 +00:00
|
|
|
#include "arm/lithium-codegen-arm.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
#elif V8_TARGET_ARCH_MIPS
|
|
|
|
#include "mips/lithium-mips.h"
|
2012-07-12 15:10:34 +00:00
|
|
|
#include "mips/lithium-codegen-mips.h"
|
2014-03-21 09:28:26 +00:00
|
|
|
#elif V8_TARGET_ARCH_ARM64
|
|
|
|
#include "arm64/lithium-arm64.h"
|
|
|
|
#include "arm64/lithium-codegen-arm64.h"
|
2012-07-11 14:42:17 +00:00
|
|
|
#else
|
|
|
|
#error "Unknown architecture."
|
|
|
|
#endif
|
2011-01-07 11:49:22 +00:00
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
|
2011-01-19 13:55:56 +00:00
|
|
|
|
|
|
|
// Writes a human-readable description of this operand to |stream|,
// e.g. "v5(R)" for an unallocated value that must get a register, or
// "[stack:3]" for a stack slot.
void LOperand::PrintTo(StringStream* stream) {
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED: {
      LUnallocated* unallocated = LUnallocated::cast(this);
      stream->Add("v%d", unallocated->virtual_register());
      // FIXED_SLOT is a basic policy and carries its own slot index;
      // the extended policies below do not apply to it.
      if (unallocated->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unallocated->fixed_slot_index());
        break;
      }
      switch (unallocated->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER:
          stream->Add("(=%s)", Register::AllocationIndexToString(
              unallocated->fixed_register_index()));
          break;
        case LUnallocated::FIXED_DOUBLE_REGISTER:
          stream->Add("(=%s)", DoubleRegister::AllocationIndexToString(
              unallocated->fixed_register_index()));
          break;
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    }
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER:
      stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
      break;
    case DOUBLE_REGISTER:
      stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
      break;
  }
}
|
|
|
|
|
2014-03-11 02:55:06 +00:00
|
|
|
|
|
|
|
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
|
|
|
|
LSubKindOperand<kOperandKind, kNumCachedOperands>*
|
|
|
|
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
|
|
|
|
|
|
|
|
|
|
|
|
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
|
|
|
|
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
|
|
|
|
if (cache) return;
|
|
|
|
cache = new LSubKindOperand[kNumCachedOperands];
|
|
|
|
for (int i = 0; i < kNumCachedOperands; i++) {
|
|
|
|
cache[i].ConvertTo(kOperandKind, i);
|
2012-03-28 13:12:00 +00:00
|
|
|
}
|
2014-03-11 02:55:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
|
|
|
|
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
|
|
|
|
delete[] cache;
|
|
|
|
}
|
2012-03-12 13:56:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
void LOperand::SetUpCaches() {
|
2014-03-11 02:55:06 +00:00
|
|
|
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
|
2012-03-28 13:12:00 +00:00
|
|
|
LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
|
|
|
|
#undef LITHIUM_OPERAND_SETUP
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void LOperand::TearDownCaches() {
|
2014-03-11 02:55:06 +00:00
|
|
|
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
|
2012-03-28 13:12:00 +00:00
|
|
|
LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
|
|
|
|
#undef LITHIUM_OPERAND_TEARDOWN
|
2012-03-12 13:56:56 +00:00
|
|
|
}
|
2011-01-19 13:55:56 +00:00
|
|
|
|
2012-03-28 13:12:00 +00:00
|
|
|
|
2011-01-10 11:31:21 +00:00
|
|
|
// A parallel move is redundant iff every one of its component moves is.
bool LParallelMove::IsRedundant() const {
  for (int index = 0; index < move_operands_.length(); ++index) {
    if (!move_operands_[index].IsRedundant()) return false;
  }
  return true;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prints the non-eliminated moves as "dst = src;" entries separated by
// spaces; a move whose source equals its destination prints just "dst;".
void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool printed_any = false;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (move_operands_[i].IsEliminated()) continue;
    LOperand* source = move_operands_[i].source();
    LOperand* destination = move_operands_[i].destination();
    if (printed_any) stream->Add(" ");
    printed_any = true;
    destination->PrintTo(stream);
    if (!source->Equals(destination)) {
      stream->Add(" = ");
      source->PrintTo(stream);
    }
    stream->Add(";");
  }
}
|
|
|
|
|
|
|
|
|
2011-01-11 15:51:08 +00:00
|
|
|
// Dumps the deopt environment: ids, counts, then each value (or "[hole]"
// for missing ones), semicolon-separated and wrapped in brackets.
void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    LOperand* value = values_[i];
    if (value == NULL) {
      stream->Add("[hole]");
    } else {
      value->PrintTo(stream);
    }
  }
  stream->Add("]");
}
|
|
|
|
|
|
|
|
|
2012-06-11 12:42:31 +00:00
|
|
|
// Registers |op| as a tagged pointer location for GC safepoints.
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Incoming arguments live at negative stack-slot indices and are not
  // recorded as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}
|
|
|
|
|
|
|
|
|
2011-09-19 18:36:47 +00:00
|
|
|
// Removes every recorded pointer operand equal to |op|.
void LPointerMap::RemovePointer(LOperand* op) {
  // Incoming arguments live at negative stack-slot indices and were never
  // recorded as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  // Iterate backwards so removal does not disturb the indices still to
  // be visited.
  for (int i = pointer_operands_.length() - 1; i >= 0; --i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
    }
  }
}
|
|
|
|
|
|
|
|
|
2012-06-11 12:42:31 +00:00
|
|
|
// Registers |op| as an untagged (raw) value location for safepoints.
void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Incoming arguments live at negative stack-slot indices and are not
  // recorded.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}
|
|
|
|
|
|
|
|
|
2011-01-11 15:51:08 +00:00
|
|
|
// Prints the recorded pointer operands as "{op;op;...}".
void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}
|
|
|
|
|
|
|
|
|
2013-02-07 13:15:41 +00:00
|
|
|
int StackSlotOffset(int index) {
|
|
|
|
if (index >= 0) {
|
|
|
|
// Local or spill slot. Skip the frame pointer, function, and
|
|
|
|
// context in the fixed part of the frame.
|
2013-11-20 13:44:24 +00:00
|
|
|
return -(index + 1) * kPointerSize -
|
|
|
|
StandardFrameConstants::kFixedFrameSizeFromFp;
|
2013-02-07 13:15:41 +00:00
|
|
|
} else {
|
|
|
|
// Incoming parameter. Skip the return address.
|
2013-07-23 13:46:10 +00:00
|
|
|
return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
|
2013-02-07 13:15:41 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-02-04 12:01:59 +00:00
|
|
|
// Constructs an empty chunk for |graph|; all backing lists are allocated
// in the graph's zone.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()),
      deprecation_dependencies_(MapLess(), MapAllocator(graph->zone())),
      stability_dependencies_(MapLess(), MapAllocator(graph->zone())) {
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Returns the LLabel that starts the given basic block; every block's
// first instruction is expected to be a label.
LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int label_index = block->first_instruction_index();
  return LLabel::cast(instructions_[label_index]);
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
int LChunk::LookupDestination(int block_id) const {
|
2012-07-11 14:42:17 +00:00
|
|
|
LLabel* cur = GetLabel(block_id);
|
|
|
|
while (cur->replacement() != NULL) {
|
|
|
|
cur = cur->replacement();
|
|
|
|
}
|
|
|
|
return cur->block_id();
|
|
|
|
}
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Returns the assembler label for a block that has not been elided.
Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  ASSERT(!label->HasReplacement());
  return label->label();
}
|
|
|
|
|
2013-07-05 09:52:11 +00:00
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
void LChunk::MarkEmptyBlocks() {
|
2013-06-25 12:22:26 +00:00
|
|
|
LPhase phase("L_Mark empty blocks", this);
|
2012-07-11 14:42:17 +00:00
|
|
|
for (int i = 0; i < graph()->blocks()->length(); ++i) {
|
|
|
|
HBasicBlock* block = graph()->blocks()->at(i);
|
|
|
|
int first = block->first_instruction_index();
|
|
|
|
int last = block->last_instruction_index();
|
|
|
|
LInstruction* first_instr = instructions()->at(first);
|
|
|
|
LInstruction* last_instr = instructions()->at(last);
|
|
|
|
|
|
|
|
LLabel* label = LLabel::cast(first_instr);
|
|
|
|
if (last_instr->IsGoto()) {
|
|
|
|
LGoto* goto_instr = LGoto::cast(last_instr);
|
|
|
|
if (label->IsRedundant() &&
|
|
|
|
!label->is_loop_header()) {
|
|
|
|
bool can_eliminate = true;
|
|
|
|
for (int i = first + 1; i < last && can_eliminate; ++i) {
|
|
|
|
LInstruction* cur = instructions()->at(i);
|
|
|
|
if (cur->IsGap()) {
|
|
|
|
LGap* gap = LGap::cast(cur);
|
|
|
|
if (!gap->IsRedundant()) {
|
|
|
|
can_eliminate = false;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
can_eliminate = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (can_eliminate) {
|
|
|
|
label->set_replacement(GetLabel(goto_instr->block_id()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Appends |instr| to the instruction stream together with a gap
// instruction. For control instructions the gap precedes the instruction;
// otherwise it follows it. The instruction's position is recorded in its
// pointer map, if any.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int instruction_index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    instruction_index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    instruction_index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(instruction_index);
  }
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Creates a constant operand keyed by the hydrogen constant's id.
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
int LChunk::GetParameterStackSlot(int index) const {
|
2012-07-11 14:42:17 +00:00
|
|
|
// The receiver is at index 0, the first parameter at index 1, so we
|
|
|
|
// shift all parameter indexes down by the number of parameters, and
|
|
|
|
// make sure they end up negative so they are distinguishable from
|
|
|
|
// spill slots.
|
2013-11-15 10:52:05 +00:00
|
|
|
int result = index - info()->num_parameters() - 1;
|
|
|
|
|
2012-07-11 14:42:17 +00:00
|
|
|
ASSERT(result < 0);
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// A parameter relative to ebp in the arguments stub.
|
2012-07-12 15:29:14 +00:00
|
|
|
int LChunk::ParameterAt(int index) {
|
2012-07-11 14:42:17 +00:00
|
|
|
ASSERT(-1 <= index); // -1 is the receiver.
|
|
|
|
return (1 + info()->scope()->num_parameters() - index) *
|
|
|
|
kPointerSize;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Returns the gap instruction at |index|; the caller must ensure one
// is there (see IsGapAt).
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
bool LChunk::IsGapAt(int index) const {
|
2012-07-11 14:42:17 +00:00
|
|
|
return instructions_[index]->IsGap();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
int LChunk::NearestGapPos(int index) const {
|
2012-07-11 14:42:17 +00:00
|
|
|
while (!IsGapAt(index)) index--;
|
|
|
|
return index;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
|
2012-07-11 14:42:17 +00:00
|
|
|
GetGapAt(index)->GetOrCreateParallelMove(
|
|
|
|
LGap::START, zone())->AddMove(from, to, zone());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Resolves a constant operand back to the hydrogen constant it denotes.
HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Returns the representation of the hydrogen value behind a constant
// operand.
Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  HValue* value = graph_->LookupValue(operand->index());
  return value->representation();
}
|
|
|
|
|
|
|
|
|
2014-05-02 06:37:54 +00:00
|
|
|
// Registers |code| as dependent on the maps collected during compilation,
// so the code is deoptimized if a map is deprecated or loses stability,
// then commits the compilation info's own dependencies.
void LChunk::CommitDependencies(Handle<Code> code) const {
  for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
       iend = deprecation_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    ASSERT(!map->is_deprecated());
    ASSERT(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (MapSet::const_iterator it = stability_dependencies_.begin(),
       iend = stability_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    ASSERT(map->is_stable());
    ASSERT(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->CommitDependencies(code);
}
|
|
|
|
|
|
|
|
|
2012-07-12 15:29:14 +00:00
|
|
|
// Builds and register-allocates a chunk for |graph|. Returns NULL (with a
// bailout reason recorded on the compilation info) if the graph has too
// many virtual registers or allocation fails.
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();

  int value_count = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (value_count > LUnallocated::kMaxVirtualRegisters) {
    info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }

  LAllocator allocator(value_count, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());
  return chunk;
}
|
|
|
|
|
|
|
|
|
2013-04-18 09:50:46 +00:00
|
|
|
// Generates machine code for this chunk. Returns a null handle if code
// generation is aborted.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (!generator.GenerateCode()) {
    assembler.AbortedCodeGeneration();
    return Handle<Code>::null();
  }

  generator.CheckEnvironmentUsage();
  CodeGenerator::MakeCodePrologue(info(), "optimized");
  Code::Flags flags = info()->flags();
  Handle<Code> code =
      CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
  generator.FinishCode(code);
  CommitDependencies(code);
  code->set_is_crankshafted(true);
  void* jit_handler_data =
      assembler.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(info()->isolate(),
                 CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

  CodeGenerator::PrintCode(code, info());
  // Code that must not have an eager frame may not be generated while the
  // serializer is active.
  ASSERT(!(info()->isolate()->serializer_enabled() &&
           info()->GetMustNotHaveEagerFrame() &&
           generator.NeedsEagerFrame()));
  return code;
}
|
|
|
|
|
|
|
|
|
2013-02-04 12:01:59 +00:00
|
|
|
// Records which double registers the allocator assigned and, when the
// compiled code saves caller doubles, reserves spill slots for each of
// them.
//
// Improvement over the previous version: info()->saves_caller_doubles()
// and the kDoubleSize/kPointerSize comparison are loop-invariant, so they
// are now evaluated once instead of on every iteration.
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  if (!info()->saves_caller_doubles()) return;
  // On targets where a double spans two pointer-sized slots, each saved
  // double register needs two spill slots; otherwise one.
  const int slots_per_double = (kDoubleSize == kPointerSize * 2) ? 2 : 1;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    spill_slot_count_ += slots_per_double;
    iterator.Advance();
  }
}
|
|
|
|
|
|
|
|
|
2014-01-07 14:36:26 +00:00
|
|
|
// Builds an LEnvironment mirroring |hydrogen_env| (recursively including
// its outer environments). Values are added in two passes: first all
// plain values, with materialization markers as holes where captured or
// arguments objects go; then the captured objects themselves are stored
// recursively via AddObjectToMaterialize.
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env,
    int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  // Outer environments are created first so this one can chain to them.
  LEnvironment* outer = CreateEnvironment(hydrogen_env->outer(),
                                          argument_index_accumulator,
                                          objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  ASSERT(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);
  int value_count = hydrogen_env->length() - hydrogen_env->specials_count();
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Pass 1: store the environment description into the environment
  // (with holes for nested objects).
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArgument());  // Do not deopt outgoing arguments
    LOperand* op =
        (value->IsArgumentsObject() || value->IsCapturedObject())
            ? LEnvironment::materialization_marker()
            : UseAny(value);
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Pass 2: recursively store the nested objects into the environment.
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Add an object to the supplied environment and object materialization list.
|
|
|
|
//
|
|
|
|
// Notes:
|
|
|
|
//
|
|
|
|
// We are building three lists here:
|
|
|
|
//
|
|
|
|
// 1. In the result->object_mapping_ list (added to by the
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
// LEnvironment::Add*Object methods), we store the lengths (number
|
|
|
|
// of fields) of the captured objects in depth-first traversal order, or
|
|
|
|
// in case of duplicated objects, we store the index to the duplicate object
|
|
|
|
// (with a tag to differentiate between captured and duplicated objects).
|
2014-01-07 14:36:26 +00:00
|
|
|
//
|
|
|
|
// 2. The object fields are stored in the result->values_ list
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
// (added to by the LEnvironment.AddValue method) sequentially as lists
|
|
|
|
// of fields with holes for nested objects (the holes will be expanded
|
|
|
|
// later by LCodegen::AddToTranslation according to the
|
|
|
|
// LEnvironment.object_mapping_ list).
|
2014-01-07 14:36:26 +00:00
|
|
|
//
|
|
|
|
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
|
|
|
|
// in the same order as result->object_mapping_ list. This is used
|
|
|
|
// to detect duplicate values and calculate the corresponding object index.
|
|
|
|
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
|
|
|
|
ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
|
|
|
|
int object_index = objects_to_materialize->length();
|
|
|
|
// Store the hydrogen value into the de-duplication array
|
|
|
|
objects_to_materialize->Add(value, zone());
|
|
|
|
// Find out whether we are storing a duplicated value
|
|
|
|
int previously_materialized_object = -1;
|
|
|
|
for (int prev = 0; prev < object_index; ++prev) {
|
|
|
|
if (objects_to_materialize->at(prev) == value) {
|
|
|
|
previously_materialized_object = prev;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Store the captured object length (or duplicated object index)
|
|
|
|
// into the environment. For duplicated objects, we stop here.
|
|
|
|
int length = value->OperandCount();
|
|
|
|
bool is_arguments = value->IsArgumentsObject();
|
|
|
|
if (previously_materialized_object >= 0) {
|
|
|
|
result->AddDuplicateObject(previously_materialized_object);
|
|
|
|
return;
|
|
|
|
} else {
|
|
|
|
result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
|
|
|
|
}
|
|
|
|
// Store the captured object's fields into the environment
|
|
|
|
for (int i = is_arguments ? 1 : 0; i < length; ++i) {
|
|
|
|
LOperand* op;
|
|
|
|
HValue* arg_value = value->OperandAt(i);
|
|
|
|
if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
|
|
|
|
// Insert a hole for nested objects
|
|
|
|
op = LEnvironment::materialization_marker();
|
|
|
|
} else {
|
|
|
|
ASSERT(!arg_value->IsPushArgument());
|
|
|
|
// For ordinary values, tell the register allocator we need the value
|
|
|
|
// to be alive here
|
|
|
|
op = UseAny(arg_value);
|
|
|
|
}
|
|
|
|
result->AddValue(op,
|
|
|
|
arg_value->representation(),
|
|
|
|
arg_value->CheckFlag(HInstruction::kUint32));
|
|
|
|
}
|
|
|
|
// Recursively store all the nested captured objects into the environment
|
|
|
|
for (int i = is_arguments ? 1 : 0; i < length; ++i) {
|
|
|
|
HValue* arg_value = value->OperandAt(i);
|
|
|
|
if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
|
|
|
|
AddObjectToMaterialize(arg_value, objects_to_materialize, result);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-10-02 11:43:41 +00:00
|
|
|
// If the control instruction's successor is statically known, replace it
// with a plain goto; otherwise return NULL to keep the original.
LInstruction* LChunkBuilder::CheckElideControlInstruction(
    HControlInstruction* instr) {
  HBasicBlock* successor = NULL;
  if (instr->KnownSuccessorBlock(&successor)) {
    return new(zone()) LGoto(successor);
  }
  return NULL;
}
|
|
|
|
|
|
|
|
|
2013-06-25 12:22:26 +00:00
|
|
|
// Emits the lithium trace for this phase on destruction, when tracing
// output is requested.
LPhase::~LPhase() {
  if (!ShouldProduceTraceOutput()) return;
  isolate()->GetHTracer()->TraceLithium(name(), chunk_);
}
|
|
|
|
|
|
|
|
|
2011-01-07 11:49:22 +00:00
|
|
|
} } // namespace v8::internal
|