// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/lithium.h"

#include "src/v8.h"

#include "src/scopes.h"
#include "src/serialize.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-ia32.h"  // NOLINT
#include "src/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-x64.h"  // NOLINT
#include "src/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-arm.h"  // NOLINT
#include "src/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-mips.h"  // NOLINT
#include "src/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-arm64.h"  // NOLINT
#include "src/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/lithium-mips64.h"  // NOLINT
#include "src/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-x87.h"  // NOLINT
#include "src/x87/lithium-codegen-x87.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

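// Emit a human-readable description of this operand, e.g. "v7(R)" for an
// unallocated operand that must end up in a register, "[stack:3]" for a
// stack slot, or "[eax|R]" for an assigned register (register names depend
// on the target architecture).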
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= Register::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 ||
          reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    DoubleRegister::AllocationIndexToString(reg_index));
      }
      break;
    }
  }
}


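// Operands of each concrete kind with small indices are kept in a static
// per-kind cache so the common low-numbered operands can be shared instead
// of being allocated repeatedly.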
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}


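// Worked example: on a 32-bit target where kPointerSize, kFPOnStackSize and
// kPCOnStackSize are all 4 and the fixed frame part below fp is two words,
// spill slot 0 ends up at fp - 12 while incoming parameter index -1 (the
// slot just above the saved fp and return address) maps to fp + 8.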
int StackSlotOffset(int index) {
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return -(index + 1) * kPointerSize -
        StandardFrameConstants::kFixedFrameSizeFromFp;
  } else {
    // Incoming parameter. Skip the return address.
    return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
  }
}


LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_closures_(1, info->zone()),
      deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
      stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}


LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}


Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}


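// A block is "empty" if its label is redundant, it is not a loop header,
// and everything between the label and the final goto is a redundant gap;
// such labels are replaced by the label of the goto's target block.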
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}


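// Every instruction is paired with a gap: the gap precedes control
// instructions and follows all others, and the instruction's pointer map
// (if any) is recorded at the instruction's own position in the chunk.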
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}


LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


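// For example, with 3 declared parameters the receiver (index 0) maps to
// slot -4 and the last parameter (index 3) to slot -1, so parameter slots
// never collide with non-negative spill slot indices.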
int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
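// For example, assuming n declared parameters indexed 0 through n - 1, the
// receiver (index -1) is at offset (n + 2) * kPointerSize above ebp and the
// last parameter at offset 2 * kPointerSize.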
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


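// Register this code object on the maps it depends on, so that deprecating
// one of those maps (or invalidating its stability) deoptimizes the code,
// then let the compilation info commit its own dependencies.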
void LChunk::CommitDependencies(Handle<Code> code) const {
  for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
       iend = deprecation_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (MapSet::const_iterator it = stability_dependencies_.begin(),
       iend = stability_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->CommitDependencies(code);
}


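// Build the lithium chunk for the given graph and run the register
// allocator on it. Returns NULL (after aborting the optimization) if the
// graph needs more virtual registers than are available or if register
// allocation fails.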
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}


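// Remember which double registers the allocator assigned; if the code must
// preserve caller doubles, reserve one spill slot per allocated double
// register (two when a double needs twice the pointer size).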
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        spill_slot_count_ += 2;
      } else {
        spill_slot_count_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}


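// Translate a hydrogen deoptimization environment (and, recursively, its
// outer environments) into an LEnvironment. Ordinary values are registered
// with the allocator via UseAny, while arguments objects and captured
// objects get materialization markers and are described separately by
// AddObjectToMaterialize below.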
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}


// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment.AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment.object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


} }  // namespace v8::internal