[turbofan] add non-fixed slot constraint to register allocator

R=jarin@chromium.org

BUG=

Review URL: https://codereview.chromium.org/1018853003

Cr-Commit-Position: refs/heads/master@{#27373}
This commit is contained in:
dcarney 2015-03-23 09:03:14 -07:00 committed by Commit bot
parent 24a9b8815e
commit 0a835afb29
10 changed files with 240 additions and 59 deletions

View File

@ -75,6 +75,11 @@ class OperandGenerator {
GetVReg(node)));
}
InstructionOperand UseUniqueSlot(Node* node) {
return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_SLOT,
GetVReg(node)));
}
// Use register or operand for the node. If a register is chosen, it won't
// alias any temporary or output registers.
InstructionOperand UseUnique(Node* node) {

View File

@ -1031,7 +1031,7 @@ FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
}
static InstructionOperand UseOrImmediate(OperandGenerator* g, Node* input) {
static InstructionOperand SlotOrImmediate(OperandGenerator* g, Node* input) {
switch (input->opcode()) {
case IrOpcode::kInt32Constant:
case IrOpcode::kNumberConstant:
@ -1039,7 +1039,7 @@ static InstructionOperand UseOrImmediate(OperandGenerator* g, Node* input) {
case IrOpcode::kHeapConstant:
return g->UseImmediate(input);
default:
return g->UseUnique(input);
return g->UseUniqueSlot(input);
}
}
@ -1074,19 +1074,19 @@ void InstructionSelector::AddFrameStateInputs(
size_t value_index = 0;
for (StateValuesAccess::TypedNode input_node :
StateValuesAccess(parameters)) {
inputs->push_back(UseOrImmediate(&g, input_node.node));
inputs->push_back(SlotOrImmediate(&g, input_node.node));
descriptor->SetType(value_index++, input_node.type);
}
if (descriptor->HasContext()) {
inputs->push_back(UseOrImmediate(&g, context));
inputs->push_back(SlotOrImmediate(&g, context));
descriptor->SetType(value_index++, kMachAnyTagged);
}
for (StateValuesAccess::TypedNode input_node : StateValuesAccess(locals)) {
inputs->push_back(UseOrImmediate(&g, input_node.node));
inputs->push_back(SlotOrImmediate(&g, input_node.node));
descriptor->SetType(value_index++, input_node.type);
}
for (StateValuesAccess::TypedNode input_node : StateValuesAccess(stack)) {
inputs->push_back(UseOrImmediate(&g, input_node.node));
inputs->push_back(SlotOrImmediate(&g, input_node.node));
descriptor->SetType(value_index++, input_node.type);
}
DCHECK(value_index == descriptor->GetSize());

View File

@ -33,6 +33,8 @@ std::ostream& operator<<(std::ostream& os,
unalloc->fixed_register_index()) << ")";
case UnallocatedOperand::MUST_HAVE_REGISTER:
return os << "(R)";
case UnallocatedOperand::MUST_HAVE_SLOT:
return os << "(S)";
case UnallocatedOperand::SAME_AS_FIRST_INPUT:
return os << "(1)";
case UnallocatedOperand::ANY:

View File

@ -137,6 +137,7 @@ class UnallocatedOperand : public InstructionOperand {
FIXED_REGISTER,
FIXED_DOUBLE_REGISTER,
MUST_HAVE_REGISTER,
MUST_HAVE_SLOT,
SAME_AS_FIRST_INPUT
};
@ -254,6 +255,10 @@ class UnallocatedOperand : public InstructionOperand {
return basic_policy() == EXTENDED_POLICY &&
extended_policy() == MUST_HAVE_REGISTER;
}
bool HasSlotPolicy() const {
return basic_policy() == EXTENDED_POLICY &&
extended_policy() == MUST_HAVE_SLOT;
}
bool HasSameAsInputPolicy() const {
return basic_policy() == EXTENDED_POLICY &&
extended_policy() == SAME_AS_FIRST_INPUT;

View File

@ -162,6 +162,13 @@ void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
constraint->type_ = kRegister;
}
break;
case UnallocatedOperand::MUST_HAVE_SLOT:
if (sequence()->IsDouble(vreg)) {
constraint->type_ = kDoubleSlot;
} else {
constraint->type_ = kSlot;
}
break;
case UnallocatedOperand::SAME_AS_FIRST_INPUT:
constraint->type_ = kSameAsFirst;
break;
@ -200,6 +207,12 @@ void RegisterAllocatorVerifier::CheckConstraint(
CHECK(op->IsStackSlot());
CHECK_EQ(op->index(), constraint->value_);
return;
case kSlot:
CHECK(op->IsStackSlot());
return;
case kDoubleSlot:
CHECK(op->IsDoubleStackSlot());
return;
case kNone:
CHECK(op->IsRegister() || op->IsStackSlot());
return;
@ -301,7 +314,10 @@ class OperandMap : public ZoneObject {
if (i->IsEliminated()) continue;
auto cur = map().find(i->source());
CHECK(cur != map().end());
to_insert.insert(std::make_pair(i->destination(), cur->second));
auto res =
to_insert.insert(std::make_pair(i->destination(), cur->second));
// Ensure injectivity of moves.
CHECK(res.second);
}
// Drop current mappings.
for (auto i = moves->begin(); i != moves->end(); ++i) {

View File

@ -30,6 +30,8 @@ class RegisterAllocatorVerifier FINAL : public ZoneObject {
kFixedRegister,
kDoubleRegister,
kFixedDoubleRegister,
kSlot,
kDoubleSlot,
kFixedSlot,
kNone,
kNoneDouble,

View File

@ -39,17 +39,22 @@ static void RemoveElement(ZoneVector<LiveRange*>* v, LiveRange* range) {
UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
InstructionOperand* hint)
: operand_(operand),
hint_(hint),
pos_(pos),
next_(nullptr),
requires_reg_(false),
register_beneficial_(true) {
: operand_(operand), hint_(hint), pos_(pos), next_(nullptr), flags_(0) {
bool register_beneficial = true;
UsePositionType type = UsePositionType::kAny;
if (operand_ != nullptr && operand_->IsUnallocated()) {
const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
requires_reg_ = unalloc->HasRegisterPolicy();
register_beneficial_ = !unalloc->HasAnyPolicy();
if (unalloc->HasRegisterPolicy()) {
type = UsePositionType::kRequiresRegister;
} else if (unalloc->HasSlotPolicy()) {
type = UsePositionType::kRequiresSlot;
register_beneficial = false;
} else {
register_beneficial = !unalloc->HasAnyPolicy();
}
}
flags_ = TypeField::encode(type) |
RegisterBeneficialField::encode(register_beneficial);
DCHECK(pos_.IsValid());
}
@ -59,10 +64,11 @@ bool UsePosition::HasHint() const {
}
bool UsePosition::RequiresRegister() const { return requires_reg_; }
bool UsePosition::RegisterIsBeneficial() const { return register_beneficial_; }
void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
flags_ = TypeField::encode(type) |
RegisterBeneficialField::encode(register_beneficial);
}
void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
@ -117,6 +123,7 @@ bool LiveRange::HasOverlap(UseInterval* target) const {
LiveRange::LiveRange(int id, Zone* zone)
: id_(id),
spilled_(false),
has_slot_use_(false),
is_phi_(false),
is_non_loop_phi_(false),
kind_(UNALLOCATED_REGISTERS),
@ -140,7 +147,7 @@ void LiveRange::set_assigned_register(int reg,
DCHECK(!HasRegisterAssigned() && !IsSpilled());
assigned_register_ = reg;
// TODO(dcarney): stop aliasing hint operands.
ConvertUsesToOperand(GetAssignedOperand(operand_cache));
ConvertUsesToOperand(GetAssignedOperand(operand_cache), nullptr);
}
@ -161,16 +168,32 @@ void LiveRange::SpillAtDefinition(Zone* zone, int gap_index,
void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* op) {
auto to_spill = TopLevel()->spills_at_definition_;
if (to_spill == nullptr) return;
InstructionOperand* op,
bool might_be_duplicated) {
DCHECK(!IsChild());
auto zone = sequence->zone();
for (; to_spill != nullptr; to_spill = to_spill->next) {
for (auto to_spill = spills_at_definition_; to_spill != nullptr;
to_spill = to_spill->next) {
auto gap = sequence->GapAt(to_spill->gap_index);
auto move = gap->GetOrCreateParallelMove(GapInstruction::START, zone);
// Skip insertion if it's possible that the move exists already as a
// constraint move from a fixed output register to a slot.
if (might_be_duplicated) {
bool found = false;
auto move_ops = move->move_operands();
for (auto move_op = move_ops->begin(); move_op != move_ops->end();
++move_op) {
if (move_op->IsEliminated()) continue;
if (move_op->source()->Equals(to_spill->operand) &&
move_op->destination()->Equals(op)) {
found = true;
break;
}
}
if (found) continue;
}
move->AddMove(to_spill->operand, op, zone);
}
TopLevel()->spills_at_definition_ = nullptr;
}
@ -234,7 +257,7 @@ UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
UsePosition* pos = NextUsePosition(start);
while (pos != nullptr && !pos->RequiresRegister()) {
while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
pos = pos->next();
}
return pos;
@ -509,18 +532,27 @@ void LiveRange::AddUsePosition(LifetimePosition pos,
}
void LiveRange::ConvertUsesToOperand(InstructionOperand* op) {
auto use_pos = first_pos();
while (use_pos != nullptr) {
DCHECK(Start().Value() <= use_pos->pos().Value() &&
use_pos->pos().Value() <= End().Value());
if (use_pos->HasOperand()) {
DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
!use_pos->RequiresRegister());
use_pos->operand()->ConvertTo(op->kind(), op->index());
void LiveRange::ConvertUsesToOperand(InstructionOperand* op,
InstructionOperand* spill_op) {
for (auto pos = first_pos(); pos != nullptr; pos = pos->next()) {
DCHECK(Start().Value() <= pos->pos().Value() &&
pos->pos().Value() <= End().Value());
if (!pos->HasOperand()) {
continue;
}
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
if (spill_op != nullptr) {
pos->operand()->ConvertTo(spill_op->kind(), spill_op->index());
}
break;
case UsePositionType::kRequiresRegister:
DCHECK(op->IsRegister() || op->IsDoubleRegister());
// Fall through.
case UsePositionType::kAny:
pos->operand()->ConvertTo(op->kind(), op->index());
break;
}
use_pos = use_pos->next();
}
}
@ -957,12 +989,15 @@ void RegisterAllocator::AssignSpillSlots() {
void RegisterAllocator::CommitAssignment() {
for (auto range : live_ranges()) {
if (range == nullptr || range->IsEmpty()) continue;
// Register assignments were committed in set_assigned_register.
if (range->HasRegisterAssigned()) continue;
auto assigned = range->GetAssignedOperand(operand_cache());
range->ConvertUsesToOperand(assigned);
if (range->IsSpilled()) {
range->CommitSpillsAtDefinition(code(), assigned);
InstructionOperand* spill_operand = nullptr;
if (!range->TopLevel()->HasNoSpillType()) {
spill_operand = range->TopLevel()->GetSpillOperand();
}
range->ConvertUsesToOperand(assigned, spill_operand);
if (!range->IsChild() && spill_operand != nullptr) {
range->CommitSpillsAtDefinition(code(), spill_operand,
range->has_slot_use());
}
}
}
@ -977,7 +1012,7 @@ SpillRange* RegisterAllocator::AssignSpillRangeToLiveRange(LiveRange* range) {
bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) {
if (range->IsChild() || !range->is_phi()) return false;
DCHECK(range->HasNoSpillType());
DCHECK(!range->HasSpillOperand());
auto lookup = phi_map_.find(range->id());
DCHECK(lookup != phi_map_.end());
@ -1040,12 +1075,16 @@ bool RegisterAllocator::TryReuseSpillForPhi(LiveRange* range) {
}
auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
if (pos == nullptr) {
auto spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
auto spill_range = range->TopLevel()->HasSpillRange()
? range->TopLevel()->GetSpillRange()
: AssignSpillRangeToLiveRange(range->TopLevel());
CHECK(first_op_spill->TryMerge(spill_range));
Spill(range);
return true;
} else if (pos->pos().Value() > range->Start().NextInstruction().Value()) {
auto spill_range = AssignSpillRangeToLiveRange(range->TopLevel());
auto spill_range = range->TopLevel()->HasSpillRange()
? range->TopLevel()->GetSpillRange()
: AssignSpillRangeToLiveRange(range->TopLevel());
CHECK(first_op_spill->TryMerge(spill_range));
SpillBetween(range, range->Start(), pos->pos());
DCHECK(UnhandledIsSorted());
@ -1304,6 +1343,8 @@ void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
if (output->IsUnallocated()) {
// Unsupported.
DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
live->Remove(out_vreg);
} else if (output->IsConstant()) {
@ -1344,14 +1385,22 @@ void RegisterAllocator::ProcessInstructions(const InstructionBlock* block,
use_pos = curr_position.InstructionEnd();
}
Use(block_start_position, use_pos, input, nullptr);
if (input->IsUnallocated()) {
live->Add(UnallocatedOperand::cast(input)->virtual_register());
UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
int vreg = unalloc->virtual_register();
live->Add(vreg);
if (unalloc->HasSlotPolicy()) {
LiveRangeFor(vreg)->set_has_slot_use(true);
}
}
Use(block_start_position, use_pos, input, nullptr);
}
for (size_t i = 0; i < instr->TempCount(); i++) {
auto temp = instr->TempAt(i);
// Unsupported.
DCHECK_IMPLIES(temp->IsUnallocated(),
!UnallocatedOperand::cast(temp)->HasSlotPolicy());
if (instr->ClobbersTemps()) {
if (temp->IsRegister()) continue;
if (temp->IsUnallocated()) {
@ -1773,20 +1822,25 @@ void RegisterAllocator::BuildLiveRanges() {
for (auto range : live_ranges()) {
if (range == nullptr) continue;
range->kind_ = RequiredRegisterKind(range->id());
// Give slots to all ranges with a non fixed slot use.
if (range->has_slot_use() && range->HasNoSpillType()) {
AssignSpillRangeToLiveRange(range);
}
// TODO(bmeurer): This is a horrible hack to make sure that for constant
// live ranges, every use requires the constant to be in a register.
// Without this hack, all uses with "any" policy would get the constant
// operand assigned.
if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
for (auto pos = range->first_pos(); pos != nullptr; pos = pos->next_) {
pos->register_beneficial_ = true;
// TODO(dcarney): should the else case assert requires_reg_ == false?
if (pos->type() == UsePositionType::kRequiresSlot) continue;
UsePositionType new_type = UsePositionType::kAny;
// Can't mark phis as needing a register.
if (!code()
->InstructionAt(pos->pos().InstructionIndex())
->IsGapMoves()) {
pos->requires_reg_ = true;
new_type = UsePositionType::kRequiresRegister;
}
pos->set_type(new_type, true);
}
}
}

View File

@ -141,6 +141,9 @@ class UseInterval FINAL : public ZoneObject {
};
enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };
// Representation of a use position.
class UsePosition FINAL : public ZoneObject {
public:
@ -152,22 +155,27 @@ class UsePosition FINAL : public ZoneObject {
InstructionOperand* hint() const { return hint_; }
bool HasHint() const;
bool RequiresRegister() const;
bool RegisterIsBeneficial() const;
bool RegisterIsBeneficial() const {
return RegisterBeneficialField::decode(flags_);
}
UsePositionType type() const { return TypeField::decode(flags_); }
LifetimePosition pos() const { return pos_; }
UsePosition* next() const { return next_; }
void set_next(UsePosition* next) { next_ = next; }
void set_type(UsePositionType type, bool register_beneficial);
InstructionOperand* const operand_;
InstructionOperand* const hint_;
LifetimePosition const pos_;
UsePosition* next_;
bool requires_reg_ : 1;
bool register_beneficial_ : 1;
private:
typedef BitField8<UsePositionType, 0, 2> TypeField;
typedef BitField8<bool, 2, 1> RegisterBeneficialField;
uint8_t flags_;
DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
@ -233,6 +241,8 @@ class LiveRange FINAL : public ZoneObject {
void set_is_non_loop_phi(bool is_non_loop_phi) {
is_non_loop_phi_ = is_non_loop_phi;
}
bool has_slot_use() const { return has_slot_use_; }
void set_has_slot_use(bool has_slot_use) { has_slot_use_ = has_slot_use; }
// Returns use position in this live range that follows both start
// and last processed use position.
@ -309,7 +319,8 @@ class LiveRange FINAL : public ZoneObject {
void SetSpillRange(SpillRange* spill_range);
void CommitSpillOperand(InstructionOperand* operand);
void CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* operand);
InstructionOperand* operand,
bool might_be_duplicated);
void SetSpillStartIndex(int start) {
spill_start_index_ = Min(start, spill_start_index_);
@ -338,16 +349,18 @@ class LiveRange FINAL : public ZoneObject {
private:
struct SpillAtDefinitionList;
void ConvertUsesToOperand(InstructionOperand* op);
void ConvertUsesToOperand(InstructionOperand* op,
InstructionOperand* spill_op);
UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
void AdvanceLastProcessedMarker(UseInterval* to_start_of,
LifetimePosition but_not_past) const;
// TODO(dcarney): pack this structure better.
int id_;
bool spilled_;
bool is_phi_;
bool is_non_loop_phi_;
bool spilled_ : 1;
bool has_slot_use_ : 1; // Relevant only for parent.
bool is_phi_ : 1;
bool is_non_loop_phi_ : 1;
RegisterKind kind_;
int assigned_register_;
UseInterval* last_interval_;

View File

@ -373,6 +373,9 @@ InstructionOperand InstructionSequenceTest::ConvertInputOp(TestOperand op) {
case kRegister:
return Unallocated(op, UnallocatedOperand::MUST_HAVE_REGISTER,
UnallocatedOperand::USED_AT_START);
case kSlot:
return Unallocated(op, UnallocatedOperand::MUST_HAVE_SLOT,
UnallocatedOperand::USED_AT_START);
case kFixedRegister:
CHECK(0 <= op.value_ && op.value_ < num_general_registers_);
return Unallocated(op, UnallocatedOperand::FIXED_REGISTER, op.value_);

View File

@ -491,6 +491,87 @@ TEST_F(RegisterAllocatorTest, RegressionLoadConstantBeforeSpill) {
Allocate();
}
namespace {
enum class ParameterType { kFixedSlot, kSlot, kRegister, kFixedRegister };
const ParameterType kParameterTypes[] = {
ParameterType::kFixedSlot, ParameterType::kSlot, ParameterType::kRegister,
ParameterType::kFixedRegister};
class SlotConstraintTest : public RegisterAllocatorTest,
public ::testing::WithParamInterface<
::testing::tuple<ParameterType, int>> {
public:
static const int kMaxVariant = 5;
protected:
ParameterType parameter_type() const {
return ::testing::get<0>(B::GetParam());
}
int variant() const { return ::testing::get<1>(B::GetParam()); }
private:
typedef ::testing::WithParamInterface<::testing::tuple<ParameterType, int>> B;
};
}
#if GTEST_HAS_COMBINE
TEST_P(SlotConstraintTest, SlotConstraint) {
StartBlock();
VReg p_0;
switch (parameter_type()) {
case ParameterType::kFixedSlot:
p_0 = Parameter(Slot(-1));
break;
case ParameterType::kSlot:
p_0 = Parameter(Slot(-1));
break;
case ParameterType::kRegister:
p_0 = Parameter(Reg());
break;
case ParameterType::kFixedRegister:
p_0 = Parameter(Reg(1));
break;
}
switch (variant()) {
case 0:
EmitI(Slot(p_0), Reg(p_0));
break;
case 1:
EmitI(Slot(p_0));
break;
case 2:
EmitI(Reg(p_0));
EmitI(Slot(p_0));
break;
case 3:
EmitI(Slot(p_0));
EmitI(Reg(p_0));
break;
case 4:
EmitI(Slot(p_0, -1), Slot(p_0), Reg(p_0), Reg(p_0, 1));
break;
default:
UNREACHABLE();
break;
}
EndBlock(Last());
Allocate();
}
INSTANTIATE_TEST_CASE_P(
RegisterAllocatorTest, SlotConstraintTest,
::testing::Combine(::testing::ValuesIn(kParameterTypes),
::testing::Range(0, SlotConstraintTest::kMaxVariant)));
#endif // GTEST_HAS_COMBINE
} // namespace compiler
} // namespace internal
} // namespace v8