[turbofan] Spill rsi and rdi in their existing locations.
We push the context and the JS function onto the stack as part of frame construction. The register allocator is presented with virtual registers for these values, defined from their corresponding registers, and then goes on to spill them somewhere else on the stack. As a result, each function pays for two redundant spill moves and two unnecessary stack slots.

This change addresses that by presenting these parameters (context and function) to the register allocator as UnallocatedOperands with a "secondary storage". The secondary storage is then associated with the live range as its spill operand. We capture the definition of the live range so that we can later commit the spill (in this case, eliminate it) through a variation of the mechanics of the CommitAssignment phase. The register allocator verifier also needed updating to understand UnallocatedOperands with a secondary storage.

The change also renames SpillAtDefinitionList and related APIs to better capture their intent: the old names suggested a spill happened as soon as the API was called, when in reality only a potential spill location was recorded there, to be committed (or not, in certain cases) after register allocation.

BUG=v8:4548
LOG=n

Review URL: https://codereview.chromium.org/1426943010

Cr-Commit-Position: refs/heads/master@{#31988}
parent ff283f7ded
commit 20f3a07782
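For orientation, a minimal standalone sketch of the redundancy this commit removes (not part of the change; names and numbers are illustrative, and the register codes assume x64, where rsi holds the context and rdi the JS function):

// Illustrative model only: a vreg defined in a fixed register may carry a
// "secondary storage" slot that the prologue already wrote, in which case
// no fresh spill slot or spill move is needed.
#include <cassert>
#include <cstdio>
#include <optional>

struct VRegInfo {
  int fixed_register;                 // e.g. 6 = rsi (context), 7 = rdi (function) on x64
  std::optional<int> secondary_slot;  // frame slot already holding the value
};

// Returns the spill slot for the vreg: reuse the preassigned slot when one
// exists, otherwise allocate a new one (and pay for a spill move).
int AssignSpillSlot(const VRegInfo& v, int* next_free_slot, int* spill_moves) {
  if (v.secondary_slot.has_value()) return *v.secondary_slot;  // free reuse
  ++*spill_moves;              // gap move: register -> fresh slot
  return (*next_free_slot)++;  // fresh slot costs frame space
}

int main() {
  int next_free_slot = 4, spill_moves = 0;
  VRegInfo context{/*rsi*/ 6, /*Frame::kContextSlot*/ 2};
  VRegInfo function{/*rdi*/ 7, /*Frame::kJSFunctionSlot*/ 3};
  assert(AssignSpillSlot(context, &next_free_slot, &spill_moves) == 2);
  assert(AssignSpillSlot(function, &next_free_slot, &spill_moves) == 3);
  // Prints "extra slots: 0, extra moves: 0"; before this change both would be 2.
  std::printf("extra slots: %d, extra moves: %d\n", next_free_slot - 4, spill_moves);
  return 0;
}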
@@ -136,6 +136,9 @@ class Frame : public ZoneObject {
     return frame_slot_count_ - 1;
   }
 
+  static const int kContextSlot = 2;
+  static const int kJSFunctionSlot = 3;
+
  private:
   int AllocateAlignedFrameSlot(int width) {
     DCHECK(width == 4 || width == 8);
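A rough sketch of what these constants mean in fp-relative terms (the layout assumption is mine, not stated in the diff): in the standard frame, the context and function slots sit just below the saved frame pointer.

// Sketch under the assumption that slots 0 and 1 hold the return address and
// saved fp, so slot k >= 2 lives at fp - (k - 1) * kPointerSize.
#include <cstdio>

constexpr int kPointerSize = 8;  // x64
constexpr int kContextSlot = 2;
constexpr int kJSFunctionSlot = 3;

constexpr int FpOffsetOfSlot(int slot) { return -(slot - 1) * kPointerSize; }

int main() {
  std::printf("context:  fp%+d\n", FpOffsetOfSlot(kContextSlot));     // fp-8
  std::printf("function: fp%+d\n", FpOffsetOfSlot(kJSFunctionSlot));  // fp-16
  return 0;
}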
@@ -72,6 +72,14 @@ class OperandGenerator {
     return Define(node, ToUnallocatedOperand(location, type, GetVReg(node)));
   }
 
+  InstructionOperand DefineAsDualLocation(Node* node,
+                                          LinkageLocation primary_location,
+                                          LinkageLocation secondary_location) {
+    return Define(node,
+                  ToDualLocationUnallocatedOperand(
+                      primary_location, secondary_location, GetVReg(node)));
+  }
+
   InstructionOperand Use(Node* node) {
     return Use(node, UnallocatedOperand(UnallocatedOperand::NONE,
                                         UnallocatedOperand::USED_AT_START,
@@ -211,6 +219,18 @@ class OperandGenerator {
     return operand;
   }
 
+  UnallocatedOperand ToDualLocationUnallocatedOperand(
+      LinkageLocation primary_location, LinkageLocation secondary_location,
+      int virtual_register) {
+    // We only support the primary location being a register and the secondary
+    // one a slot.
+    DCHECK(primary_location.IsRegister() &&
+           secondary_location.IsCalleeFrameSlot());
+    int reg_id = primary_location.AsRegister();
+    int slot_id = secondary_location.AsCalleeFrameSlot();
+    return UnallocatedOperand(reg_id, slot_id, virtual_register);
+  }
+
   UnallocatedOperand ToUnallocatedOperand(LinkageLocation location,
                                           MachineType type,
                                           int virtual_register) {
@@ -1097,9 +1097,15 @@ void InstructionSelector::VisitGuard(Node* node) {
 void InstructionSelector::VisitParameter(Node* node) {
   OperandGenerator g(this);
   int index = ParameterIndexOf(node->op());
-  Emit(kArchNop,
-       g.DefineAsLocation(node, linkage()->GetParameterLocation(index),
-                          linkage()->GetParameterType(index)));
+  InstructionOperand op =
+      linkage()->ParameterHasSecondaryLocation(index)
+          ? g.DefineAsDualLocation(
+                node, linkage()->GetParameterLocation(index),
+                linkage()->GetParameterSecondaryLocation(index))
+          : g.DefineAsLocation(node, linkage()->GetParameterLocation(index),
+                               linkage()->GetParameterType(index));
+
+  Emit(kArchNop, op);
 }
@@ -192,6 +192,12 @@ class UnallocatedOperand : public InstructionOperand {
     value_ |= LifetimeField::encode(lifetime);
   }
 
+  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
+      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
+    value_ |= HasSecondaryStorageField::encode(true);
+    value_ |= SecondaryStorageField::encode(slot_id);
+  }
+
   // Predicates for the operand policy.
   bool HasAnyPolicy() const {
     return basic_policy() == EXTENDED_POLICY && extended_policy() == ANY;
@@ -222,6 +228,15 @@ class UnallocatedOperand : public InstructionOperand {
     return basic_policy() == EXTENDED_POLICY &&
            extended_policy() == FIXED_DOUBLE_REGISTER;
  }
+  bool HasSecondaryStorage() const {
+    return basic_policy() == EXTENDED_POLICY &&
+           extended_policy() == FIXED_REGISTER &&
+           HasSecondaryStorageField::decode(value_);
+  }
+  int GetSecondaryStorage() const {
+    DCHECK(HasSecondaryStorage());
+    return SecondaryStorageField::decode(value_);
+  }
 
   // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
   BasicPolicy basic_policy() const {
@@ -301,7 +316,9 @@ class UnallocatedOperand : public InstructionOperand {
   // BitFields specific to BasicPolicy::EXTENDED_POLICY.
   class ExtendedPolicyField : public BitField64<ExtendedPolicy, 36, 3> {};
   class LifetimeField : public BitField64<Lifetime, 39, 1> {};
-  class FixedRegisterField : public BitField64<int, 40, 6> {};
+  class HasSecondaryStorageField : public BitField64<bool, 40, 1> {};
+  class FixedRegisterField : public BitField64<int, 41, 6> {};
+  class SecondaryStorageField : public BitField64<int, 47, 3> {};
 
  private:
   explicit UnallocatedOperand(int virtual_register)
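The register index moves from bit 40 to 41 to make room for the has-secondary-storage flag, and the 3-bit slot field comfortably covers kContextSlot (2) and kJSFunctionSlot (3). A self-contained demo of the packing, using a minimal stand-in for BitField64 (the template here is a simplified sketch, not V8's actual implementation):

// Shows how the new UnallocatedOperand(reg_id, slot_id, vreg) constructor
// packs both locations into the 64-bit value_ and how the predicates above
// decode them again.
#include <cassert>
#include <cstdint>

template <typename T, int shift, int size>
struct BitField64 {
  static constexpr uint64_t kMask = ((uint64_t{1} << size) - 1) << shift;
  static constexpr uint64_t encode(T value) {
    return static_cast<uint64_t>(value) << shift;
  }
  static constexpr T decode(uint64_t value) {
    return static_cast<T>((value & kMask) >> shift);
  }
};

using HasSecondaryStorageField = BitField64<bool, 40, 1>;
using FixedRegisterField = BitField64<int, 41, 6>;
using SecondaryStorageField = BitField64<int, 47, 3>;

int main() {
  // Mimic UnallocatedOperand(reg_id, slot_id, vreg) for rsi / kContextSlot.
  uint64_t value = 0;
  value |= FixedRegisterField::encode(6);          // rsi on x64
  value |= HasSecondaryStorageField::encode(true);
  value |= SecondaryStorageField::encode(2);       // Frame::kContextSlot
  assert(FixedRegisterField::decode(value) == 6);
  assert(HasSecondaryStorageField::decode(value));
  assert(SecondaryStorageField::decode(value) == 2);
  return 0;
}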
@@ -524,6 +524,28 @@ LinkageLocation Linkage::GetOsrValueLocation(int index) const {
     return incoming_->GetInputLocation(parameter_index);
   }
 }
 
 
+bool Linkage::ParameterHasSecondaryLocation(int index) const {
+  if (incoming_->kind() != CallDescriptor::kCallJSFunction) return false;
+  LinkageLocation loc = GetParameterLocation(index);
+  return (loc == regloc(kJSFunctionRegister) ||
+          loc == regloc(kContextRegister));
+}
+
+
+LinkageLocation Linkage::GetParameterSecondaryLocation(int index) const {
+  DCHECK(ParameterHasSecondaryLocation(index));
+  LinkageLocation loc = GetParameterLocation(index);
+
+  if (loc == regloc(kJSFunctionRegister)) {
+    return LinkageLocation::ForCalleeFrameSlot(Frame::kJSFunctionSlot);
+  } else {
+    DCHECK(loc == regloc(kContextRegister));
+    return LinkageLocation::ForCalleeFrameSlot(Frame::kContextSlot);
+  }
+}
+
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
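Concretely, this is the register-to-slot mapping behind the commit title. A hypothetical standalone rendition for x64, where the JS calling convention passes the function in rdi and the context in rsi (the real code compares LinkageLocations rather than register names):

#include <cassert>
#include <string>

constexpr int kContextSlot = 2;     // Frame::kContextSlot
constexpr int kJSFunctionSlot = 3;  // Frame::kJSFunctionSlot

// Illustrative: which preassigned frame slot backs a given incoming register.
int SecondarySlotFor(const std::string& reg) {
  if (reg == "rdi") return kJSFunctionSlot;  // kJSFunctionRegister on x64
  assert(reg == "rsi");                      // kContextRegister on x64
  return kContextSlot;
}

int main() {
  assert(SecondarySlotFor("rdi") == kJSFunctionSlot);
  assert(SecondarySlotFor("rsi") == kContextSlot);
  return 0;
}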
@@ -326,6 +326,9 @@ class Linkage : public ZoneObject {
     return incoming_->GetReturnType(index);
   }
 
+  bool ParameterHasSecondaryLocation(int index) const;
+  LinkageLocation GetParameterSecondaryLocation(int index) const;
+
   // Get the frame offset for a given spill slot. The location depends on the
   // calling convention and the specific frame layout, and may thus be
   // architecture-specific. Negative spill slots indicate arguments on the
@@ -172,7 +172,12 @@ void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
       }
       break;
     case UnallocatedOperand::FIXED_REGISTER:
-      constraint->type_ = kFixedRegister;
+      if (unallocated->HasSecondaryStorage()) {
+        constraint->type_ = kRegisterAndSlot;
+        constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
+      } else {
+        constraint->type_ = kFixedRegister;
+      }
       constraint->value_ = unallocated->fixed_register_index();
       break;
     case UnallocatedOperand::FIXED_DOUBLE_REGISTER:
@@ -225,6 +230,7 @@ void RegisterAllocatorVerifier::CheckConstraint(
       CHECK(op->IsExplicit());
       return;
     case kFixedRegister:
+    case kRegisterAndSlot:
       CHECK(op->IsRegister());
       CHECK_EQ(LocationOperand::cast(op)->GetRegister().code(),
                constraint->value_);
@@ -386,11 +392,13 @@ class OperandMap : public ZoneObject {
     }
   }
 
-  void Define(Zone* zone, const InstructionOperand* op, int virtual_register) {
+  MapValue* Define(Zone* zone, const InstructionOperand* op,
+                   int virtual_register) {
     auto value = new (zone) MapValue();
     value->define_vreg = virtual_register;
     auto res = map().insert(std::make_pair(op, value));
     if (!res.second) res.first->second = value;
+    return value;
   }
 
   void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
@@ -704,7 +712,20 @@ void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
     }
     for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
       int virtual_register = op_constraints[count].virtual_register_;
-      current->Define(zone(), instr->OutputAt(i), virtual_register);
+      OperandMap::MapValue* value =
+          current->Define(zone(), instr->OutputAt(i), virtual_register);
+      if (op_constraints[count].type_ == kRegisterAndSlot) {
+        const AllocatedOperand* reg_op =
+            AllocatedOperand::cast(instr->OutputAt(i));
+        MachineType mt = reg_op->machine_type();
+        const AllocatedOperand* stack_op = AllocatedOperand::New(
+            zone(), LocationOperand::LocationKind::STACK_SLOT, mt,
+            op_constraints[count].spilled_slot_);
+        auto insert_result =
+            current->map().insert(std::make_pair(stack_op, value));
+        DCHECK(insert_result.second);
+        USE(insert_result);
+      }
     }
   }
 }
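Note the indexing fix above: `op_constraints` runs over all of the instruction's inputs, temps, and outputs, so it must be indexed by the running `count`, not by the output-only index `i`. Conceptually, a kRegisterAndSlot definition makes one virtual register observable through two operands at once. A sketch of that aliasing idea (container and key types are mine, simplified from the verifier's operand map):

#include <cassert>
#include <map>
#include <string>

struct MapValue {
  int define_vreg;
};

int main() {
  std::map<std::string, MapValue*> operand_map;
  MapValue value{/*vreg*/ 41};
  operand_map["rsi"] = &value;     // the fixed-register definition
  operand_map["[fp-8]"] = &value;  // alias: the preassigned stack slot
  // Later reads from either location verify against the same vreg.
  assert(operand_map["rsi"]->define_vreg ==
         operand_map["[fp-8]"]->define_vreg);
  return 0;
}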
@@ -36,12 +36,14 @@ class RegisterAllocatorVerifier final : public ZoneObject {
     kNone,
     kNoneDouble,
     kExplicit,
-    kSameAsFirst
+    kSameAsFirst,
+    kRegisterAndSlot
   };
 
   struct OperandConstraint {
     ConstraintType type_;
     int value_;  // subkind index when relevant
+    int spilled_slot_;
     int virtual_register_;
   };
@@ -671,13 +671,13 @@ unsigned LiveRange::GetSize() {
 }
 
 
-struct TopLevelLiveRange::SpillAtDefinitionList : ZoneObject {
-  SpillAtDefinitionList(int gap_index, InstructionOperand* operand,
-                        SpillAtDefinitionList* next)
+struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
+  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
+                         SpillMoveInsertionList* next)
       : gap_index(gap_index), operand(operand), next(next) {}
   const int gap_index;
   InstructionOperand* const operand;
-  SpillAtDefinitionList* const next;
+  SpillMoveInsertionList* const next;
 };
@@ -687,12 +687,13 @@ TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineType machine_type)
       last_child_id_(0),
       splintered_from_(nullptr),
       spill_operand_(nullptr),
-      spills_at_definition_(nullptr),
+      spill_move_insertion_locations_(nullptr),
       spilled_in_deferred_blocks_(false),
       spill_start_index_(kMaxInt),
       last_child_(this),
       last_pos_(nullptr),
-      splinter_(nullptr) {
+      splinter_(nullptr),
+      has_preassigned_slot_(false) {
   bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
 }
@@ -704,11 +705,11 @@ int TopLevelLiveRange::debug_virt_reg() const {
 #endif
 
 
-void TopLevelLiveRange::SpillAtDefinition(Zone* zone, int gap_index,
-                                          InstructionOperand* operand) {
+void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
+                                            InstructionOperand* operand) {
   DCHECK(HasNoSpillType());
-  spills_at_definition_ = new (zone)
-      SpillAtDefinitionList(gap_index, operand, spills_at_definition_);
+  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
+      gap_index, operand, spill_move_insertion_locations_);
 }
@@ -754,7 +755,7 @@ void TopLevelLiveRange::MarkSpilledInDeferredBlock(
 
   spill_start_index_ = -1;
   spilled_in_deferred_blocks_ = true;
-  spills_at_definition_ = nullptr;
+  spill_move_insertion_locations_ = nullptr;
 }
@@ -794,25 +795,26 @@ bool TopLevelLiveRange::TryCommitSpillInDeferredBlock(
 }
 
 
-void TopLevelLiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
-                                                 const InstructionOperand& op,
-                                                 bool might_be_duplicated) {
-  DCHECK_IMPLIES(op.IsConstant(), spills_at_definition_ == nullptr);
+void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
+                                         const InstructionOperand& op,
+                                         bool might_be_duplicated) {
+  DCHECK_IMPLIES(op.IsConstant(), spill_move_insertion_locations() == nullptr);
   auto zone = sequence->zone();
 
-  for (auto to_spill = spills_at_definition_; to_spill != nullptr;
+  for (auto to_spill = spill_move_insertion_locations(); to_spill != nullptr;
        to_spill = to_spill->next) {
     auto instr = sequence->InstructionAt(to_spill->gap_index);
     auto move = instr->GetOrCreateParallelMove(Instruction::START, zone);
     // Skip insertion if it's possible that the move exists already as a
     // constraint move from a fixed output register to a slot.
-    if (might_be_duplicated) {
+    if (might_be_duplicated || has_preassigned_slot()) {
       bool found = false;
       for (auto move_op : *move) {
         if (move_op->IsEliminated()) continue;
         if (move_op->source().Equals(*to_spill->operand) &&
             move_op->destination().Equals(op)) {
           found = true;
+          if (has_preassigned_slot()) move_op->Eliminate();
           break;
         }
       }
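This is where the redundant spill actually disappears: with a preassigned slot, a matching register-to-slot move is not merely skipped but eliminated, since frame construction already stored the value there. A toy model of that decision (structures simplified; not V8's ParallelMove API):

#include <cassert>
#include <string>
#include <vector>

struct MoveOperands {
  std::string source, destination;
  bool eliminated = false;
};

// Walk the gap's parallel move looking for src -> dst; when the live range
// has a preassigned slot, a matching move is eliminated outright because the
// prologue already wrote the slot. Otherwise the spill move is inserted.
void CommitSpillMove(std::vector<MoveOperands>& parallel_move,
                     const std::string& src, const std::string& dst,
                     bool has_preassigned_slot) {
  for (MoveOperands& m : parallel_move) {
    if (m.eliminated) continue;
    if (m.source == src && m.destination == dst) {
      if (has_preassigned_slot) m.eliminated = true;  // redundant store
      return;  // found: never insert a duplicate
    }
  }
  parallel_move.push_back({src, dst});
}

int main() {
  std::vector<MoveOperands> gap_move = {{"rsi", "[fp-8]"}};
  CommitSpillMove(gap_move, "rsi", "[fp-8]", /*has_preassigned_slot=*/true);
  assert(gap_move.size() == 1 && gap_move[0].eliminated);
  return 0;
}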
@@ -1609,7 +1611,7 @@ void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
     const InstructionBlock* successor = code()->InstructionBlockAt(succ);
     DCHECK(successor->PredecessorCount() == 1);
     int gap_index = successor->first_instruction_index();
-    range->SpillAtDefinition(allocation_zone(), gap_index, output);
+    range->RecordSpillLocation(allocation_zone(), gap_index, output);
     range->SetSpillStartIndex(gap_index);
   }
 }
@@ -1642,6 +1644,17 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
       int output_vreg = first_output->virtual_register();
       UnallocatedOperand output_copy(UnallocatedOperand::ANY, output_vreg);
       bool is_tagged = code()->IsReference(output_vreg);
+      if (first_output->HasSecondaryStorage()) {
+        range->MarkHasPreassignedSlot();
+        InstructionOperand* spill_op = AllocatedOperand::New(
+            data()->code_zone(), LocationOperand::LocationKind::STACK_SLOT,
+            range->machine_type(), first_output->GetSecondaryStorage());
+        range->RecordSpillLocation(allocation_zone(), instr_index + 1,
+                                   first_output);
+        range->SetSpillOperand(spill_op);
+        range->SetSpillStartIndex(instr_index + 1);
+        assigned = true;
+      }
       AllocateFixed(first_output, instr_index, is_tagged);
 
       // This value is produced on the stack, we never need to spill it.
@@ -1658,8 +1671,8 @@ void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
       // Make sure we add a gap move for spilling (if we have not done
       // so already).
       if (!assigned) {
-        range->SpillAtDefinition(allocation_zone(), instr_index + 1,
-                                 first_output);
+        range->RecordSpillLocation(allocation_zone(), instr_index + 1,
+                                   first_output);
         range->SetSpillStartIndex(instr_index + 1);
       }
     }
@@ -1744,7 +1757,7 @@ void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
     }
     auto live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
     int gap_index = block->first_instruction_index();
-    live_range->SpillAtDefinition(allocation_zone(), gap_index, &output);
+    live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
     live_range->SetSpillStartIndex(gap_index);
     // We use the phi-ness of some nodes in some later heuristics.
     live_range->set_is_phi(true);
@@ -2959,7 +2972,7 @@ void SpillSlotLocator::LocateSpillSlots() {
       }
     }
   } else {
-    auto spills = range->spills_at_definition();
+    auto spills = range->spill_move_insertion_locations();
     DCHECK_NOT_NULL(spills);
     for (; spills != nullptr; spills = spills->next) {
       code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
@@ -3032,7 +3045,7 @@ void OperandAssigner::CommitAssignment() {
                                              spill_operand)) {
         // Spill at definition if the range isn't spilled only in deferred
         // blocks.
-        top_range->CommitSpillsAtDefinition(
+        top_range->CommitSpillMoves(
             data()->code(), spill_operand,
             top_range->has_slot_use() || top_range->spilled());
       }
@@ -3073,6 +3086,7 @@ void ReferenceMapPopulator::PopulateReferenceMaps() {
     if (!data()->IsReference(range)) continue;
     // Skip empty live ranges.
     if (range->IsEmpty()) continue;
+    if (range->has_preassigned_slot()) continue;
 
     // Find the extent of the range and its children.
     int start = range->Start().ToInstructionIndex();
@@ -532,16 +532,16 @@ class TopLevelLiveRange final : public LiveRange {
 
   AllocatedOperand GetSpillRangeOperand() const;
 
-  void SpillAtDefinition(Zone* zone, int gap_index,
-                         InstructionOperand* operand);
+  void RecordSpillLocation(Zone* zone, int gap_index,
+                           InstructionOperand* operand);
   void SetSpillOperand(InstructionOperand* operand);
   void SetSpillStartIndex(int start) {
     spill_start_index_ = Min(start, spill_start_index_);
   }
 
-  void CommitSpillsAtDefinition(InstructionSequence* sequence,
-                                const InstructionOperand& operand,
-                                bool might_be_duplicated);
+  void CommitSpillMoves(InstructionSequence* sequence,
+                        const InstructionOperand& operand,
+                        bool might_be_duplicated);
 
   // If all the children of this range are spilled in deferred blocks, and if
   // for any non-spilled child with a use position requiring a slot, that range
@@ -576,10 +576,10 @@ class TopLevelLiveRange final : public LiveRange {
     return spilled_in_deferred_blocks_;
   }
 
-  struct SpillAtDefinitionList;
+  struct SpillMoveInsertionList;
 
-  SpillAtDefinitionList* spills_at_definition() const {
-    return spills_at_definition_;
+  SpillMoveInsertionList* spill_move_insertion_locations() const {
+    return spill_move_insertion_locations_;
   }
   void set_last_child(LiveRange* range) { last_child_ = range; }
   LiveRange* last_child() const { return last_child_; }
@@ -594,6 +594,9 @@ class TopLevelLiveRange final : public LiveRange {
     splinter->SetSplinteredFrom(this);
   }
 
+  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
+  bool has_preassigned_slot() const { return has_preassigned_slot_; }
+
  private:
   void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);
@@ -610,7 +613,7 @@ class TopLevelLiveRange final : public LiveRange {
     InstructionOperand* spill_operand_;
     SpillRange* spill_range_;
   };
-  SpillAtDefinitionList* spills_at_definition_;
+  SpillMoveInsertionList* spill_move_insertion_locations_;
   // TODO(mtrofin): generalize spilling after definition, currently specialized
   // just for spill in a single deferred block.
   bool spilled_in_deferred_blocks_;
@@ -618,6 +621,7 @@ class TopLevelLiveRange final : public LiveRange {
   LiveRange* last_child_;
   UsePosition* last_pos_;
   TopLevelLiveRange* splinter_;
+  bool has_preassigned_slot_;
 
   DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
 };