[compiler] Make StateValuesAccess methods inlinable
This moves several simple StateValuesAccess methods, as well as
SparseInputMask::InputIterator::IsReal, into their header files so they can
be more easily inlined. This gives about a 7% improvement to the
BackgroundSelectInstructions runtime call stat. It also marks some methods
called by the newly inlined methods as V8_EXPORT_PRIVATE so that the
component build tests can still build.

Bug: v8:10051
Change-Id: I3e34977a4fa660d3f4f55fd4f2c0b2370d5d2bc2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2023559
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66078}
commit 14d1b9e944
parent 0a49d3059a
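For context, the pattern applied throughout the patch is the standard one for making hot accessors inlinable: the definition moves from the .cc file into the class body in the header, so callers in other translation units can see and inline it; anything the now-inline code calls across the component boundary then additionally needs V8_EXPORT_PRIVATE so the component (shared-library) build still builds. A minimal sketch of the before/after shape, using a hypothetical Counter class rather than the real V8 types:

// counter.h -- hypothetical example, not V8 code.
//
// Before: only a declaration is visible to callers; the body lives in
// counter.cc, so other translation units cannot inline the call.
//
//   class Counter {
//    public:
//     bool IsZero() const;  // defined out of line in counter.cc
//    private:
//     int value_ = 0;
//   };
//
// After: the definition moves into the class body in the header, so every
// includer sees it and the compiler can inline it at each call site.
class Counter {
 public:
  bool IsZero() const { return value_ == 0; }  // now inlinable everywhere
 private:
  int value_ = 0;
};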
@@ -286,11 +286,6 @@ Node* SparseInputMask::InputIterator::GetReal() const {
   return parent_->InputAt(real_index_);
 }
 
-bool SparseInputMask::InputIterator::IsReal() const {
-  return bit_mask_ == SparseInputMask::kDenseBitMask ||
-         (bit_mask_ & kEntryMask);
-}
-
 bool SparseInputMask::InputIterator::IsEnd() const {
   return (bit_mask_ == kEndMarker) ||
          (bit_mask_ == SparseInputMask::kDenseBitMask &&
@@ -257,7 +257,7 @@ class SparseInputMask final {
   static const int kMaxSparseInputs = (sizeof(BitMaskType) * kBitsPerByte - 1);
 
   // An iterator over a node's sparse inputs.
-  class InputIterator final {
+  class V8_EXPORT_PRIVATE InputIterator final {
    public:
     InputIterator() = default;
     InputIterator(BitMaskType bit_mask, Node* parent);
@@ -281,7 +281,10 @@ class SparseInputMask final {
     }
 
     // True if the current sparse input is a real input node.
-    bool IsReal() const;
+    bool IsReal() const {
+      return bit_mask_ == SparseInputMask::kDenseBitMask ||
+             (bit_mask_ & kEntryMask);
+    }
 
     // True if the current sparse input is an empty value.
     bool IsEmpty() const { return !IsReal(); }
@@ -309,7 +312,7 @@ class SparseInputMask final {
   int CountReal() const;
 
   // Returns an iterator over the sparse inputs of {node}.
-  InputIterator IterateOverInputs(Node* node);
+  V8_EXPORT_PRIVATE InputIterator IterateOverInputs(Node* node);
 
  private:
   //
@@ -375,9 +378,10 @@ Type TypeGuardTypeOf(Operator const*) V8_WARN_UNUSED_RESULT;
 
 int OsrValueIndexOf(Operator const*) V8_WARN_UNUSED_RESULT;
 
-SparseInputMask SparseInputMaskOf(Operator const*) V8_WARN_UNUSED_RESULT;
+V8_EXPORT_PRIVATE SparseInputMask SparseInputMaskOf(Operator const*)
+    V8_WARN_UNUSED_RESULT;
 
-ZoneVector<MachineType> const* MachineTypesOf(Operator const*)
+V8_EXPORT_PRIVATE ZoneVector<MachineType> const* MachineTypesOf(Operator const*)
     V8_WARN_UNUSED_RESULT;
 
 // The ArgumentsElementsState and ArgumentsLengthState can describe the layout
@@ -304,109 +304,6 @@ Node* StateValuesCache::GetNodeForValues(Node** values, size_t count,
   return tree;
 }
 
-StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) {
-  stack_[current_depth_] =
-      SparseInputMaskOf(node->op()).IterateOverInputs(node);
-  EnsureValid();
-}
-
-SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() {
-  DCHECK_LE(0, current_depth_);
-  DCHECK_GT(kMaxInlineDepth, current_depth_);
-  return &(stack_[current_depth_]);
-}
-
-void StateValuesAccess::iterator::Push(Node* node) {
-  current_depth_++;
-  CHECK_GT(kMaxInlineDepth, current_depth_);
-  stack_[current_depth_] =
-      SparseInputMaskOf(node->op()).IterateOverInputs(node);
-}
-
-
-void StateValuesAccess::iterator::Pop() {
-  DCHECK_LE(0, current_depth_);
-  current_depth_--;
-}
-
-bool StateValuesAccess::iterator::done() const { return current_depth_ < 0; }
-
-void StateValuesAccess::iterator::Advance() {
-  Top()->Advance();
-  EnsureValid();
-}
-
-void StateValuesAccess::iterator::EnsureValid() {
-  while (true) {
-    SparseInputMask::InputIterator* top = Top();
-
-    if (top->IsEmpty()) {
-      // We are on a valid (albeit optimized out) node.
-      return;
-    }
-
-    if (top->IsEnd()) {
-      // We have hit the end of this iterator. Pop the stack and move to the
-      // next sibling iterator.
-      Pop();
-      if (done()) {
-        // Stack is exhausted, we have reached the end.
-        return;
-      }
-      Top()->Advance();
-      continue;
-    }
-
-    // At this point the value is known to be live and within our input nodes.
-    Node* value_node = top->GetReal();
-
-    if (value_node->opcode() == IrOpcode::kStateValues ||
-        value_node->opcode() == IrOpcode::kTypedStateValues) {
-      // Nested state, we need to push to the stack.
-      Push(value_node);
-      continue;
-    }
-
-    // We are on a valid node, we can stop the iteration.
-    return;
-  }
-}
-
-Node* StateValuesAccess::iterator::node() { return Top()->Get(nullptr); }
-
-MachineType StateValuesAccess::iterator::type() {
-  Node* parent = Top()->parent();
-  if (parent->opcode() == IrOpcode::kStateValues) {
-    return MachineType::AnyTagged();
-  } else {
-    DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());
-
-    if (Top()->IsEmpty()) {
-      return MachineType::None();
-    } else {
-      ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
-      return (*types)[Top()->real_index()];
-    }
-  }
-}
-
-bool StateValuesAccess::iterator::operator!=(iterator const& other) {
-  // We only allow comparison with end().
-  CHECK(other.done());
-  return !done();
-}
-
-StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() {
-  Advance();
-  return *this;
-}
-
-
-StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() {
-  return TypedNode(node(), type());
-}
-
-
 size_t StateValuesAccess::size() {
   size_t count = 0;
   SparseInputMask mask = SparseInputMaskOf(node_->op());
@@ -92,25 +92,107 @@ class V8_EXPORT_PRIVATE StateValuesAccess {
   class V8_EXPORT_PRIVATE iterator {
    public:
     // Bare minimum of operators needed for range iteration.
-    bool operator!=(iterator const& other);
-    iterator& operator++();
-    TypedNode operator*();
+    bool operator!=(iterator const& other) {
+      // We only allow comparison with end().
+      CHECK(other.done());
+      return !done();
+    }
+
+    iterator& operator++() {
+      Advance();
+      return *this;
+    }
+
+    TypedNode operator*() { return TypedNode(node(), type()); }
 
    private:
     friend class StateValuesAccess;
 
     iterator() : current_depth_(-1) {}
-    explicit iterator(Node* node);
+    explicit iterator(Node* node) : current_depth_(0) {
+      stack_[current_depth_] =
+          SparseInputMaskOf(node->op()).IterateOverInputs(node);
+      EnsureValid();
+    }
 
-    Node* node();
-    MachineType type();
-    bool done() const;
-    void Advance();
-    void EnsureValid();
+    Node* node() { return Top()->Get(nullptr); }
+
+    MachineType type() {
+      Node* parent = Top()->parent();
+      if (parent->opcode() == IrOpcode::kStateValues) {
+        return MachineType::AnyTagged();
+      } else {
+        DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());
+
+        if (Top()->IsEmpty()) {
+          return MachineType::None();
+        } else {
+          ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
+          return (*types)[Top()->real_index()];
+        }
+      }
+    }
+
+    bool done() const { return current_depth_ < 0; }
+
+    void Advance() {
+      Top()->Advance();
+      EnsureValid();
+    }
+
+    void EnsureValid() {
+      while (true) {
+        SparseInputMask::InputIterator* top = Top();
+
+        if (top->IsEmpty()) {
+          // We are on a valid (albeit optimized out) node.
+          return;
+        }
+
+        if (top->IsEnd()) {
+          // We have hit the end of this iterator. Pop the stack and move to the
+          // next sibling iterator.
+          Pop();
+          if (done()) {
+            // Stack is exhausted, we have reached the end.
+            return;
+          }
+          Top()->Advance();
+          continue;
+        }
+
+        // At this point the value is known to be live and within our input
+        // nodes.
+        Node* value_node = top->GetReal();
+
+        if (value_node->opcode() == IrOpcode::kStateValues ||
+            value_node->opcode() == IrOpcode::kTypedStateValues) {
+          // Nested state, we need to push to the stack.
+          Push(value_node);
+          continue;
+        }
+
+        // We are on a valid node, we can stop the iteration.
+        return;
+      }
+    }
 
-    SparseInputMask::InputIterator* Top();
-    void Push(Node* node);
-    void Pop();
+    SparseInputMask::InputIterator* Top() {
+      DCHECK_LE(0, current_depth_);
+      DCHECK_GT(kMaxInlineDepth, current_depth_);
+      return &(stack_[current_depth_]);
+    }
+
+    void Push(Node* node) {
+      current_depth_++;
+      CHECK_GT(kMaxInlineDepth, current_depth_);
+      stack_[current_depth_] =
+          SparseInputMaskOf(node->op()).IterateOverInputs(node);
+    }
+
+    void Pop() {
+      DCHECK_LE(0, current_depth_);
+      current_depth_--;
+    }
 
     static const int kMaxInlineDepth = 8;
     SparseInputMask::InputIterator stack_[kMaxInlineDepth];
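For reference, the "bare minimum of operators needed for range iteration" in the header above exists to support range-for loops over the values of a (Typed)StateValues node, which is how the instruction selector walks deoptimization state. A usage sketch, not part of the patch; VisitStateValues and state_values are illustrative names, while StateValuesAccess and TypedNode come from state-values-utils.h:

#include "src/compiler/state-values-utils.h"

namespace v8 {
namespace internal {
namespace compiler {

// Walks every value slot described by a (Typed)StateValues node.
void VisitStateValues(Node* state_values) {
  for (StateValuesAccess::TypedNode entry : StateValuesAccess(state_values)) {
    if (entry.node == nullptr) continue;  // optimized-out slot (type is None)
    // ... record or emit code for entry.node with machine type entry.type ...
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8

With the iterator operators and their helpers defined inline in the header, loops of this shape no longer pay an out-of-line call per step, which is where the reported BackgroundSelectInstructions improvement comes from.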