[maglev] Introduce load elimination

Keep track of simple field loads and stores in NodeInfo, and try to
reuse them where possible instead of recalculating them.
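
Roughly, the graph builder now keeps a per-state cache from (object node, property
name) to the node that last produced that property's value; loads consult the cache
before emitting a fresh field load, stores update it, and side-effecting nodes flush
the non-constant half. A minimal standalone sketch of that shape (illustrative only:
the type and member names below are invented, while in the actual change the two maps
live on KnownNodeAspects and are keyed by compiler::NameRef):

// Illustrative sketch, not V8 code: a cache of known property values.
#include <map>
#include <string>
#include <utility>

struct ValueNode;  // stand-in for a Maglev IR node

struct LoadEliminationCache {
  using Key = std::pair<ValueNode*, std::string>;  // (object, property name)

  // Survives side effects; guarded by a compilation dependency on constness.
  std::map<Key, ValueNode*> loaded_constant_properties;
  // Flushed whenever a node with arbitrary side effects is emitted.
  std::map<Key, ValueNode*> loaded_properties;

  void Record(ValueNode* object, const std::string& name, ValueNode* value,
              bool is_const) {
    (is_const ? loaded_constant_properties : loaded_properties)
        .emplace(Key{object, name}, value);
  }

  // Returns the cached value node, or nullptr if a fresh load must be built.
  ValueNode* TryReuse(ValueNode* object, const std::string& name) const {
    for (const auto* map : {&loaded_properties, &loaded_constant_properties}) {
      auto it = map->find(Key{object, name});
      if (it != map->end()) return it->second;
    }
    return nullptr;
  }

  void ClearForSideEffect() { loaded_properties.clear(); }
};

With that picture, VisitGetNamedProperty below amounts to: try to reuse, and only on a
miss build the named access and record the result.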

Bug: v8:7700
Change-Id: I1f5eb3cb37ac76bcbc1ce75f243a36a31e71c907
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3974888
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83909}
Authored by Leszek Swirski on 2022-10-24 16:09:43 +02:00; committed by V8 LUCI CQ
parent 1ac9d34c42
commit 5537dc9c20
4 changed files with 114 additions and 32 deletions

--- a/src/compiler/heap-refs.h
+++ b/src/compiler/heap-refs.h

@@ -261,16 +261,6 @@ class V8_EXPORT_PRIVATE ObjectRef {
       return base::hash_combine(ref.object().address());
     }
   };
-  struct Equal {
-    bool operator()(const ObjectRef& lhs, const ObjectRef& rhs) const {
-      return lhs.equals(rhs);
-    }
-  };
-  struct Less {
-    bool operator()(const ObjectRef& lhs, const ObjectRef& rhs) const {
-      return lhs.data_ < rhs.data_;
-    }
-  };
 
  protected:
   JSHeapBroker* broker() const;
@@ -290,20 +280,24 @@
   friend class TinyRef;
 
   friend std::ostream& operator<<(std::ostream& os, const ObjectRef& ref);
+  friend bool operator<(const ObjectRef& lhs, const ObjectRef& rhs);
 
   JSHeapBroker* broker_;
 };
 
-inline bool operator==(ObjectRef const& obj1, ObjectRef const& obj2) {
-  return obj1.equals(obj2);
+inline bool operator==(const ObjectRef& lhs, const ObjectRef& rhs) {
+  return lhs.equals(rhs);
 }
 
+inline bool operator<(const ObjectRef& lhs, const ObjectRef& rhs) {
+  return lhs.data_ < rhs.data_;
+}
+
 template <class T>
-using ZoneRefUnorderedSet =
-    ZoneUnorderedSet<T, ObjectRef::Hash, ObjectRef::Equal>;
+using ZoneRefUnorderedSet = ZoneUnorderedSet<T, ObjectRef::Hash>;
 
 template <class K, class V>
-using ZoneRefMap = ZoneMap<K, V, ObjectRef::Less>;
+using ZoneRefMap = ZoneMap<K, V>;
 
 // Temporary class that carries information from a Map. We'd like to remove
 // this class and use MapRef instead, but we can't as long as we support the

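The heap-refs.h change above (dropping ObjectRef::Equal/Less in favour of free
operator== and operator<) appears to be what makes std::pair<ValueNode*,
compiler::NameRef> usable as a ZoneMap key further down: std::less<> on a pair, and
the lhs_it->first < rhs_it->first comparison inside DestructivelyIntersect, both go
through the pair's lexicographic operator<, which needs an operator< on the ref type
itself rather than a nested comparator struct. A small illustration with stand-in
types (Ref and Node are invented names, not the V8 classes):

// Illustration only: why pair keys want a free operator< on the ref type.
#include <map>
#include <utility>

struct Node;  // stand-in for ValueNode

struct Ref {  // stand-in for compiler::NameRef / ObjectRef
  const void* data_;
  friend bool operator<(const Ref& lhs, const Ref& rhs) {
    return lhs.data_ < rhs.data_;
  }
};

// std::pair's operator< compares .first, then .second, via their operator<.
// With only a nested Less-style functor, std::less on this pair type could
// not order the keys, since it never looks for such a functor.
using LoadedProperties = std::map<std::pair<Node*, Ref>, Node*>;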
--- a/src/maglev/maglev-graph-builder.cc
+++ b/src/maglev/maglev-graph-builder.cc

@@ -21,6 +21,7 @@
 #include "src/interpreter/bytecodes.h"
 #include "src/maglev/maglev-compilation-info.h"
 #include "src/maglev/maglev-compilation-unit.h"
+#include "src/maglev/maglev-graph-printer.h"
 #include "src/maglev/maglev-interpreter-frame-state.h"
 #include "src/maglev/maglev-ir.h"
 #include "src/objects/elements-kind.h"
@@ -1552,7 +1553,7 @@ bool MaglevGraphBuilder::TryBuildStoreField(
 }
 
 bool MaglevGraphBuilder::TryBuildPropertyLoad(
-    ValueNode* receiver, ValueNode* lookup_start_object,
+    ValueNode* receiver, ValueNode* lookup_start_object, compiler::NameRef name,
     compiler::PropertyAccessInfo const& access_info) {
   if (access_info.holder().has_value() && !access_info.HasDictionaryHolder()) {
     broker()->dependencies()->DependOnStablePrototypeChains(
@@ -1569,6 +1570,9 @@ bool MaglevGraphBuilder::TryBuildPropertyLoad(
     case compiler::PropertyAccessInfo::kDataField:
     case compiler::PropertyAccessInfo::kFastDataConstant:
       BuildLoadField(access_info, lookup_start_object);
+      RecordKnownProperty(lookup_start_object, name,
+                          current_interpreter_frame_.accumulator(),
+                          access_info.IsFastDataConstant());
       return true;
     case compiler::PropertyAccessInfo::kDictionaryProtoDataConstant:
       return TryFoldLoadDictPrototypeConstant(access_info);
@@ -1584,12 +1588,15 @@ bool MaglevGraphBuilder::TryBuildPropertyLoad(
     case compiler::PropertyAccessInfo::kStringLength:
       DCHECK_EQ(receiver, lookup_start_object);
       SetAccumulator(AddNewNode<StringLength>({receiver}));
+      RecordKnownProperty(lookup_start_object, name,
+                          current_interpreter_frame_.accumulator(), true);
       return true;
   }
 }
 
 bool MaglevGraphBuilder::TryBuildPropertyStore(
-    ValueNode* receiver, compiler::PropertyAccessInfo const& access_info) {
+    ValueNode* receiver, compiler::NameRef name,
+    compiler::PropertyAccessInfo const& access_info) {
   if (access_info.holder().has_value()) {
     broker()->dependencies()->DependOnStablePrototypeChains(
         access_info.lookup_start_object_maps(), kStartAtPrototype,
@@ -1601,22 +1608,29 @@ bool MaglevGraphBuilder::TryBuildPropertyStore(
                                       GetAccumulatorTagged());
   } else {
     DCHECK(access_info.IsDataField() || access_info.IsFastDataConstant());
-    return TryBuildStoreField(access_info, receiver);
+    if (TryBuildStoreField(access_info, receiver)) {
+      RecordKnownProperty(receiver, name,
+                          current_interpreter_frame_.accumulator(),
+                          access_info.IsFastDataConstant());
+      return true;
+    }
+    return false;
   }
 }
 
 bool MaglevGraphBuilder::TryBuildPropertyAccess(
-    ValueNode* receiver, ValueNode* lookup_start_object,
+    ValueNode* receiver, ValueNode* lookup_start_object, compiler::NameRef name,
     compiler::PropertyAccessInfo const& access_info,
     compiler::AccessMode access_mode) {
   switch (access_mode) {
     case compiler::AccessMode::kLoad:
-      return TryBuildPropertyLoad(receiver, lookup_start_object, access_info);
+      return TryBuildPropertyLoad(receiver, lookup_start_object, name,
+                                  access_info);
     case compiler::AccessMode::kStore:
     case compiler::AccessMode::kStoreInLiteral:
     case compiler::AccessMode::kDefine:
       DCHECK_EQ(receiver, lookup_start_object);
-      return TryBuildPropertyStore(receiver, access_info);
+      return TryBuildPropertyStore(receiver, name, access_info);
     case compiler::AccessMode::kHas:
       // TODO(victorgomes): BuildPropertyTest.
       return false;
@@ -1689,8 +1703,8 @@ bool MaglevGraphBuilder::TryBuildNamedAccess(
     }
 
     // Generate the actual property access.
-    return TryBuildPropertyAccess(receiver, lookup_start_object, access_info,
-                                  access_mode);
+    return TryBuildPropertyAccess(receiver, lookup_start_object,
+                                  feedback.name(), access_info, access_mode);
   } else {
     // TODO(victorgomes): polymorphic case.
     return false;
@@ -1822,6 +1836,42 @@ bool MaglevGraphBuilder::TryBuildElementAccess(
   }
 }
 
+void MaglevGraphBuilder::RecordKnownProperty(ValueNode* lookup_start_object,
+                                             compiler::NameRef name,
+                                             ValueNode* value, bool is_const) {
+  auto& loaded_properties =
+      is_const ? known_node_aspects().loaded_constant_properties
+               : known_node_aspects().loaded_properties;
+  loaded_properties.emplace(std::make_pair(lookup_start_object, name), value);
+}
+
+bool MaglevGraphBuilder::TryReuseKnownPropertyLoad(
+    ValueNode* lookup_start_object, compiler::NameRef name) {
+  if (auto it = known_node_aspects().loaded_properties.find(
+          {lookup_start_object, name});
+      it != known_node_aspects().loaded_properties.end()) {
+    current_interpreter_frame_.set_accumulator(it->second);
+    if (v8_flags.trace_maglev_graph_building) {
+      std::cout << " * Reusing non-constant loaded property "
+                << PrintNodeLabel(graph_labeller(), it->second) << ": "
+                << PrintNode(graph_labeller(), it->second) << std::endl;
+    }
+    return true;
+  }
+  if (auto it = known_node_aspects().loaded_constant_properties.find(
+          {lookup_start_object, name});
+      it != known_node_aspects().loaded_constant_properties.end()) {
+    current_interpreter_frame_.set_accumulator(it->second);
+    if (v8_flags.trace_maglev_graph_building) {
+      std::cout << " * Reusing constant loaded property "
+                << PrintNodeLabel(graph_labeller(), it->second) << ": "
+                << PrintNode(graph_labeller(), it->second) << std::endl;
+    }
+    return true;
+  }
+  return false;
+}
+
 void MaglevGraphBuilder::VisitGetNamedProperty() {
   // GetNamedProperty <object> <name_index> <slot>
   ValueNode* object = LoadRegisterTagged(0);
@@ -1840,6 +1890,7 @@ void MaglevGraphBuilder::VisitGetNamedProperty() {
       return;
     case compiler::ProcessedFeedback::kNamedAccess:
+      if (TryReuseKnownPropertyLoad(object, name)) return;
       if (TryBuildNamedAccess(object, object,
                               processed_feedback.AsNamedAccess(),
                               compiler::AccessMode::kLoad)) {
@@ -1881,6 +1932,7 @@ void MaglevGraphBuilder::VisitGetNamedPropertyFromSuper() {
       return;
     case compiler::ProcessedFeedback::kNamedAccess:
+      if (TryReuseKnownPropertyLoad(lookup_start_object, name)) return;
       if (TryBuildNamedAccess(receiver, lookup_start_object,
                               processed_feedback.AsNamedAccess(),
                               compiler::AccessMode::kLoad)) {

--- a/src/maglev/maglev-graph-builder.h
+++ b/src/maglev/maglev-graph-builder.h

@@ -852,6 +852,11 @@ class MaglevGraphBuilder {
     // we can no longer assume that objects with unstable maps still have the
     // same map.
     known_node_aspects().unstable_maps.clear();
+    // Similarly, side-effects can change object contents, so we have to clear
+    // our known loaded properties -- however, constant properties are known
+    // to not change (and we added a dependency on this), so we don't have to
+    // clear those.
+    known_node_aspects().loaded_properties.clear();
   }
 
   int next_offset() const {
@@ -982,11 +987,13 @@
                             ValueNode* receiver, ValueNode* value);
   bool TryBuildPropertyLoad(ValueNode* receiver, ValueNode* lookup_start_object,
+                            compiler::NameRef name,
                             compiler::PropertyAccessInfo const& access_info);
-  bool TryBuildPropertyStore(ValueNode* receiver,
+  bool TryBuildPropertyStore(ValueNode* receiver, compiler::NameRef name,
                              compiler::PropertyAccessInfo const& access_info);
   bool TryBuildPropertyAccess(ValueNode* receiver,
                               ValueNode* lookup_start_object,
+                              compiler::NameRef name,
                               compiler::PropertyAccessInfo const& access_info,
                               compiler::AccessMode access_mode);
@@ -1000,6 +1007,15 @@
   bool TryBuildElementAccess(ValueNode* object, ValueNode* index,
                              compiler::ElementAccessFeedback const& feedback);
 
+  // Load elimination -- when loading or storing a simple property without
+  // side effects, record its value, and allow that value to be re-used on
+  // subsequent loads.
+  void RecordKnownProperty(ValueNode* lookup_start_object,
+                           compiler::NameRef name, ValueNode* value,
+                           bool is_const);
+  bool TryReuseKnownPropertyLoad(ValueNode* lookup_start_object,
+                                 compiler::NameRef name);
+
   template <Operation kOperation>
   void BuildGenericUnaryOperationNode();
   template <Operation kOperation>

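The clearing policy in the hunk above is the interesting part: a side effect only
invalidates the non-constant map, because constant field values are guarded by a
compilation dependency rather than by re-checking memory. Continuing the invented
LoadEliminationCache sketch from the top of this page (illustrative only, not V8
code):

// Usage sketch for the invented LoadEliminationCache above.
void Example(LoadEliminationCache& cache, ValueNode* obj, ValueNode* v1,
             ValueNode* v2) {
  cache.Record(obj, "x", v1, /*is_const=*/false);  // plain data field
  cache.Record(obj, "y", v2, /*is_const=*/true);   // fast data constant

  // ...a call or other arbitrarily side-effecting node is emitted here...
  cache.ClearForSideEffect();

  cache.TryReuse(obj, "x");  // nullptr: the call may have rewritten obj.x
  cache.TryReuse(obj, "y");  // still v2: protected by the dependency
}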
--- a/src/maglev/maglev-interpreter-frame-state.h
+++ b/src/maglev/maglev-interpreter-frame-state.h

@@ -29,14 +29,14 @@ class MergePointInterpreterFrameState;
 // left map is mutated to become the result of the intersection. Values that
 // are in both maps are passed to the merging function to be merged with each
 // other -- again, the LHS here is expected to be mutated.
-template <typename Value, typename MergeFunc>
-void DestructivelyIntersect(ZoneMap<ValueNode*, Value>& lhs_map,
-                            const ZoneMap<ValueNode*, Value>& rhs_map,
+template <typename Key, typename Value, typename MergeFunc>
+void DestructivelyIntersect(ZoneMap<Key, Value>& lhs_map,
+                            const ZoneMap<Key, Value>& rhs_map,
                             MergeFunc&& func) {
   // Walk the two maps in lock step. This relies on the fact that ZoneMaps are
   // sorted.
-  typename ZoneMap<ValueNode*, Value>::iterator lhs_it = lhs_map.begin();
-  typename ZoneMap<ValueNode*, Value>::const_iterator rhs_it = rhs_map.begin();
+  typename ZoneMap<Key, Value>::iterator lhs_it = lhs_map.begin();
+  typename ZoneMap<Key, Value>::const_iterator rhs_it = rhs_map.begin();
   while (lhs_it != lhs_map.end() && rhs_it != rhs_map.end()) {
     if (lhs_it->first < rhs_it->first) {
       // Remove from LHS elements that are not in RHS.
@@ -129,7 +129,11 @@ struct NodeInfo {
 
 struct KnownNodeAspects {
   explicit KnownNodeAspects(Zone* zone)
-      : node_infos(zone), stable_maps(zone), unstable_maps(zone) {}
+      : node_infos(zone),
+        stable_maps(zone),
+        unstable_maps(zone),
+        loaded_constant_properties(zone),
+        loaded_properties(zone) {}
 
   KnownNodeAspects(const KnownNodeAspects& other) = delete;
   KnownNodeAspects& operator=(const KnownNodeAspects& other) = delete;
@@ -141,6 +145,8 @@
     clone->node_infos = node_infos;
     clone->stable_maps = stable_maps;
     clone->unstable_maps = unstable_maps;
+    clone->loaded_constant_properties = loaded_constant_properties;
+    clone->loaded_properties = loaded_properties;
     return clone;
   }
@@ -152,6 +158,7 @@
     KnownNodeAspects* clone = zone->New<KnownNodeAspects>(zone);
     clone->node_infos = node_infos;
     clone->stable_maps = stable_maps;
+    clone->loaded_constant_properties = loaded_constant_properties;
     return clone;
   }
@@ -181,6 +188,12 @@
       // We should always add the value even if the set is empty.
       return true;
     });
+    DestructivelyIntersect(
+        loaded_constant_properties, other.loaded_constant_properties,
+        [](ValueNode* lhs, ValueNode* rhs) { return lhs == rhs; });
+    DestructivelyIntersect(
+        loaded_properties, other.loaded_properties,
+        [](ValueNode* lhs, ValueNode* rhs) { return lhs == rhs; });
   }
 
   // TODO(leszeks): Store these more efficiently than with std::map -- in
@@ -190,11 +203,18 @@
   // Permanently valid if checked in a dominator.
   ZoneMap<ValueNode*, NodeInfo> node_infos;
   // TODO(v8:7700): Investigate a better data structure to use than
-  // ZoneHandleSet. Valid across side-effecting calls, as long as we install a
-  // dependency.
+  // ZoneHandleSet.
+  // Valid across side-effecting calls, as long as we install a dependency.
   ZoneMap<ValueNode*, ZoneHandleSet<Map>> stable_maps;
   // Flushed after side-effecting calls.
   ZoneMap<ValueNode*, ZoneHandleSet<Map>> unstable_maps;
+  // Valid across side-effecting calls, as long as we install a dependency.
+  ZoneMap<std::pair<ValueNode*, compiler::NameRef>, ValueNode*>
+      loaded_constant_properties;
+  // Flushed after side-effecting calls.
+  ZoneMap<std::pair<ValueNode*, compiler::NameRef>, ValueNode*>
+      loaded_properties;
 };
 
class InterpreterFrameState {
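
At control-flow merge points the generalized DestructivelyIntersect is what keeps
these maps sound: an (object, name) entry survives a merge only if every predecessor
recorded it with the same value node, which is exactly what the lhs == rhs lambdas
above ask for. A standalone sketch of that lock-step intersection over std::map,
consistent with the comments in the hunk above (illustrative; the real helper
operates on ZoneMap):

// Standalone sketch of a destructive, lock-step map intersection.
#include <map>

template <typename Key, typename Value, typename MergeFunc>
void DestructivelyIntersect(std::map<Key, Value>& lhs_map,
                            const std::map<Key, Value>& rhs_map,
                            MergeFunc&& func) {
  auto lhs_it = lhs_map.begin();
  auto rhs_it = rhs_map.begin();
  // Both maps are sorted by key, so a single forward pass suffices.
  while (lhs_it != lhs_map.end() && rhs_it != rhs_map.end()) {
    if (lhs_it->first < rhs_it->first) {
      lhs_it = lhs_map.erase(lhs_it);  // only in LHS: drop it
    } else if (rhs_it->first < lhs_it->first) {
      ++rhs_it;                        // only in RHS: ignore it
    } else {
      // In both maps: keep the entry only if the merge function accepts it.
      if (func(lhs_it->second, rhs_it->second)) {
        ++lhs_it;
      } else {
        lhs_it = lhs_map.erase(lhs_it);
      }
      ++rhs_it;
    }
  }
  // Anything left in LHS past RHS's end is not in RHS: drop it.
  lhs_map.erase(lhs_it, lhs_map.end());
}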