Reland "Make LoadElimination aware of const fields (Part 2; stores)"
This is a reland of e588ff10e5
The only change over the original CL is found in JSCreateLowering::AllocateFastLiteral. We now guard against boilerplate values for unboxed double fields that *look* like legitimate initial values, but should really be kHoleNanInt64 instead.
The underlying problem certainly existed before, but an invariant added to LoadElimination in this CL caused a Chromium layout test to fail. The change in this reland is therefore a workaround, the root cause remains to be fixed. Specifically, we find that a pointer to the undefined value oddball is sometimes reinterpreted as a double and assigned as a boilerplate value. @jarin suspects that this stems from in-place map updates.
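In essence, the guard added in JSCreateLowering::AllocateFastLiteral treats both bit patterns as uninitialized and keeps the corresponding store out of the const world. A condensed sketch of that logic (taken from the AllocateFastLiteral hunk in the diff below, not a separate implementation):

    uint64_t value_bits = boilerplate.RawFastDoublePropertyAsBitsAt(index);
    // A reinterpreted pointer to the undefined oddball can masquerade as a
    // legitimate double; treat it like the hole NaN until the root cause is fixed.
    bool is_undefined = value_bits == (*(factory()->undefined_value())).ptr();
    if (value_bits == kHoleNanInt64 || is_undefined) {
      // The field is re-initialized by a later store, so this value is
      // unobservable; mark the store mutable so load elimination never sees
      // more than one const store to the field.
      access.constness = PropertyConstness::kMutable;
    }
    value = jsgraph()->Constant(bit_cast<double>(value_bits));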
Original change's description:
> Make LoadElimination aware of const fields (Part 2; stores)
>
> Adds const information to store field accesses and uses it in load elimination
>
> Change-Id: I00765c854c95c955dabd78557463267b95f75eef
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1611543
> Reviewed-by: Georg Neis <neis@chromium.org>
> Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
> Commit-Queue: Georg Schmid <gsps@google.com>
> Cr-Commit-Position: refs/heads/master@{#61796}
Change-Id: Ie388754890024a3ca7d10c9d4d7391442655b426
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1630676
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Georg Schmid <gsps@google.com>
Cr-Commit-Position: refs/heads/master@{#61838}
Parent: ffc7075260
Commit: 85f257f4c9
@@ -92,11 +92,11 @@ PropertyAccessInfo PropertyAccessInfo::DataConstant(
     Zone* zone, Handle<Map> receiver_map,
     ZoneVector<CompilationDependencies::Dependency const*>&& dependencies,
     FieldIndex field_index, Representation field_representation,
-    Type field_type, MaybeHandle<Map> field_map, MaybeHandle<JSObject> holder) {
-  return PropertyAccessInfo(kDataConstant, holder, MaybeHandle<Map>(),
-                            field_index, field_representation, field_type,
-                            field_map, {{receiver_map}, zone},
-                            std::move(dependencies));
+    Type field_type, MaybeHandle<Map> field_map, MaybeHandle<JSObject> holder,
+    MaybeHandle<Map> transition_map) {
+  return PropertyAccessInfo(kDataConstant, holder, transition_map, field_index,
+                            field_representation, field_type, field_map,
+                            {{receiver_map}, zone}, std::move(dependencies));
 }
 
 // static
@@ -796,10 +796,22 @@ PropertyAccessInfo AccessInfoFactory::LookupTransition(
   unrecorded_dependencies.push_back(
       dependencies()->TransitionDependencyOffTheRecord(
           MapRef(broker(), transition_map)));
-  // Transitioning stores are never stores to constant fields.
-  return PropertyAccessInfo::DataField(
-      zone(), map, std::move(unrecorded_dependencies), field_index,
-      details_representation, field_type, field_map, holder, transition_map);
+  // Transitioning stores *may* store to const fields. The resulting
+  // DataConstant access infos can be distinguished from later, i.e. redundant,
+  // stores to the same constant field by the presence of a transition map.
+  switch (details.constness()) {
+    case PropertyConstness::kMutable:
+      return PropertyAccessInfo::DataField(
+          zone(), map, std::move(unrecorded_dependencies), field_index,
+          details_representation, field_type, field_map, holder,
+          transition_map);
+    case PropertyConstness::kConst:
+      return PropertyAccessInfo::DataConstant(
+          zone(), map, std::move(unrecorded_dependencies), field_index,
+          details_representation, field_type, field_map, holder,
+          transition_map);
+  }
+  UNREACHABLE();
 }
 
 }  // namespace compiler
@@ -85,8 +85,8 @@ class PropertyAccessInfo final {
       ZoneVector<CompilationDependencies::Dependency const*>&&
           unrecorded_dependencies,
       FieldIndex field_index, Representation field_representation,
-      Type field_type, MaybeHandle<Map> field_map,
-      MaybeHandle<JSObject> holder);
+      Type field_type, MaybeHandle<Map> field_map, MaybeHandle<JSObject> holder,
+      MaybeHandle<Map> transition_map = MaybeHandle<Map>());
   static PropertyAccessInfo AccessorConstant(Zone* zone,
                                              Handle<Map> receiver_map,
                                              Handle<Object> constant,
@@ -1616,17 +1616,43 @@ Node* JSCreateLowering::AllocateFastLiteral(Node* effect, Node* control,
     DCHECK_EQ(kData, property_details.kind());
     NameRef property_name = boilerplate_map.GetPropertyKey(i);
     FieldIndex index = boilerplate_map.GetFieldIndexFor(i);
-    FieldAccess access = {
-        kTaggedBase, index.offset(), property_name.object(),
-        MaybeHandle<Map>(), Type::Any(), MachineType::TypeCompressedTagged(),
-        kFullWriteBarrier};
+    FieldAccess access = {kTaggedBase,
+                          index.offset(),
+                          property_name.object(),
+                          MaybeHandle<Map>(),
+                          Type::Any(),
+                          MachineType::TypeCompressedTagged(),
+                          kFullWriteBarrier,
+                          LoadSensitivity::kUnsafe,
+                          property_details.constness()};
     Node* value;
     if (boilerplate_map.IsUnboxedDoubleField(i)) {
       access.machine_type = MachineType::Float64();
       access.type = Type::Number();
-      value = jsgraph()->Constant(boilerplate.RawFastDoublePropertyAt(index));
+      uint64_t value_bits = boilerplate.RawFastDoublePropertyAsBitsAt(index);
+      // TODO(gsps): Remove the is_undefined once we've eliminated the case
+      // where a pointer to the undefined value gets reinterpreted as the
+      // boilerplate value of certain uninitialized unboxed double fields.
+      bool is_undefined = value_bits == (*(factory()->undefined_value())).ptr();
+      if (value_bits == kHoleNanInt64 || is_undefined) {
+        // This special case is analogous to is_uninitialized being true in the
+        // non-unboxed-double case below. The store of the hole NaN value here
+        // will always be followed by another store that actually initializes
+        // the field. The hole NaN should therefore be unobservable.
+        // Load elimination expects there to be at most one const store to any
+        // given field, so we always mark the unobservable ones as mutable.
+        access.constness = PropertyConstness::kMutable;
+      }
+      value = jsgraph()->Constant(bit_cast<double>(value_bits));
     } else {
       ObjectRef boilerplate_value = boilerplate.RawFastPropertyAt(index);
+      bool is_uninitialized =
+          boilerplate_value.IsHeapObject() &&
+          boilerplate_value.AsHeapObject().map().oddball_type() ==
+              OddballType::kUninitialized;
+      if (is_uninitialized) {
+        access.constness = PropertyConstness::kMutable;
+      }
       if (boilerplate_value.IsJSObject()) {
         JSObjectRef boilerplate_object = boilerplate_value.AsJSObject();
         value = effect = AllocateFastLiteral(effect, control,
@@ -1643,10 +1669,6 @@ Node* JSCreateLowering::AllocateFastLiteral(Node* effect, Node* control,
           value = effect = builder.Finish();
         } else if (property_details.representation().IsSmi()) {
           // Ensure that value is stored as smi.
-          bool is_uninitialized =
-              boilerplate_value.IsHeapObject() &&
-              boilerplate_value.AsHeapObject().map().oddball_type() ==
-                  OddballType::kUninitialized;
           value = is_uninitialized
                       ? jsgraph()->ZeroConstant()
                       : jsgraph()->Constant(boilerplate_value.AsSmi());
@@ -208,9 +208,13 @@ void CallHandlerInfoData::Serialize(JSHeapBroker* broker) {
 class JSObjectField {
  public:
   bool IsDouble() const { return object_ == nullptr; }
+  uint64_t AsBitsOfDouble() const {
+    CHECK(IsDouble());
+    return number_bits_;
+  }
   double AsDouble() const {
     CHECK(IsDouble());
-    return number_;
+    return bit_cast<double>(number_bits_);
   }
 
   bool IsObject() const { return object_ != nullptr; }
@@ -219,12 +223,12 @@ class JSObjectField {
     return object_;
   }
 
-  explicit JSObjectField(double value) : number_(value) {}
+  explicit JSObjectField(uint64_t value_bits) : number_bits_(value_bits) {}
   explicit JSObjectField(ObjectData* value) : object_(value) {}
 
  private:
   ObjectData* object_ = nullptr;
-  double number_ = 0;
+  uint64_t number_bits_ = 0;
 };
 
 class JSObjectData : public HeapObjectData {
@@ -1793,8 +1797,9 @@ void JSObjectData::SerializeRecursive(JSHeapBroker* broker, int depth) {
     DCHECK_EQ(field_index.property_index(),
               static_cast<int>(inobject_fields_.size()));
     if (boilerplate->IsUnboxedDoubleField(field_index)) {
-      double value = boilerplate->RawFastDoublePropertyAt(field_index);
-      inobject_fields_.push_back(JSObjectField{value});
+      uint64_t value_bits =
+          boilerplate->RawFastDoublePropertyAsBitsAt(field_index);
+      inobject_fields_.push_back(JSObjectField{value_bits});
     } else {
       Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                            isolate);
@@ -2361,6 +2366,16 @@ double JSObjectRef::RawFastDoublePropertyAt(FieldIndex index) const {
   return object_data->GetInobjectField(index.property_index()).AsDouble();
 }
 
+uint64_t JSObjectRef::RawFastDoublePropertyAsBitsAt(FieldIndex index) const {
+  if (broker()->mode() == JSHeapBroker::kDisabled) {
+    AllowHandleDereference handle_dereference;
+    return object()->RawFastDoublePropertyAsBitsAt(index);
+  }
+  JSObjectData* object_data = data()->AsJSObject();
+  CHECK(index.is_inobject());
+  return object_data->GetInobjectField(index.property_index()).AsBitsOfDouble();
+}
+
 ObjectRef JSObjectRef::RawFastPropertyAt(FieldIndex index) const {
   if (broker()->mode() == JSHeapBroker::kDisabled) {
     AllowHandleAllocation handle_allocation;
@@ -222,6 +222,7 @@ class JSObjectRef : public HeapObjectRef {
   using HeapObjectRef::HeapObjectRef;
   Handle<JSObject> object() const;
 
+  uint64_t RawFastDoublePropertyAsBitsAt(FieldIndex index) const;
   double RawFastDoublePropertyAt(FieldIndex index) const;
   ObjectRef RawFastPropertyAt(FieldIndex index) const;
 
@@ -2226,6 +2226,8 @@ JSNativeContextSpecialization::BuildPropertyStore(
                               &control, if_exceptions, access_info);
   } else {
     DCHECK(access_info.IsDataField() || access_info.IsDataConstant());
+    DCHECK(access_mode == AccessMode::kStore ||
+           access_mode == AccessMode::kStoreInLiteral);
     FieldIndex const field_index = access_info.field_index();
     Type const field_type = access_info.field_type();
     MachineRepresentation const field_representation =
@@ -2237,6 +2239,12 @@ JSNativeContextSpecialization::BuildPropertyStore(
           simplified()->LoadField(AccessBuilder::ForJSObjectPropertiesOrHash()),
           storage, effect, control);
     }
+    PropertyConstness constness = access_info.IsDataConstant()
+                                      ? PropertyConstness::kConst
+                                      : PropertyConstness::kMutable;
+    bool store_to_existing_constant_field = access_info.IsDataConstant() &&
+                                            access_mode == AccessMode::kStore &&
+                                            !access_info.HasTransitionMap();
     FieldAccess field_access = {
         kTaggedBase,
         field_index.offset(),
@@ -2244,12 +2252,10 @@ JSNativeContextSpecialization::BuildPropertyStore(
         MaybeHandle<Map>(),
         field_type,
         MachineType::TypeForRepresentation(field_representation),
-        kFullWriteBarrier};
-    bool store_to_constant_field =
-        (access_mode == AccessMode::kStore) && access_info.IsDataConstant();
+        kFullWriteBarrier,
+        LoadSensitivity::kUnsafe,
+        constness};
 
-    DCHECK(access_mode == AccessMode::kStore ||
-           access_mode == AccessMode::kStoreInLiteral);
     switch (field_representation) {
       case MachineRepresentation::kFloat64: {
         value = effect =
@@ -2264,7 +2270,10 @@ JSNativeContextSpecialization::BuildPropertyStore(
                      Type::OtherInternal());
           a.Store(AccessBuilder::ForMap(),
                   factory()->mutable_heap_number_map());
-          a.Store(AccessBuilder::ForHeapNumberValue(), value);
+          FieldAccess value_field_access =
+              AccessBuilder::ForHeapNumberValue();
+          value_field_access.constness = field_access.constness;
+          a.Store(value_field_access, value);
           value = effect = a.Finish();
 
           field_access.type = Type::Any();
@@ -2280,7 +2289,9 @@ JSNativeContextSpecialization::BuildPropertyStore(
               MaybeHandle<Map>(),
              Type::OtherInternal(),
              MachineType::TypeCompressedTaggedPointer(),
-              kPointerWriteBarrier};
+              kPointerWriteBarrier,
+              LoadSensitivity::kUnsafe,
+              constness};
           storage = effect =
               graph()->NewNode(simplified()->LoadField(storage_access),
                                storage, effect, control);
@@ -2289,7 +2300,7 @@ JSNativeContextSpecialization::BuildPropertyStore(
             field_access.machine_type = MachineType::Float64();
           }
         }
-        if (store_to_constant_field) {
+        if (store_to_existing_constant_field) {
          DCHECK(!access_info.HasTransitionMap());
          // If the field is constant check that the value we are going
          // to store matches current value.
@@ -2311,7 +2322,7 @@ JSNativeContextSpecialization::BuildPropertyStore(
       case MachineRepresentation::kCompressedSigned:
       case MachineRepresentation::kCompressedPointer:
       case MachineRepresentation::kCompressed:
-        if (store_to_constant_field) {
+        if (store_to_existing_constant_field) {
          DCHECK(!access_info.HasTransitionMap());
          // If the field is constant check that the value we are going
          // to store matches current value.
@@ -601,9 +601,6 @@ Node* LoadElimination::AbstractState::LookupField(
   if (AbstractField const* this_field = fields[index]) {
     return this_field->Lookup(object);
   }
-  if (constness == PropertyConstness::kConst) {
-    return LookupField(object, index, PropertyConstness::kMutable);
-  }
   return nullptr;
 }
 
@@ -853,8 +850,13 @@ Reduction LoadElimination::ReduceLoadField(Node* node,
   } else {
     int field_index = FieldIndexOf(access);
     if (field_index >= 0) {
-      if (Node* replacement =
-              state->LookupField(object, field_index, access.constness)) {
+      PropertyConstness constness = access.constness;
+      Node* replacement = state->LookupField(object, field_index, constness);
+      if (!replacement && constness == PropertyConstness::kConst) {
+        replacement = state->LookupField(object, field_index,
+                                         PropertyConstness::kMutable);
+      }
+      if (replacement) {
         // Make sure we don't resurrect dead {replacement} nodes.
         if (!replacement->IsDead()) {
           // Introduce a TypeGuard if the type of the {replacement} node is not
@@ -873,8 +875,8 @@ Reduction LoadElimination::ReduceLoadField(Node* node,
           return Replace(replacement);
         }
       }
-      state = state->AddField(object, field_index, node, access.name,
-                              access.constness, zone());
+      state = state->AddField(object, field_index, node, access.name, constness,
+                              zone());
     }
   }
   Handle<Map> field_map;
@@ -907,16 +909,36 @@ Reduction LoadElimination::ReduceStoreField(Node* node,
   } else {
     int field_index = FieldIndexOf(access);
     if (field_index >= 0) {
+      PropertyConstness constness = access.constness;
       Node* const old_value =
-          state->LookupField(object, field_index, access.constness);
+          state->LookupField(object, field_index, constness);
+
+      if (constness == PropertyConstness::kConst && old_value) {
+        // At runtime, we should never see two consecutive const stores, i.e.,
+        // DCHECK_NULL(old_value)
+        // ought to hold, but we might see such (unreachable) code statically.
+        Node* control = NodeProperties::GetControlInput(node);
+        Node* unreachable =
+            graph()->NewNode(common()->Unreachable(), effect, control);
+        return Replace(unreachable);
+      }
+
       if (old_value == new_value) {
         // This store is fully redundant.
        return Replace(effect);
      }
+
       // Kill all potentially aliasing fields and record the new value.
       state = state->KillField(object, field_index, access.name, zone());
       state = state->AddField(object, field_index, new_value, access.name,
-                              access.constness, zone());
+                              PropertyConstness::kMutable, zone());
+      if (constness == PropertyConstness::kConst) {
+        // For const stores, we track information in both the const and the
+        // mutable world to guard against field accesses that should have
+        // been marked const, but were not.
+        state = state->AddField(object, field_index, new_value, access.name,
+                                constness, zone());
+      }
     } else {
       // Unsupported StoreField operator.
       state = state->KillFields(object, access.name, zone());
@@ -1199,10 +1221,13 @@ LoadElimination::AbstractState const* LoadElimination::ComputeLoopState(
                                      MaybeHandle<Name>(), zone());
           break;
         }
-        case IrOpcode::kStoreField:
-          state = ComputeLoopStateForStoreField(current, state,
-                                                FieldAccessOf(current->op()));
+        case IrOpcode::kStoreField: {
+          FieldAccess access = FieldAccessOf(current->op());
+          if (access.constness == PropertyConstness::kMutable) {
+            state = ComputeLoopStateForStoreField(current, state, access);
+          }
           break;
+        }
         case IrOpcode::kStoreElement: {
           Node* const object = NodeProperties::GetValueInput(current, 0);
           Node* const index = NodeProperties::GetValueInput(current, 1);
@@ -78,7 +78,7 @@ std::ostream& operator<<(std::ostream& os, FieldAccess const& access) {
   }
 #endif
   os << access.type << ", " << access.machine_type << ", "
-     << access.write_barrier_kind;
+     << access.write_barrier_kind << ", " << access.constness;
   if (FLAG_untrusted_code_mitigations) {
     os << ", " << access.load_sensitivity;
   }
@@ -401,6 +401,8 @@ inline PropertyConstness GeneralizeConstness(PropertyConstness a,
 
 V8_EXPORT_PRIVATE std::ostream& operator<<(
     std::ostream& os, const PropertyAttributes& attributes);
+V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
+                                           PropertyConstness constness);
 }  // namespace internal
 }  // namespace v8
 
@@ -24,6 +24,16 @@ std::ostream& operator<<(std::ostream& os,
   return os;
 }
 
+std::ostream& operator<<(std::ostream& os, PropertyConstness constness) {
+  switch (constness) {
+    case PropertyConstness::kMutable:
+      return os << "mutable";
+    case PropertyConstness::kConst:
+      return os << "const";
+  }
+  UNREACHABLE();
+}
+
 Descriptor::Descriptor() : details_(Smi::zero()) {}
 
 Descriptor::Descriptor(Handle<Name> key, const MaybeObjectHandle& value,
@@ -8,21 +8,149 @@
 (function() {
   function maybe_sideeffect(b) { return 42; }
 
-  function f(k) {
+  %NeverOptimizeFunction(maybe_sideeffect);
+
+  class B {
+    constructor(x) {
+      this.value = x;
+    }
+  }
+  %EnsureFeedbackVectorForFunction(B);
+
+
+  function lit_const_smi() {
+    let b = { value: 123 };
+    maybe_sideeffect(b);
+    let v1 = b.value;
+    maybe_sideeffect(b);
+    let v2 = b.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, 123));
+  }
+
+  lit_const_smi(); lit_const_smi();
+  %OptimizeFunctionOnNextCall(lit_const_smi); lit_const_smi();
+
+
+  function lit_const_object() {
+    let o = {x: 123};
+    let b = { value: o };
+    maybe_sideeffect(b);
+    let v1 = b.value;
+    maybe_sideeffect(b);
+    let v2 = b.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, o));
+  }
+
+  lit_const_object(); lit_const_object();
+  %OptimizeFunctionOnNextCall(lit_const_object); lit_const_object();
+
+
+  function lit_computed_smi(k) {
+    let kk = 2 * k;
+    let b = { value: kk };
+    maybe_sideeffect(b);
+    let v1 = b.value;
+    maybe_sideeffect(b);
+    let v2 = b.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, kk));
+  }
+
+  lit_computed_smi(1); lit_computed_smi(2);
+  %OptimizeFunctionOnNextCall(lit_computed_smi); lit_computed_smi(3);
+
+  // TODO(bmeurer): Fix const tracking for double fields in object literals
+  // lit_computed_smi(1.1); lit_computed_smi(2.2);
+  // %OptimizeFunctionOnNextCall(lit_computed_smi); lit_computed_smi(3.3);
+
+
+  function lit_param_object(k) {
     let b = { value: k };
     maybe_sideeffect(b);
     let v1 = b.value;
     maybe_sideeffect(b);
     let v2 = b.value;
-    %TurbofanStaticAssert(v1 == v2);
-    // TODO(gsps): Improve analysis to also propagate stored value
-    // Eventually, this should also work:
-    // %TurbofanStaticAssert(v2 == k);
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, k));
   }
 
-  %NeverOptimizeFunction(maybe_sideeffect);
-  f(1);
-  f(2);
-  %OptimizeFunctionOnNextCall(f);
-  f(3);
+  lit_param_object({x: 1}); lit_param_object({x: 2});
+  %OptimizeFunctionOnNextCall(lit_param_object); lit_param_object({x: 3});
+
+
+  function nested_lit_param(k) {
+    let b = { x: { value: k } };
+    maybe_sideeffect(b);
+    let v1 = b.x.value;
+    maybe_sideeffect(b);
+    let v2 = b.x.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, k));
+  }
+
+  nested_lit_param(1); nested_lit_param(2);
+  %OptimizeFunctionOnNextCall(nested_lit_param); nested_lit_param(3);
+
+  // TODO(bmeurer): Fix const tracking for double fields in object literals
+  // nested_lit_param(1.1); nested_lit_param(2.2);
+  // %OptimizeFunctionOnNextCall(nested_lit_param); nested_lit_param(3.3);
+
+
+  function nested_lit_param_object(k) {
+    let b = { x: { value: k } };
+    maybe_sideeffect(b);
+    let v1 = b.x.value;
+    maybe_sideeffect(b);
+    let v2 = b.x.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, k));
+  }
+
+  nested_lit_param_object({x: 1}); nested_lit_param_object({x: 2});
+  %OptimizeFunctionOnNextCall(nested_lit_param_object);
+  nested_lit_param_object({x: 3});
+
+
+  %EnsureFeedbackVectorForFunction(inst_param);
+  function inst_param(k) {
+    let b = new B(k);
+    maybe_sideeffect(b);
+    let v1 = b.value;
+    maybe_sideeffect(b);
+    let v2 = b.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, k));
+  }
+
+  inst_param(1); inst_param(2);
+  %OptimizeFunctionOnNextCall(inst_param); inst_param(3);
+
+  // TODO(gsps): Reenable once we fully support const field information
+  // tracking in the presence of pointer compression.
+  // inst_param(1.1); inst_param(2.2);
+  // %OptimizeFunctionOnNextCall(inst_param); inst_param(3.3);
+
+  inst_param({x: 1}); inst_param({x: 2});
+  %OptimizeFunctionOnNextCall(inst_param); inst_param({x: 3});
+
+
+  %EnsureFeedbackVectorForFunction(inst_computed);
+  function inst_computed(k) {
+    let kk = 2 * k;
+    let b = new B(kk);
+    maybe_sideeffect(b);
+    let v1 = b.value;
+    maybe_sideeffect(b);
+    let v2 = b.value;
+    %TurbofanStaticAssert(Object.is(v1, v2));
+    %TurbofanStaticAssert(Object.is(v2, kk));
+  }
+
+  inst_computed(1); inst_computed(2);
+  %OptimizeFunctionOnNextCall(inst_computed); inst_computed(3);
+
+  inst_computed(1.1); inst_computed(2.2);
+  %OptimizeFunctionOnNextCall(inst_computed); inst_computed(3.3);
 })();