[turbofan] Unify code that determines a JSCreate's map

There were four places where we did essentially the same steps in
order to extract the initial map for inlining a JSCreate operation.
This CL creates a function on NodeProperties for this task.
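
As an illustration, each of the former duplication sites now reduces to
roughly the following pattern (a condensed sketch taken from the
ReduceJSCreate/ReduceJSCreateArray hunks below, not a standalone snippet):

  base::Optional<MapRef> initial_map =
      NodeProperties::GetJSCreateMap(broker(), node);
  if (!initial_map.has_value()) return NoChange();
  // ... inline the allocation using *initial_map ...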

As a side effect, this fixes a bug in ReduceJSCreateArray, where
has_initial_map could get called when it wasn't permissible to do so.
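
The unified helper only reads the initial map behind the appropriate guard;
the old ReduceJSCreateArray path could call has_initial_map() without it
(sketch, mirroring the new NodeProperties::GetJSCreateMap below —
has_initial_map() must not be queried on a function whose map lacks a
prototype slot):

  if (newtarget.map().has_prototype_slot() && newtarget.has_initial_map()) {
    MapRef initial_map = newtarget.initial_map();
    // ... inline only if this map's constructor is {target} ...
  }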

Notes: For simplicity, in one or two places where we used to get the
target/newtarget constants from the types, we now get them from
HeapConstant nodes.
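
In other words, instead of reading the constant off the node's type, it is
now matched directly on the node (sketch of the pattern used in the hunks
below):

  HeapObjectMatcher m(new_target);
  if (m.HasValue() && m.Ref(broker()).IsJSFunction()) {
    JSFunctionRef original_constructor = m.Ref(broker()).AsJSFunction();
    // previously: NodeProperties::GetType(new_target).AsHeapConstant()->Ref()
  }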

Cosmetic change: rename "receiver_map" to the more accurate
"root_map" in JSNativeContextSpecialization::ExtractReceiverMaps.

Bug: chromium:939316
Change-Id: I8fd9eb50993be3d839ab9b18eeea28184c53eabf
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1528435
Commit-Queue: Georg Neis <neis@chromium.org>
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60301}
Author: Georg Neis, 2019-03-18 15:34:05 +01:00 (committed by Commit Bot)
Parent: 6f2b87b883
Commit: d922171717
6 changed files with 179 additions and 191 deletions

View File

@@ -41,16 +41,6 @@ Node* GetArgumentsFrameState(Node* frame_state) {
: frame_state;
}
// Checks whether allocation using the given target and new.target can be
// inlined.
bool IsAllocationInlineable(const JSFunctionRef& target,
const JSFunctionRef& new_target) {
CHECK_IMPLIES(new_target.has_initial_map(),
!new_target.initial_map().is_dictionary_map());
return new_target.has_initial_map() &&
new_target.initial_map().GetConstructor().equals(target);
}
// When initializing arrays, we'll unfold the loop if the number of
// elements is known to be of this type.
const int kElementLoopUnrollLimit = 16;
@@ -117,48 +107,32 @@ Reduction JSCreateLowering::Reduce(Node* node) {
Reduction JSCreateLowering::ReduceJSCreate(Node* node) {
DCHECK_EQ(IrOpcode::kJSCreate, node->opcode());
Node* const target = NodeProperties::GetValueInput(node, 0);
Type const target_type = NodeProperties::GetType(target);
Node* const new_target = NodeProperties::GetValueInput(node, 1);
Type const new_target_type = NodeProperties::GetType(new_target);
Node* const effect = NodeProperties::GetEffectInput(node);
Node* const control = NodeProperties::GetControlInput(node);
// Extract constructor and original constructor function.
if (!target_type.IsHeapConstant() || !new_target_type.IsHeapConstant() ||
!target_type.AsHeapConstant()->Ref().IsJSFunction() ||
!new_target_type.AsHeapConstant()->Ref().IsJSFunction()) {
return NoChange();
}
JSFunctionRef constructor =
target_type.AsHeapConstant()->Ref().AsJSFunction();
if (!constructor.map().is_constructor()) return NoChange();
base::Optional<MapRef> initial_map =
NodeProperties::GetJSCreateMap(broker(), node);
if (!initial_map.has_value()) return NoChange();
JSFunctionRef original_constructor =
new_target_type.AsHeapConstant()->Ref().AsJSFunction();
if (!original_constructor.map().is_constructor()) return NoChange();
// Check if we can inline the allocation.
if (!IsAllocationInlineable(constructor, original_constructor)) {
return NoChange();
}
HeapObjectMatcher(new_target).Ref(broker()).AsJSFunction();
SlackTrackingPrediction slack_tracking_prediction =
dependencies()->DependOnInitialMapInstanceSizePrediction(
original_constructor);
MapRef initial_map = original_constructor.initial_map();
// Emit code to allocate the JSObject instance for the
// {original_constructor}.
AllocationBuilder a(jsgraph(), effect, control);
a.Allocate(slack_tracking_prediction.instance_size());
a.Store(AccessBuilder::ForMap(), initial_map);
a.Store(AccessBuilder::ForMap(), *initial_map);
a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(),
jsgraph()->EmptyFixedArrayConstant());
a.Store(AccessBuilder::ForJSObjectElements(),
jsgraph()->EmptyFixedArrayConstant());
for (int i = 0; i < slack_tracking_prediction.inobject_property_count();
++i) {
a.Store(AccessBuilder::ForJSObjectInObjectProperty(initial_map, i),
a.Store(AccessBuilder::ForJSObjectInObjectProperty(*initial_map, i),
jsgraph()->UndefinedConstant());
}
@@ -640,124 +614,108 @@ Reduction JSCreateLowering::ReduceJSCreateArray(Node* node) {
}
}
AllocationType allocation = AllocationType::kYoung;
JSFunctionRef constructor = native_context().array_function();
Node* target = NodeProperties::GetValueInput(node, 0);
base::Optional<MapRef> initial_map =
NodeProperties::GetJSCreateMap(broker(), node);
if (!initial_map.has_value()) return NoChange();
Node* new_target = NodeProperties::GetValueInput(node, 1);
Type new_target_type = (target == new_target)
? Type::HeapConstant(constructor, zone())
: NodeProperties::GetType(new_target);
JSFunctionRef original_constructor =
HeapObjectMatcher(new_target).Ref(broker()).AsJSFunction();
SlackTrackingPrediction slack_tracking_prediction =
dependencies()->DependOnInitialMapInstanceSizePrediction(
original_constructor);
// Extract original constructor function.
if (new_target_type.IsHeapConstant() &&
new_target_type.AsHeapConstant()->Ref().IsJSFunction()) {
JSFunctionRef original_constructor =
new_target_type.AsHeapConstant()->Ref().AsJSFunction();
DCHECK(constructor.map().is_constructor());
DCHECK(original_constructor.map().is_constructor());
// Tells whether we are protected by either the {site} or a
// protector cell to do certain speculative optimizations.
bool can_inline_call = false;
// Check if we can inline the allocation.
if (IsAllocationInlineable(constructor, original_constructor)) {
SlackTrackingPrediction slack_tracking_prediction =
dependencies()->DependOnInitialMapInstanceSizePrediction(
original_constructor);
MapRef initial_map = original_constructor.initial_map();
// Check if we have a feedback {site} on the {node}.
ElementsKind elements_kind = initial_map->elements_kind();
if (site_ref) {
elements_kind = site_ref->GetElementsKind();
can_inline_call = site_ref->CanInlineCall();
allocation = dependencies()->DependOnPretenureMode(*site_ref);
dependencies()->DependOnElementsKind(*site_ref);
} else {
CellRef array_constructor_protector(
broker(), factory()->array_constructor_protector());
can_inline_call =
array_constructor_protector.value().AsSmi() == Isolate::kProtectorValid;
}
// Tells whether we are protected by either the {site} or a
// protector cell to do certain speculative optimizations.
bool can_inline_call = false;
// Check if we have a feedback {site} on the {node}.
ElementsKind elements_kind = initial_map.elements_kind();
if (site_ref) {
elements_kind = site_ref->GetElementsKind();
can_inline_call = site_ref->CanInlineCall();
allocation = dependencies()->DependOnPretenureMode(*site_ref);
dependencies()->DependOnElementsKind(*site_ref);
} else {
CellRef array_constructor_protector(
broker(), factory()->array_constructor_protector());
can_inline_call = array_constructor_protector.value().AsSmi() ==
Isolate::kProtectorValid;
}
if (arity == 0) {
Node* length = jsgraph()->ZeroConstant();
int capacity = JSArray::kPreallocatedArrayElements;
return ReduceNewArray(node, length, capacity, initial_map,
elements_kind, allocation,
slack_tracking_prediction);
} else if (arity == 1) {
Node* length = NodeProperties::GetValueInput(node, 2);
Type length_type = NodeProperties::GetType(length);
if (!length_type.Maybe(Type::Number())) {
// Handle the single argument case, where we know that the value
// cannot be a valid Array length.
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind)
? HOLEY_ELEMENTS
: PACKED_ELEMENTS);
return ReduceNewArray(node, std::vector<Node*>{length}, initial_map,
elements_kind, allocation,
slack_tracking_prediction);
}
if (length_type.Is(Type::SignedSmall()) && length_type.Min() >= 0 &&
length_type.Max() <= kElementLoopUnrollLimit &&
length_type.Min() == length_type.Max()) {
int capacity = static_cast<int>(length_type.Max());
return ReduceNewArray(node, length, capacity, initial_map,
elements_kind, allocation,
slack_tracking_prediction);
}
if (length_type.Maybe(Type::UnsignedSmall()) && can_inline_call) {
return ReduceNewArray(node, length, initial_map, elements_kind,
allocation, slack_tracking_prediction);
}
} else if (arity <= JSArray::kInitialMaxFastElementArray) {
// Gather the values to store into the newly created array.
bool values_all_smis = true, values_all_numbers = true,
values_any_nonnumber = false;
std::vector<Node*> values;
values.reserve(p.arity());
for (int i = 0; i < arity; ++i) {
Node* value = NodeProperties::GetValueInput(node, 2 + i);
Type value_type = NodeProperties::GetType(value);
if (!value_type.Is(Type::SignedSmall())) {
values_all_smis = false;
}
if (!value_type.Is(Type::Number())) {
values_all_numbers = false;
}
if (!value_type.Maybe(Type::Number())) {
values_any_nonnumber = true;
}
values.push_back(value);
}
// Try to figure out the ideal elements kind statically.
if (values_all_smis) {
// Smis can be stored with any elements kind.
} else if (values_all_numbers) {
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind)
? HOLEY_DOUBLE_ELEMENTS
: PACKED_DOUBLE_ELEMENTS);
} else if (values_any_nonnumber) {
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind)
? HOLEY_ELEMENTS
: PACKED_ELEMENTS);
} else if (!can_inline_call) {
// We have some crazy combination of types for the {values} where
// there's no clear decision on the elements kind statically. And
// we don't have a protection against deoptimization loops for the
// checks that are introduced in the call to ReduceNewArray, so
// we cannot inline this invocation of the Array constructor here.
return NoChange();
}
return ReduceNewArray(node, values, initial_map, elements_kind,
allocation, slack_tracking_prediction);
}
if (arity == 0) {
Node* length = jsgraph()->ZeroConstant();
int capacity = JSArray::kPreallocatedArrayElements;
return ReduceNewArray(node, length, capacity, *initial_map, elements_kind,
allocation, slack_tracking_prediction);
} else if (arity == 1) {
Node* length = NodeProperties::GetValueInput(node, 2);
Type length_type = NodeProperties::GetType(length);
if (!length_type.Maybe(Type::Number())) {
// Handle the single argument case, where we know that the value
// cannot be a valid Array length.
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind) ? HOLEY_ELEMENTS
: PACKED_ELEMENTS);
return ReduceNewArray(node, std::vector<Node*>{length}, *initial_map,
elements_kind, allocation,
slack_tracking_prediction);
}
if (length_type.Is(Type::SignedSmall()) && length_type.Min() >= 0 &&
length_type.Max() <= kElementLoopUnrollLimit &&
length_type.Min() == length_type.Max()) {
int capacity = static_cast<int>(length_type.Max());
return ReduceNewArray(node, length, capacity, *initial_map, elements_kind,
allocation, slack_tracking_prediction);
}
if (length_type.Maybe(Type::UnsignedSmall()) && can_inline_call) {
return ReduceNewArray(node, length, *initial_map, elements_kind,
allocation, slack_tracking_prediction);
}
} else if (arity <= JSArray::kInitialMaxFastElementArray) {
// Gather the values to store into the newly created array.
bool values_all_smis = true, values_all_numbers = true,
values_any_nonnumber = false;
std::vector<Node*> values;
values.reserve(p.arity());
for (int i = 0; i < arity; ++i) {
Node* value = NodeProperties::GetValueInput(node, 2 + i);
Type value_type = NodeProperties::GetType(value);
if (!value_type.Is(Type::SignedSmall())) {
values_all_smis = false;
}
if (!value_type.Is(Type::Number())) {
values_all_numbers = false;
}
if (!value_type.Maybe(Type::Number())) {
values_any_nonnumber = true;
}
values.push_back(value);
}
// Try to figure out the ideal elements kind statically.
if (values_all_smis) {
// Smis can be stored with any elements kind.
} else if (values_all_numbers) {
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind)
? HOLEY_DOUBLE_ELEMENTS
: PACKED_DOUBLE_ELEMENTS);
} else if (values_any_nonnumber) {
elements_kind = GetMoreGeneralElementsKind(
elements_kind, IsHoleyElementsKind(elements_kind) ? HOLEY_ELEMENTS
: PACKED_ELEMENTS);
} else if (!can_inline_call) {
// We have some crazy combination of types for the {values} where
// there's no clear decision on the elements kind statically. And
// we don't have a protection against deoptimization loops for the
// checks that are introduced in the call to ReduceNewArray, so
// we cannot inline this invocation of the Array constructor here.
return NoChange();
}
return ReduceNewArray(node, values, *initial_map, elements_kind, allocation,
slack_tracking_prediction);
}
return NoChange();
}

View File

@@ -3360,15 +3360,15 @@ bool JSNativeContextSpecialization::ExtractReceiverMaps(
// Try to extract some maps from the {nexus}.
if (nexus.ExtractMaps(receiver_maps) != 0) {
// Try to filter impossible candidates based on inferred root map.
Handle<Map> receiver_map;
if (InferReceiverRootMap(receiver).ToHandle(&receiver_map)) {
DCHECK(!receiver_map->is_abandoned_prototype_map());
Handle<Map> root_map;
if (InferReceiverRootMap(receiver).ToHandle(&root_map)) {
DCHECK(!root_map->is_abandoned_prototype_map());
Isolate* isolate = this->isolate();
receiver_maps->erase(
std::remove_if(receiver_maps->begin(), receiver_maps->end(),
[receiver_map, isolate](const Handle<Map>& map) {
[root_map, isolate](Handle<Map> map) {
return map->is_abandoned_prototype_map() ||
map->FindRootMap(isolate) != *receiver_map;
map->FindRootMap(isolate) != *root_map;
}),
receiver_maps->end());
}
@@ -3410,18 +3410,12 @@ MaybeHandle<Map> JSNativeContextSpecialization::InferReceiverRootMap(
if (m.HasValue()) {
return handle(m.Value()->map()->FindRootMap(isolate()), isolate());
} else if (m.IsJSCreate()) {
HeapObjectMatcher mtarget(m.InputAt(0));
HeapObjectMatcher mnewtarget(m.InputAt(1));
if (mtarget.HasValue() && mnewtarget.HasValue()) {
Handle<JSFunction> constructor =
Handle<JSFunction>::cast(mtarget.Value());
if (constructor->has_initial_map()) {
Handle<Map> initial_map(constructor->initial_map(), isolate());
if (initial_map->constructor_or_backpointer() == *mnewtarget.Value()) {
DCHECK_EQ(*initial_map, initial_map->FindRootMap(isolate()));
return initial_map;
}
}
base::Optional<MapRef> initial_map =
NodeProperties::GetJSCreateMap(broker(), receiver);
if (initial_map.has_value()) {
DCHECK_EQ(*initial_map->object(),
initial_map->object()->FindRootMap(isolate()));
return initial_map->object();
}
}
return MaybeHandle<Map>();

View File

@@ -361,6 +361,30 @@ bool NodeProperties::IsSame(Node* a, Node* b) {
}
}
// static
base::Optional<MapRef> NodeProperties::GetJSCreateMap(JSHeapBroker* broker,
Node* receiver) {
DCHECK(receiver->opcode() == IrOpcode::kJSCreate ||
receiver->opcode() == IrOpcode::kJSCreateArray);
HeapObjectMatcher mtarget(GetValueInput(receiver, 0));
HeapObjectMatcher mnewtarget(GetValueInput(receiver, 1));
if (mtarget.HasValue() && mnewtarget.HasValue() &&
mnewtarget.Ref(broker).IsJSFunction()) {
ObjectRef target = mtarget.Ref(broker);
JSFunctionRef newtarget = mnewtarget.Ref(broker).AsJSFunction();
if (newtarget.map().has_prototype_slot() && newtarget.has_initial_map()) {
if (broker->mode() == JSHeapBroker::kSerializing) newtarget.Serialize();
MapRef initial_map = newtarget.initial_map();
if (initial_map.GetConstructor().equals(target)) {
DCHECK(target.AsJSFunction().map().is_constructor());
DCHECK(newtarget.map().is_constructor());
return initial_map;
}
}
}
return base::nullopt;
}
// static
NodeProperties::InferReceiverMapsResult NodeProperties::InferReceiverMaps(
JSHeapBroker* broker, Node* receiver, Node* effect,
@@ -406,21 +430,10 @@ NodeProperties::InferReceiverMapsResult NodeProperties::InferReceiverMaps(
}
case IrOpcode::kJSCreate: {
if (IsSame(receiver, effect)) {
HeapObjectMatcher mtarget(GetValueInput(effect, 0));
HeapObjectMatcher mnewtarget(GetValueInput(effect, 1));
if (mtarget.HasValue() && mnewtarget.HasValue() &&
mnewtarget.Ref(broker).IsJSFunction()) {
JSFunctionRef original_constructor =
mnewtarget.Ref(broker).AsJSFunction();
if (original_constructor.map().has_prototype_slot() &&
original_constructor.has_initial_map()) {
original_constructor.Serialize();
MapRef initial_map = original_constructor.initial_map();
if (initial_map.GetConstructor().equals(mtarget.Ref(broker))) {
*maps_return = ZoneHandleSet<Map>(initial_map.object());
return result;
}
}
base::Optional<MapRef> initial_map = GetJSCreateMap(broker, receiver);
if (initial_map.has_value()) {
*maps_return = ZoneHandleSet<Map>(initial_map->object());
return result;
}
// We reached the allocation of the {receiver}.
return kNoReceiverMaps;

View File

@@ -155,6 +155,10 @@ class V8_EXPORT_PRIVATE NodeProperties final {
JSHeapBroker* broker, Node* receiver, Node* effect,
ZoneHandleSet<Map>* maps_return);
// Return the initial map of the new-target if the allocation can be inlined.
static base::Optional<MapRef> GetJSCreateMap(JSHeapBroker* broker,
Node* receiver);
static bool HasInstanceTypeWitness(JSHeapBroker* broker, Node* receiver,
Node* effect, InstanceType instance_type);

View File

@@ -4,16 +4,36 @@
// Flags: --allow-natives-syntax
function f(arg) {
const o = Reflect.construct(Object, arguments, Proxy);
o.foo = arg;
}
(function JSCreate() {
function f(arg) {
const o = Reflect.construct(Object, arguments, Proxy);
o.foo = arg;
}
function g(i) {
f(i);
}
function g(i) {
f(i);
}
g(0);
g(1);
%OptimizeFunctionOnNextCall(g);
g(2);
g(0);
g(1);
%OptimizeFunctionOnNextCall(g);
g(2);
})();
(function JSCreateArray() {
function f() {
try {
const o = Reflect.construct(Array, arguments, parseInt);
} catch(e) { }
}
function g() {
f();
}
g();
g();
%OptimizeFunctionOnNextCall(g);
g();
})();

View File

@@ -74,8 +74,7 @@ class JSCreateLoweringTest : public TypedGraphTest {
TEST_F(JSCreateLoweringTest, JSCreate) {
Handle<JSFunction> function = isolate()->object_function();
Node* const target =
Parameter(Type::HeapConstant(broker(), function, graph()->zone()));
Node* const target = graph()->NewNode(common()->HeapConstant(function));
Node* const context = Parameter(Type::Any());
Node* const effect = graph()->start();
Node* const control = graph()->start();