[maglev] Fast path instanceof

Copy the instanceof fast path from TurboFan: emit an
'OrdinaryHasInstance' when there is no @@hasInstance symbol (this can
eventually be folded to a constant true/false if we can look through
the prototype chain), and a direct call of @@hasInstance otherwise.

In particular, the call to @@hasInstance requires a continuation builtin
(to call ToBoolean), so add support for these too.
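
To make the two paths concrete, here is a minimal JavaScript sketch
(illustrative only, not part of the commit; compare the regression test
at the end of this diff, whose @@hasInstance handler likewise returns a
non-boolean):

// Path 1: no @@hasInstance handler, so instanceof falls back to
// OrdinaryHasInstance, i.e. a walk of the prototype chain. With stable
// maps, that walk can be constant-folded to true or false.
class A {}
console.log(new A() instanceof A);  // true: A.prototype is on the chain

// Path 2: an @@hasInstance handler exists, so instanceof calls it
// directly and coerces its result with ToBoolean. A lazy deopt right
// after that call must therefore resume in a continuation that still
// applies ToBoolean before handing the value back to the interpreter.
var B = {};
B[Symbol.hasInstance] = function(x) { return 1; };  // truthy, non-boolean
console.log({} instanceof B);  // true: ToBoolean(1) is true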

Bug: v8:7700
Change-Id: I14aee4346e98cd650f190b811cc7a733e33addae
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3990844
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84012}
Authored by Leszek Swirski on 2022-11-02 15:01:16 +01:00; committed by V8 LUCI CQ
parent 7caf58b5fd
commit cecd01ac18
11 changed files with 643 additions and 123 deletions


@@ -479,13 +479,18 @@ class ExceptionHandlerTrampolineBuilder {
// values are tagged and b) the stack walk treats unknown stack slots as
// tagged.
const InterpretedDeoptFrame& lazy_frame =
deopt_info->top_frame().type() ==
DeoptFrame::FrameType::kBuiltinContinuationFrame
? deopt_info->top_frame().parent()->as_interpreted()
: deopt_info->top_frame().as_interpreted();
// TODO(v8:7700): Handle inlining.
ParallelMoveResolver<Register> direct_moves(masm_);
MoveVector materialising_moves;
bool save_accumulator = false;
- RecordMoves(deopt_info->top_frame().as_interpreted().unit(), catch_block,
- deopt_info->top_frame().as_interpreted().frame_state(),
+ RecordMoves(lazy_frame.unit(), catch_block, lazy_frame.frame_state(),
&direct_moves, &materialising_moves, &save_accumulator);
__ bind(&handler_info->trampoline_entry);
@@ -981,7 +986,29 @@ class SafepointingNodeProcessor {
namespace {
int GetFrameCount(const DeoptFrame& deopt_frame) {
switch (deopt_frame.type()) {
case DeoptFrame::FrameType::kInterpretedFrame:
return 1 + deopt_frame.as_interpreted().unit().inlining_depth();
case DeoptFrame::FrameType::kBuiltinContinuationFrame:
return 1 + GetFrameCount(*deopt_frame.parent());
}
}
BytecodeOffset GetBytecodeOffset(const DeoptFrame& deopt_frame) {
switch (deopt_frame.type()) {
case DeoptFrame::FrameType::kInterpretedFrame:
return deopt_frame.as_interpreted().bytecode_position();
case DeoptFrame::FrameType::kBuiltinContinuationFrame:
return Builtins::GetContinuationBytecodeOffset(
deopt_frame.as_builtin_continuation().builtin_id());
}
}
SourcePosition GetSourcePosition(const DeoptFrame& deopt_frame) {
switch (deopt_frame.type()) {
case DeoptFrame::FrameType::kInterpretedFrame:
return deopt_frame.as_interpreted().source_position();
case DeoptFrame::FrameType::kBuiltinContinuationFrame:
return SourcePosition::Unknown();
}
}
} // namespace
@@ -1067,6 +1094,34 @@ class MaglevTranslationArrayBuilder {
deopt_info->result_size());
break;
}
case DeoptFrame::FrameType::kBuiltinContinuationFrame: {
const BuiltinContinuationDeoptFrame& builtin_continuation_frame =
top_frame.as_builtin_continuation();
translation_array_builder_->BeginBuiltinContinuationFrame(
Builtins::GetContinuationBytecodeOffset(
builtin_continuation_frame.builtin_id()),
GetDeoptLiteral(*builtin_continuation_frame.parent()
->as_interpreted()
.unit()
.shared_function_info()
.object()),
builtin_continuation_frame.parameters().length());
// Closure
translation_array_builder_->StoreOptimizedOut();
// Parameters
for (ValueNode* value : builtin_continuation_frame.parameters()) {
BuildDeoptFrameSingleValue(value, *current_input_location);
current_input_location++;
}
// Context
ValueNode* value = builtin_continuation_frame.context();
BuildDeoptFrameSingleValue(value, *current_input_location);
current_input_location++;
}
}
}
@@ -1117,6 +1172,36 @@ class MaglevTranslationArrayBuilder {
return_count);
break;
}
case DeoptFrame::FrameType::kBuiltinContinuationFrame: {
const BuiltinContinuationDeoptFrame& builtin_continuation_frame =
frame.as_builtin_continuation();
translation_array_builder_->BeginBuiltinContinuationFrame(
Builtins::GetContinuationBytecodeOffset(
builtin_continuation_frame.builtin_id()),
GetDeoptLiteral(*builtin_continuation_frame.parent()
->as_interpreted()
.unit()
.shared_function_info()
.object()),
builtin_continuation_frame.parameters().length());
// Closure
translation_array_builder_->StoreOptimizedOut();
// Parameters
for (ValueNode* value : builtin_continuation_frame.parameters()) {
BuildDeoptFrameSingleValue(value, *current_input_location);
current_input_location++;
}
// Context
ValueNode* value = builtin_continuation_frame.context();
BuildDeoptFrameSingleValue(value, *current_input_location);
current_input_location++;
break;
}
}
}
@@ -1322,9 +1407,8 @@ void MaglevCodeGenerator::EmitDeopts() {
translation_builder.BuildEagerDeopt(deopt_info);
if (masm_.compilation_info()->collect_source_positions()) {
- __ RecordDeoptReason(
- deopt_info->reason(), 0,
- deopt_info->top_frame().as_interpreted().source_position(),
+ __ RecordDeoptReason(deopt_info->reason(), 0,
+ GetSourcePosition(deopt_info->top_frame()),
deopt_index);
}
__ bind(deopt_info->deopt_entry_label());
@@ -1341,9 +1425,8 @@ void MaglevCodeGenerator::EmitDeopts() {
translation_builder.BuildLazyDeopt(deopt_info);
if (masm_.compilation_info()->collect_source_positions()) {
- __ RecordDeoptReason(
- DeoptimizeReason::kUnknown, 0,
- deopt_info->top_frame().as_interpreted().source_position(),
+ __ RecordDeoptReason(DeoptimizeReason::kUnknown, 0,
+ GetSourcePosition(deopt_info->top_frame()),
deopt_index);
}
__ bind(deopt_info->deopt_entry_label());
@@ -1462,8 +1545,7 @@ Handle<DeoptimizationData> MaglevCodeGenerator::GenerateDeoptimizationData(
int i = 0;
for (EagerDeoptInfo* deopt_info : code_gen_state_.eager_deopts()) {
DCHECK_NE(deopt_info->translation_index(), -1);
- raw_data.SetBytecodeOffset(
- i, deopt_info->top_frame().as_interpreted().bytecode_position());
+ raw_data.SetBytecodeOffset(i, GetBytecodeOffset(deopt_info->top_frame()));
raw_data.SetTranslationIndex(i,
Smi::FromInt(deopt_info->translation_index()));
raw_data.SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label()->pos()));
@@ -1474,8 +1556,7 @@ Handle<DeoptimizationData> MaglevCodeGenerator::GenerateDeoptimizationData(
}
for (LazyDeoptInfo* deopt_info : code_gen_state_.lazy_deopts()) {
DCHECK_NE(deopt_info->translation_index(), -1);
- raw_data.SetBytecodeOffset(
- i, deopt_info->top_frame().as_interpreted().bytecode_position());
+ raw_data.SetBytecodeOffset(i, GetBytecodeOffset(deopt_info->top_frame()));
raw_data.SetTranslationIndex(i,
Smi::FromInt(deopt_info->translation_index()));
raw_data.SetPc(i, Smi::FromInt(deopt_info->deopt_entry_label()->pos()));


@@ -180,9 +180,8 @@ class UseMarkingProcessor {
LoopUsedNodes* loop_used_nodes,
const ProcessingState& state) {
int use_id = node->id();
- detail::DeepForEachInput(
- deopt_info,
- [&](ValueNode* node, interpreter::Register reg, InputLocation* input) {
+ detail::DeepForEachInput(deopt_info,
+ [&](ValueNode* node, InputLocation* input) {
MarkUse(node, use_id, input, loop_used_nodes);
});
}
@@ -190,9 +189,8 @@ class UseMarkingProcessor {
LoopUsedNodes* loop_used_nodes,
const ProcessingState& state) {
int use_id = node->id();
- detail::DeepForEachInput(
- deopt_info,
- [&](ValueNode* node, interpreter::Register reg, InputLocation* input) {
+ detail::DeepForEachInput(deopt_info,
+ [&](ValueNode* node, InputLocation* input) {
MarkUse(node, use_id, input, loop_used_nodes);
});
}


@@ -3093,6 +3093,274 @@ void MaglevGraphBuilder::VisitTestGreaterThanOrEqual() {
VisitCompareOperation<Operation::kGreaterThanOrEqual>();
}
MaglevGraphBuilder::InferHasInPrototypeChainResult
MaglevGraphBuilder::InferHasInPrototypeChain(
ValueNode* receiver, compiler::HeapObjectRef prototype) {
auto stable_it = known_node_aspects().stable_maps.find(receiver);
auto unstable_it = known_node_aspects().unstable_maps.find(receiver);
auto stable_end = known_node_aspects().stable_maps.end();
auto unstable_end = known_node_aspects().unstable_maps.end();
// If either of the map sets is not found, then we don't know anything about
// the map of the receiver, so bail.
if (stable_it == stable_end || unstable_it == unstable_end) {
return kMayBeInPrototypeChain;
}
ZoneVector<compiler::MapRef> receiver_map_refs(zone());
// Try to determine either that all of the {receiver_maps} have the given
// {prototype} in their chain, or that none do. If we can't tell, return
// kMayBeInPrototypeChain.
bool all = true;
bool none = true;
for (const ZoneHandleSet<Map>& map_set :
{stable_it->second, unstable_it->second}) {
for (Handle<Map> map_handle : map_set) {
compiler::MapRef map = MakeRefAssumeMemoryFence(broker(), map_handle);
receiver_map_refs.push_back(map);
while (true) {
if (IsSpecialReceiverInstanceType(map.instance_type())) {
return kMayBeInPrototypeChain;
}
if (!map.IsJSObjectMap()) {
all = false;
break;
}
compiler::HeapObjectRef map_prototype = map.prototype();
if (map_prototype.equals(prototype)) {
none = false;
break;
}
map = map_prototype.map();
// TODO(v8:11457) Support dictionary mode prototypes here.
if (!map.is_stable() || map.is_dictionary_map()) {
return kMayBeInPrototypeChain;
}
if (map.oddball_type() == compiler::OddballType::kNull) {
all = false;
break;
}
}
}
}
DCHECK_IMPLIES(all, !none);
if (!all && !none) return kMayBeInPrototypeChain;
{
base::Optional<compiler::JSObjectRef> last_prototype;
if (all) {
// We don't need to protect the full chain if we found the prototype, we
// can stop at {prototype}. In fact we could stop at the one before
// {prototype} but since we're dealing with multiple receiver maps this
// might be a different object each time, so it's much simpler to include
// {prototype}. That does, however, mean that we must check {prototype}'s
// map stability.
if (!prototype.map().is_stable()) return kMayBeInPrototypeChain;
last_prototype = prototype.AsJSObject();
}
broker()->dependencies()->DependOnStablePrototypeChains(
receiver_map_refs, kStartAtPrototype, last_prototype);
}
DCHECK_EQ(all, !none);
return all ? kIsInPrototypeChain : kIsNotInPrototypeChain;
}
bool MaglevGraphBuilder::TryBuildFastHasInPrototypeChain(
ValueNode* object, compiler::ObjectRef prototype) {
if (!prototype.IsHeapObject()) return false;
auto in_prototype_chain =
InferHasInPrototypeChain(object, prototype.AsHeapObject());
if (in_prototype_chain == kMayBeInPrototypeChain) return false;
SetAccumulator(GetBooleanConstant(in_prototype_chain == kIsInPrototypeChain));
return true;
}
void MaglevGraphBuilder::BuildHasInPrototypeChain(
ValueNode* object, compiler::ObjectRef prototype) {
if (TryBuildFastHasInPrototypeChain(object, prototype)) return;
SetAccumulator(BuildCallRuntime(Runtime::kHasInPrototypeChain,
{object, GetConstant(prototype)}));
}
bool MaglevGraphBuilder::TryBuildFastOrdinaryHasInstance(
ValueNode* object, compiler::JSObjectRef callable,
ValueNode* callable_node_if_not_constant) {
const bool is_constant = callable_node_if_not_constant == nullptr;
if (!is_constant) return false;
if (callable.IsJSBoundFunction()) {
// OrdinaryHasInstance on bound functions turns into a recursive
// invocation of the instanceof operator again.
compiler::JSBoundFunctionRef function = callable.AsJSBoundFunction();
compiler::JSReceiverRef bound_target_function =
function.bound_target_function();
if (!bound_target_function.IsJSObject() ||
!TryBuildFastInstanceOf(object, bound_target_function.AsJSObject(),
nullptr)) {
// If we can't build a fast instance-of, build a slow one with the
// partial optimisation of using the bound target function constant.
SetAccumulator(BuildCallBuiltin<Builtin::kInstanceOf>(
{object, GetConstant(bound_target_function)}));
}
return true;
}
if (callable.IsJSFunction()) {
// Optimize if we currently know the "prototype" property.
compiler::JSFunctionRef function = callable.AsJSFunction();
// TODO(v8:7700): Remove the has_prototype_slot condition once the broker
// is always enabled.
if (!function.map().has_prototype_slot() ||
!function.has_instance_prototype(broker()->dependencies()) ||
function.PrototypeRequiresRuntimeLookup(broker()->dependencies())) {
return false;
}
compiler::ObjectRef prototype =
broker()->dependencies()->DependOnPrototypeProperty(function);
BuildHasInPrototypeChain(object, prototype);
return true;
}
return false;
}
void MaglevGraphBuilder::BuildOrdinaryHasInstance(
ValueNode* object, compiler::JSObjectRef callable,
ValueNode* callable_node_if_not_constant) {
if (TryBuildFastOrdinaryHasInstance(object, callable,
callable_node_if_not_constant))
return;
SetAccumulator(BuildCallBuiltin<Builtin::kOrdinaryHasInstance>(
{object, callable_node_if_not_constant ? callable_node_if_not_constant
: GetConstant(callable)}));
}
bool MaglevGraphBuilder::TryBuildFastInstanceOf(
ValueNode* object, compiler::JSObjectRef callable,
ValueNode* callable_node_if_not_constant) {
compiler::MapRef receiver_map = callable.map();
compiler::NameRef name =
MakeRef(broker(), local_isolate()->factory()->has_instance_symbol());
compiler::PropertyAccessInfo access_info = broker()->GetPropertyAccessInfo(
receiver_map, name, compiler::AccessMode::kLoad,
broker()->dependencies());
// TODO(v8:11457) Support dictionary mode holders here.
if (access_info.IsInvalid() || access_info.HasDictionaryHolder()) {
return false;
}
access_info.RecordDependencies(broker()->dependencies());
if (access_info.IsNotFound()) {
// If there's no @@hasInstance handler, the OrdinaryHasInstance operation
// takes over, but that requires the constructor to be callable.
if (!receiver_map.is_callable()) {
return false;
}
broker()->dependencies()->DependOnStablePrototypeChains(
access_info.lookup_start_object_maps(), kStartAtPrototype);
// Monomorphic property access.
if (callable_node_if_not_constant) {
BuildCheckMaps(callable_node_if_not_constant,
access_info.lookup_start_object_maps());
}
BuildOrdinaryHasInstance(object, callable, callable_node_if_not_constant);
return true;
}
if (access_info.IsFastDataConstant()) {
base::Optional<compiler::JSObjectRef> holder = access_info.holder();
bool found_on_proto = holder.has_value();
compiler::JSObjectRef holder_ref =
found_on_proto ? holder.value() : callable;
base::Optional<compiler::ObjectRef> has_instance_field =
holder_ref.GetOwnFastDataProperty(access_info.field_representation(),
access_info.field_index(),
broker()->dependencies());
if (!has_instance_field.has_value() ||
!has_instance_field->IsHeapObject() ||
!has_instance_field->AsHeapObject().map().is_callable()) {
return false;
}
if (found_on_proto) {
broker()->dependencies()->DependOnStablePrototypeChains(
access_info.lookup_start_object_maps(), kStartAtPrototype,
holder.value());
}
ValueNode* callable_node;
if (callable_node_if_not_constant) {
// Check that {callable_node_if_not_constant} is actually {callable}.
AddNewNode<CheckValue>({callable_node_if_not_constant}, callable);
callable_node = callable_node_if_not_constant;
} else {
callable_node = GetConstant(callable);
}
// Call @@hasInstance
Call* call = AddNewNode<Call>(
Call::kFixedInputCount + 2, ConvertReceiverMode::kNotNullOrUndefined,
Call::TargetType::kJSFunction, compiler::FeedbackSource(),
GetConstant(*has_instance_field), GetContext());
call->set_arg(0, callable_node);
call->set_arg(1, object);
// Make sure that a lazy deopt after the @@hasInstance call also performs
// ToBoolean before returning to the interpreter.
// TODO(leszeks): Wrap this in a helper.
new (call->lazy_deopt_info()) LazyDeoptInfo(
zone(),
BuiltinContinuationDeoptFrame(
Builtin::kToBooleanLazyDeoptContinuation, {}, GetContext(),
zone()->New<InterpretedDeoptFrame>(
call->lazy_deopt_info()->top_frame().as_interpreted())));
SetAccumulator(AddNewNode<ToBoolean>({call}));
return true;
}
return false;
}
bool MaglevGraphBuilder::TryBuildFastInstanceOfWithFeedback(
ValueNode* object, ValueNode* callable,
compiler::FeedbackSource feedback_source) {
compiler::ProcessedFeedback const& feedback =
broker()->GetFeedbackForInstanceOf(feedback_source);
// TurboFan emits generic code when there's no feedback, rather than
// deopting.
if (feedback.IsInsufficient()) return false;
// Check if the right hand side is a known receiver, or
// we have feedback from the InstanceOfIC.
if (callable->Is<Constant>() &&
callable->Cast<Constant>()->object().IsJSObject()) {
compiler::JSObjectRef callable_ref =
callable->Cast<Constant>()->object().AsJSObject();
return TryBuildFastInstanceOf(object, callable_ref, nullptr);
}
if (feedback_source.IsValid()) {
base::Optional<compiler::JSObjectRef> callable_from_feedback =
feedback.AsInstanceOf().value();
if (callable_from_feedback) {
return TryBuildFastInstanceOf(object, *callable_from_feedback, callable);
}
}
return false;
}
void MaglevGraphBuilder::VisitTestInstanceOf() {
// TestInstanceOf <src> <feedback_slot>
ValueNode* object = LoadRegisterTagged(0);
@@ -3100,8 +3368,10 @@ void MaglevGraphBuilder::VisitTestInstanceOf() {
FeedbackSlot slot = GetSlotOperand(1);
compiler::FeedbackSource feedback_source{feedback(), slot};
// TODO(victorgomes): Check feedback slot and do a static lookup for
// @@hasInstance.
if (TryBuildFastInstanceOfWithFeedback(object, callable, feedback_source)) {
return;
}
ValueNode* context = GetContext();
SetAccumulator(
AddNewNode<TestInstanceOf>({context, object, callable}, feedback_source));


@@ -1058,6 +1058,30 @@ class MaglevGraphBuilder {
bool TryReuseKnownPropertyLoad(ValueNode* lookup_start_object,
compiler::NameRef name);
enum InferHasInPrototypeChainResult {
kMayBeInPrototypeChain,
kIsInPrototypeChain,
kIsNotInPrototypeChain
};
InferHasInPrototypeChainResult InferHasInPrototypeChain(
ValueNode* receiver, compiler::HeapObjectRef prototype);
bool TryBuildFastHasInPrototypeChain(ValueNode* object,
compiler::ObjectRef prototype);
void BuildHasInPrototypeChain(ValueNode* object,
compiler::ObjectRef prototype);
bool TryBuildFastOrdinaryHasInstance(ValueNode* object,
compiler::JSObjectRef callable,
ValueNode* callable_node);
void BuildOrdinaryHasInstance(ValueNode* object,
compiler::JSObjectRef callable,
ValueNode* callable_node);
bool TryBuildFastInstanceOf(ValueNode* object,
compiler::JSObjectRef callable_ref,
ValueNode* callable_node);
bool TryBuildFastInstanceOfWithFeedback(
ValueNode* object, ValueNode* callable,
compiler::FeedbackSource feedback_source);
template <Operation kOperation>
void BuildGenericUnaryOperationNode();
template <Operation kOperation>


@@ -365,6 +365,57 @@ void MaglevPrintingVisitor::PreProcessBasicBlock(BasicBlock* block) {
namespace {
void PrintSingleDeoptFrame(
std::ostream& os, MaglevGraphLabeller* graph_labeller,
const DeoptFrame& frame, InputLocation*& current_input_location,
LazyDeoptInfo* lazy_deopt_info_if_top_frame = nullptr) {
switch (frame.type()) {
case DeoptFrame::FrameType::kInterpretedFrame: {
os << "@" << frame.as_interpreted().bytecode_position() << " : {";
bool first = true;
frame.as_interpreted().frame_state()->ForEachValue(
frame.as_interpreted().unit(),
[&](ValueNode* node, interpreter::Register reg) {
if (first) {
first = false;
} else {
os << ", ";
}
os << reg.ToString() << ":";
if (lazy_deopt_info_if_top_frame &&
lazy_deopt_info_if_top_frame->IsResultRegister(reg)) {
os << "<result>";
} else {
os << PrintNodeLabel(graph_labeller, node) << ":"
<< current_input_location->operand();
current_input_location++;
}
});
os << "}";
break;
}
case DeoptFrame::FrameType::kBuiltinContinuationFrame: {
os << "@" << Builtins::name(frame.as_builtin_continuation().builtin_id())
<< " : {";
int arg_index = 0;
for (ValueNode* node : frame.as_builtin_continuation().parameters()) {
os << "a" << arg_index << ":" << PrintNodeLabel(graph_labeller, node)
<< ":" << current_input_location->operand();
arg_index++;
current_input_location++;
os << ", ";
}
os << "<context>:"
<< PrintNodeLabel(graph_labeller,
frame.as_builtin_continuation().context())
<< ":" << current_input_location->operand();
current_input_location++;
os << "}";
break;
}
}
}
void RecursivePrintEagerDeopt(std::ostream& os,
std::vector<BasicBlock*> targets,
const DeoptFrame& frame,
@@ -379,26 +430,12 @@ void RecursivePrintEagerDeopt(std::ostream& os,
PrintVerticalArrows(os, targets);
PrintPadding(os, graph_labeller, max_node_id, 0);
if (!frame.parent()) {
os << " ↱ eager @" << frame.as_interpreted().bytecode_position() << " : {";
os << " ↱ eager ";
} else {
os << " │ @" << frame.as_interpreted().bytecode_position().ToInt()
<< " : {";
os << "";
}
- bool first = true;
- frame.as_interpreted().frame_state()->ForEachValue(
- frame.as_interpreted().unit(),
- [&](ValueNode* node, interpreter::Register reg) {
- if (first) {
- first = false;
- } else {
- os << ", ";
- }
- os << reg.ToString() << ":" << PrintNodeLabel(graph_labeller, node)
- << ":" << current_input_location->operand();
- current_input_location++;
- });
- os << "}\n";
+ PrintSingleDeoptFrame(os, graph_labeller, frame, current_input_location);
+ os << "\n";
}
void PrintEagerDeopt(std::ostream& os, std::vector<BasicBlock*> targets,
@@ -430,23 +467,9 @@ void RecursivePrintLazyDeopt(std::ostream& os, std::vector<BasicBlock*> targets,
PrintVerticalArrows(os, targets);
PrintPadding(os, graph_labeller, max_node_id, 0);
os << " │ @" << frame.as_interpreted().bytecode_position().ToInt()
<< " : {";
bool first = true;
frame.as_interpreted().frame_state()->ForEachValue(
frame.as_interpreted().unit(),
[&](ValueNode* node, interpreter::Register reg) {
if (first) {
first = false;
} else {
os << ", ";
}
os << reg.ToString() << ":" << PrintNodeLabel(graph_labeller, node)
<< ":" << current_input_location->operand();
current_input_location++;
});
os << "}\n";
os << "";
PrintSingleDeoptFrame(os, graph_labeller, frame, current_input_location);
os << "\n";
}
template <typename NodeT>
@@ -464,27 +487,10 @@ void PrintLazyDeopt(std::ostream& os, std::vector<BasicBlock*> targets,
PrintVerticalArrows(os, targets);
PrintPadding(os, graph_labeller, max_node_id, 0);
os << " ↳ lazy @" << top_frame.as_interpreted().bytecode_position()
<< " : {";
bool first = true;
top_frame.as_interpreted().frame_state()->ForEachValue(
top_frame.as_interpreted().unit(),
[&](ValueNode* node, interpreter::Register reg) {
if (first) {
first = false;
} else {
os << ", ";
}
os << reg.ToString() << ":";
if (deopt_info->IsResultRegister(reg)) {
os << "<result>";
} else {
os << PrintNodeLabel(graph_labeller, node) << ":"
<< current_input_location->operand();
current_input_location++;
}
});
os << "}\n";
os << " ↳ lazy ";
PrintSingleDeoptFrame(os, graph_labeller, top_frame, current_input_location,
deopt_info);
os << "\n";
}
template <typename NodeT>
@@ -509,15 +515,20 @@ void PrintExceptionHandlerPoint(std::ostream& os,
// The exception handler liveness should be a subset of lazy_deopt_info one.
auto* liveness = block->state()->frame_state().liveness();
LazyDeoptInfo* deopt_info = node->lazy_deopt_info();
- const DeoptFrame& top_frame = deopt_info->top_frame();
+ const InterpretedDeoptFrame& lazy_frame =
+ deopt_info->top_frame().type() ==
+ DeoptFrame::FrameType::kBuiltinContinuationFrame
+ ? deopt_info->top_frame().parent()->as_interpreted()
+ : deopt_info->top_frame().as_interpreted();
PrintVerticalArrows(os, targets);
PrintPadding(os, graph_labeller, max_node_id, 0);
os << " ↳ throw @" << handler_offset << " : {";
bool first = true;
- top_frame.as_interpreted().frame_state()->ForEachValue(
- top_frame.as_interpreted().unit(),
+ lazy_frame.as_interpreted().frame_state()->ForEachValue(
+ lazy_frame.as_interpreted().unit(),
[&](ValueNode* node, interpreter::Register reg) {
if (!reg.is_parameter() && !liveness->RegisterIsLive(reg.index())) {
// Skip, since not live at the handler offset.


@@ -28,21 +28,6 @@ std::ostream& operator<<(std::ostream& os, const ValueRepresentation& repr) {
return os;
}
- namespace {
- ValueRepresentation ToValueRepresentation(MachineType type) {
- switch (type.representation()) {
- case MachineRepresentation::kTagged:
- case MachineRepresentation::kTaggedSigned:
- case MachineRepresentation::kTaggedPointer:
- return ValueRepresentation::kTagged;
- case MachineRepresentation::kFloat64:
- return ValueRepresentation::kFloat64;
- default:
- return ValueRepresentation::kInt32;
- }
- }
- } // namespace
class Graph;
// TODO(victorgomes): Currently it only verifies the inputs for all ValueNodes
@@ -59,6 +44,19 @@ class MaglevGraphVerifier {
void PostProcessGraph(Graph* graph) {}
void PreProcessBasicBlock(BasicBlock* block) {}
+ static ValueRepresentation ToValueRepresentation(MachineType type) {
+ switch (type.representation()) {
+ case MachineRepresentation::kTagged:
+ case MachineRepresentation::kTaggedSigned:
+ case MachineRepresentation::kTaggedPointer:
+ return ValueRepresentation::kTagged;
+ case MachineRepresentation::kFloat64:
+ return ValueRepresentation::kFloat64;
+ default:
+ return ValueRepresentation::kInt32;
+ }
+ }
void CheckValueInputIs(NodeBase* node, int i, ValueRepresentation expected) {
ValueNode* input = node->input(i).node();
ValueRepresentation got = input->properties().value_representation();
@@ -142,6 +140,7 @@ class MaglevGraphVerifier {
case Opcode::kSetPendingMessage:
case Opcode::kStoreMap:
case Opcode::kStringLength:
case Opcode::kToBoolean:
case Opcode::kToBooleanLogicalNot:
case Opcode::kTestUndetectable:
case Opcode::kTestTypeOf:


@@ -26,9 +26,15 @@ void DeepForEachInputImpl(const DeoptFrame& frame,
frame.as_interpreted().frame_state()->ForEachValue(
frame.as_interpreted().unit(),
[&](ValueNode* node, interpreter::Register reg) {
- f(node, reg, &input_locations[index++]);
+ f(node, &input_locations[index++]);
});
break;
case DeoptFrame::FrameType::kBuiltinContinuationFrame:
for (ValueNode* node : frame.as_builtin_continuation().parameters()) {
f(node, &input_locations[index++]);
}
f(frame.as_builtin_continuation().context(), &input_locations[index++]);
UNREACHABLE();
}
}
@@ -42,10 +48,10 @@ void DeepForEachInput(const EagerDeoptInfo* deopt_info, Function&& f) {
template <typename Function>
void DeepForEachInput(const LazyDeoptInfo* deopt_info, Function&& f) {
int index = 0;
InputLocation* input_locations = deopt_info->input_locations();
const DeoptFrame& top_frame = deopt_info->top_frame();
if (top_frame.parent()) {
- DeepForEachInputImpl(*top_frame.parent(), deopt_info->input_locations(),
- index, f);
+ DeepForEachInputImpl(*top_frame.parent(), input_locations, index, f);
}
// Handle the top-of-frame info separately, since we have to skip the result
// location.
@@ -57,9 +63,15 @@ void DeepForEachInput(const LazyDeoptInfo* deopt_info, Function&& f) {
// Skip over the result location since it is irrelevant for lazy
// deopts (unoptimized code will recreate the result).
if (deopt_info->IsResultRegister(reg)) return;
- f(node, reg, &deopt_info->input_locations()[index++]);
+ f(node, &input_locations[index++]);
});
break;
case DeoptFrame::FrameType::kBuiltinContinuationFrame:
for (ValueNode* node : top_frame.as_builtin_continuation().parameters()) {
f(node, &input_locations[index++]);
};
f(top_frame.as_builtin_continuation().context(),
&input_locations[index++]);
}
}


@@ -145,8 +145,7 @@ class SaveRegisterStateForCall {
RegList GetGeneralRegistersUsedAsInputs(const EagerDeoptInfo* deopt_info) {
RegList regs;
detail::DeepForEachInput(deopt_info,
- [&regs](ValueNode* value, interpreter::Register reg,
- InputLocation* input) {
+ [&regs](ValueNode* value, InputLocation* input) {
if (input->IsGeneralRegister()) {
regs.set(input->AssignedGeneralRegister());
}
@@ -221,7 +220,7 @@ void AllocateRaw(MaglevAssembler* masm, RegisterSnapshot& register_snapshot,
__ bind(*done);
}
- void ToBoolean(MaglevAssembler* masm, Register value, ZoneLabelRef is_true,
+ void EmitToBoolean(MaglevAssembler* masm, Register value, ZoneLabelRef is_true,
ZoneLabelRef is_false, bool fallthrough_when_true) {
Register map = kScratchRegister;
@@ -378,9 +377,16 @@ size_t GetInputLocationsArraySize(const DeoptFrame& top_frame) {
size_t size = 0;
const DeoptFrame* frame = &top_frame;
do {
- const InterpretedDeoptFrame& interpreted_frame = frame->as_interpreted();
- size += interpreted_frame.frame_state()->size(interpreted_frame.unit());
- frame = interpreted_frame.parent();
+ switch (frame->type()) {
+ case DeoptFrame::FrameType::kInterpretedFrame:
+ size += frame->as_interpreted().frame_state()->size(
+ frame->as_interpreted().unit());
+ break;
+ case DeoptFrame::FrameType::kBuiltinContinuationFrame:
+ size += frame->as_builtin_continuation().parameters().size() + 1;
+ break;
+ }
+ frame = frame->parent();
} while (frame != nullptr);
return size;
}
@@ -3205,6 +3211,28 @@ void SetPendingMessage::GenerateCode(MaglevAssembler* masm,
}
}
void ToBoolean::AllocateVreg(MaglevVregAllocationState* vreg_state) {
UseRegister(value());
DefineAsRegister(vreg_state, this);
}
void ToBoolean::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Register object = ToRegister(value());
Register return_value = ToRegister(result());
Label done;
Zone* zone = masm->compilation_info()->zone();
ZoneLabelRef object_is_true(zone), object_is_false(zone);
// TODO(leszeks): We're likely to be calling this on an existing boolean --
// maybe that's a case we should fast-path here and re-use that boolean value?
EmitToBoolean(masm, object, object_is_true, object_is_false, true);
__ bind(*object_is_true);
__ LoadRoot(return_value, RootIndex::kTrueValue);
__ jmp(&done, Label::kNear);
__ bind(*object_is_false);
__ LoadRoot(return_value, RootIndex::kFalseValue);
__ bind(&done);
}
void ToBooleanLogicalNot::AllocateVreg(MaglevVregAllocationState* vreg_state) {
UseRegister(value());
DefineAsRegister(vreg_state, this);
@@ -3216,7 +3244,7 @@ void ToBooleanLogicalNot::GenerateCode(MaglevAssembler* masm,
Label done;
Zone* zone = masm->compilation_info()->zone();
ZoneLabelRef object_is_true(zone), object_is_false(zone);
- ToBoolean(masm, object, object_is_true, object_is_false, true);
+ EmitToBoolean(masm, object, object_is_true, object_is_false, true);
__ bind(*object_is_true);
__ LoadRoot(return_value, RootIndex::kFalseValue);
__ jmp(&done, Label::kNear);
@@ -4259,9 +4287,7 @@ void AttemptOnStackReplacement(MaglevAssembler* masm,
// configurable.
RegisterSnapshot snapshot = node->register_snapshot();
detail::DeepForEachInput(
- node->eager_deopt_info(),
- [&](ValueNode* node, interpreter::Register reg,
- InputLocation* input) {
+ node->eager_deopt_info(), [&](ValueNode* node, InputLocation* input) {
if (!input->IsAnyRegister()) return;
if (input->IsDoubleRegister()) {
snapshot.live_double_registers.set(
@@ -4445,7 +4471,7 @@ void BranchIfToBooleanTrue::GenerateCode(MaglevAssembler* masm,
ZoneLabelRef false_label =
ZoneLabelRef::UnsafeFromLabelPointer(if_false()->label());
bool fallthrough_when_true = (if_true() == state.next_block());
- ToBoolean(masm, ToRegister(condition_input()), true_label, false_label,
+ EmitToBoolean(masm, ToRegister(condition_input()), true_label, false_label,
fallthrough_when_true);
}


@@ -179,6 +179,7 @@ class CompactInterpreterFrameState;
V(SetPendingMessage) \
V(StringAt) \
V(StringLength) \
V(ToBoolean) \
V(ToBooleanLogicalNot) \
V(TaggedEqual) \
V(TaggedNotEqual) \
@@ -665,17 +666,19 @@ class Input : public InputLocation {
};
class InterpretedDeoptFrame;
class BuiltinContinuationDeoptFrame;
class DeoptFrame {
public:
enum class FrameType {
kInterpretedFrame,
- // kBuiltinContinuationFrame,
+ kBuiltinContinuationFrame,
};
FrameType type() const { return type_; }
const DeoptFrame* parent() const { return parent_; }
inline const InterpretedDeoptFrame& as_interpreted() const;
inline const BuiltinContinuationDeoptFrame& as_builtin_continuation() const;
protected:
struct InterpretedFrameData {
@@ -684,14 +687,24 @@
BytecodeOffset bytecode_position;
SourcePosition source_position;
};
struct BuiltinContinuationFrameData {
Builtin builtin_id;
base::Vector<ValueNode*> parameters;
ValueNode* context;
};
DeoptFrame(InterpretedFrameData data, const DeoptFrame* parent)
: interpreted_frame_data_(data),
type_(FrameType::kInterpretedFrame),
parent_(parent) {}
DeoptFrame(BuiltinContinuationFrameData data, const DeoptFrame* parent)
: builtin_continuation_frame_data_(data),
type_(FrameType::kBuiltinContinuationFrame),
parent_(parent) {}
union {
const InterpretedFrameData interpreted_frame_data_;
const BuiltinContinuationFrameData builtin_continuation_frame_data_;
};
FrameType type_;
const DeoptFrame* parent_;
@@ -730,6 +743,35 @@ inline const InterpretedDeoptFrame& DeoptFrame::as_interpreted() const {
return static_cast<const InterpretedDeoptFrame&>(*this);
}
class BuiltinContinuationDeoptFrame : public DeoptFrame {
public:
BuiltinContinuationDeoptFrame(Builtin builtin_id,
base::Vector<ValueNode*> parameters,
ValueNode* context, const DeoptFrame* parent)
: DeoptFrame(
BuiltinContinuationFrameData{builtin_id, parameters, context},
parent) {}
const Builtin& builtin_id() const {
return builtin_continuation_frame_data_.builtin_id;
}
base::Vector<ValueNode*> parameters() const {
return builtin_continuation_frame_data_.parameters;
}
ValueNode* context() const {
return builtin_continuation_frame_data_.context;
}
};
// Make sure storing/passing deopt frames by value doesn't truncate them.
static_assert(sizeof(BuiltinContinuationDeoptFrame) == sizeof(DeoptFrame));
inline const BuiltinContinuationDeoptFrame&
DeoptFrame::as_builtin_continuation() const {
DCHECK_EQ(type(), FrameType::kBuiltinContinuationFrame);
return static_cast<const BuiltinContinuationDeoptFrame&>(*this);
}
class DeoptInfo {
protected:
DeoptInfo(Zone* zone, DeoptFrame top_frame);
@@ -1999,6 +2041,19 @@ class SetPendingMessage : public FixedInputValueNodeT<1, SetPendingMessage> {
void PrintParams(std::ostream&, MaglevGraphLabeller*) const {}
};
class ToBoolean : public FixedInputValueNodeT<1, ToBoolean> {
using Base = FixedInputValueNodeT<1, ToBoolean>;
public:
explicit ToBoolean(uint64_t bitfield) : Base(bitfield) {}
Input& value() { return Node::input(0); }
void AllocateVreg(MaglevVregAllocationState*);
void GenerateCode(MaglevAssembler*, const ProcessingState&);
void PrintParams(std::ostream&, MaglevGraphLabeller*) const {}
};
class ToBooleanLogicalNot
: public FixedInputValueNodeT<1, ToBooleanLogicalNot> {
using Base = FixedInputValueNodeT<1, ToBooleanLogicalNot>;


@@ -531,8 +531,7 @@ void StraightForwardRegisterAllocator::UpdateUse(
void StraightForwardRegisterAllocator::UpdateUse(
const EagerDeoptInfo& deopt_info) {
detail::DeepForEachInput(
- &deopt_info,
- [&](ValueNode* node, interpreter::Register reg, InputLocation* input) {
+ &deopt_info, [&](ValueNode* node, InputLocation* input) {
if (v8_flags.trace_maglev_regalloc) {
printing_visitor_->os()
<< "- using " << PrintNodeLabel(graph_labeller(), node) << "\n";
@@ -549,8 +548,7 @@ void StraightForwardRegisterAllocator::UpdateUse(
void StraightForwardRegisterAllocator::UpdateUse(
const LazyDeoptInfo& deopt_info) {
detail::DeepForEachInput(
- &deopt_info,
- [&](ValueNode* node, interpreter::Register reg, InputLocation* input) {
+ &deopt_info, [&](ValueNode* node, InputLocation* input) {
if (v8_flags.trace_maglev_regalloc) {
printing_visitor_->os()
<< "- using " << PrintNodeLabel(graph_labeller(), node) << "\n";


@@ -0,0 +1,46 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --maglev
(function() {
var A = {};
A[Symbol.hasInstance] = function(x) {
return 1;
};
var a = {};
function foo(o) {
return o instanceof A;
};
%PrepareFunctionForOptimization(foo);
foo(a);
foo(a);
assertSame(foo(a), true);
%OptimizeMaglevOnNextCall(foo);
assertSame(foo(a), true);
})();
(function() {
var A = {};
A[Symbol.hasInstance] = function(x) {
%DeoptimizeFunction(foo);
return 1;
};
var a = {};
function foo(o) {
return o instanceof A;
};
%PrepareFunctionForOptimization(foo);
foo(a);
foo(a);
assertSame(foo(a), true);
%OptimizeMaglevOnNextCall(foo);
assertSame(foo(a), true);
})();