[crankshaft] Replace stub cache harvesting with a bit of type propagation

Harvesting maps from the stub cache for megamorphic ICs is both slow
(linear in the size of the stub cache) and imprecise (as it finds all
maps that have a cached handler for the given property name).
In the canonical megamorphic situation, this type feedback is useless
anyway. The interesting case is when we can filter it down to a single
map; however, in these cases it is often possible to derive this map
just by looking at the HGraph, which is both faster and more reliable.

Review URL: https://codereview.chromium.org/1669213003

Cr-Commit-Position: refs/heads/master@{#33998}
Authored by jkummerow on 2016-02-15 07:21:31 -08:00; committed by Commit bot
parent e082ebdbf3
commit 5aa2cb3bcc
3 changed files with 56 additions and 11 deletions
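
To make the commit message's "slow and imprecise" claim concrete, here is a miniature model of what harvesting from the stub cache amounts to. This is a hedged sketch only: Entry, StubCacheModel and CollectMapsForName are invented stand-ins, not V8's real StubCache API (which is a fixed-size hashed cache, not a flat list), but it illustrates the two costs the message names: the scan touches every cached entry, and it returns every map that ever had a handler for the name, whether or not this call site saw it.

// Hypothetical model of stub cache harvesting; not V8's actual StubCache.
#include <cstdint>
#include <string>
#include <vector>

struct Entry {
  uint32_t map_id;   // stand-in for a Map pointer
  std::string name;  // property name the cached handler belongs to
};

struct StubCacheModel {
  std::vector<Entry> entries;

  // "Harvesting": linear in the number of cached entries, and it collects
  // every map that has *any* handler for `name`, not just the maps this
  // particular call site has actually seen.
  std::vector<uint32_t> CollectMapsForName(const std::string& name) const {
    std::vector<uint32_t> maps;
    for (const Entry& e : entries) {  // O(size of the stub cache)
      if (e.name == name) maps.push_back(e.map_id);
    }
    return maps;
  }
};

int main() {
  StubCacheModel cache{{{1, "x"}, {2, "y"}, {3, "x"}}};
  // Yields maps 1 and 3: every map that ever cached a handler for "x".
  return cache.CollectMapsForName("x").size() == 2 ? 0 : 1;
}

Because both costs depend on global cache contents rather than on the call site, the CL flips collect_megamorphic_maps_from_stub_cache to false (see the DEFINE_BOOL hunk below) and relies on local inference in the HGraph instead.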


@@ -6780,18 +6780,48 @@ void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
   }
 }
 
-static bool ComputeReceiverTypes(Expression* expr,
-                                 HValue* receiver,
+static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
                                  SmallMapList** t,
-                                 Zone* zone) {
+                                 HOptimizedGraphBuilder* builder) {
+  Zone* zone = builder->zone();
   SmallMapList* maps = expr->GetReceiverTypes();
   *t = maps;
   bool monomorphic = expr->IsMonomorphic();
   if (maps != NULL && receiver->HasMonomorphicJSObjectType()) {
-    Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
-    maps->FilterForPossibleTransitions(root_map);
-    monomorphic = maps->length() == 1;
+    if (maps->length() > 0) {
+      Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
+      maps->FilterForPossibleTransitions(root_map);
+      monomorphic = maps->length() == 1;
+    } else {
+      // No type feedback, see if we can infer the type. This is safely
+      // possible if the receiver had a known map at some point, and no
+      // map-changing stores have happened to it since.
+      Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
+      if (candidate_map->is_observed()) return false;
+      for (HInstruction* current = builder->current_block()->last();
+           current != nullptr; current = current->previous()) {
+        if (current->IsBlockEntry()) break;
+        if (current->CheckChangesFlag(kMaps)) {
+          // Only allow map changes that store the candidate map. We don't
+          // need to care which object the map is being written into.
+          if (!current->IsStoreNamedField()) break;
+          HStoreNamedField* map_change = HStoreNamedField::cast(current);
+          if (!map_change->value()->IsConstant()) break;
+          HConstant* map_constant = HConstant::cast(map_change->value());
+          if (!map_constant->representation().IsTagged()) break;
+          Handle<Object> map = map_constant->handle(builder->isolate());
+          if (!map.is_identical_to(candidate_map)) break;
+        }
+        if (current == receiver) {
+          // We made it all the way back to the receiver without encountering
+          // a map change! So we can assume that the receiver still has the
+          // candidate_map we know about.
+          maps->Add(candidate_map, zone);
+          monomorphic = true;
+          break;
+        }
+      }
+    }
   }
   return monomorphic && CanInlinePropertyAccess(maps->first());
 }
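
For readers who don't know Hydrogen's class zoo, the new else-branch above boils down to a backward walk over the current basic block. Below is a self-contained sketch of that walk under invented types; Instruction, Kind and InferReceiverMap are not V8 names, and the real code additionally bails out on observed maps and only trusts tagged HConstant map stores. Starting from the newest instruction, anything that can change a map kills the inference unless it is a store writing exactly the map we already expect; reaching the receiver's definition means the receiver still holds its known map.

// Hypothetical, simplified model of the backward walk; these types do not
// exist in V8 and only mirror the control flow of ComputeReceiverTypes.
#include <optional>
#include <string>
#include <vector>

struct Instruction {
  enum class Kind { kReceiver, kChangesMaps, kStoreMap, kOther };
  Kind kind;
  std::string stored_map;  // only meaningful for kStoreMap
};

// Walk the block backwards from its last instruction; return the candidate
// map if nothing in between could have replaced it.
std::optional<std::string> InferReceiverMap(
    const std::vector<Instruction>& block, const std::string& candidate_map) {
  for (auto it = block.rbegin(); it != block.rend(); ++it) {
    if (it->kind == Instruction::Kind::kReceiver) {
      // Reached the receiver without a dangerous map change: it still has
      // the map we knew about.
      return candidate_map;
    }
    if (it->kind == Instruction::Kind::kChangesMaps) {
      return std::nullopt;  // some unknown map change: give up
    }
    if (it->kind == Instruction::Kind::kStoreMap &&
        it->stored_map != candidate_map) {
      // A map store is only harmless if it re-stores the expected map.
      return std::nullopt;
    }
  }
  return std::nullopt;  // receiver not defined in this block
}

int main() {
  // Forward order: the receiver is defined first, then a store writing the
  // known map, then an unrelated instruction.
  std::vector<Instruction> block = {
      {Instruction::Kind::kReceiver, ""},
      {Instruction::Kind::kStoreMap, "PointMap"},
      {Instruction::Kind::kOther, ""},
  };
  // Succeeds: the only map-changing store wrote the map we already expected.
  return InferReceiverMap(block, "PointMap").has_value() ? 0 : 1;
}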
@@ -7700,7 +7730,7 @@ HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
   HInstruction* instr = NULL;
 
   SmallMapList* maps;
-  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, zone());
+  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);
 
   bool force_generic = false;
   if (expr->GetKeyType() == PROPERTY) {
@@ -7857,7 +7887,7 @@ HValue* HOptimizedGraphBuilder::BuildNamedAccess(
     Expression* expr, FeedbackVectorSlot slot, HValue* object,
     Handle<Name> name, HValue* value, bool is_uninitialized) {
   SmallMapList* maps;
-  ComputeReceiverTypes(expr, object, &maps, zone());
+  ComputeReceiverTypes(expr, object, &maps, this);
   DCHECK(maps != NULL);
 
   if (maps->length() > 0) {
@@ -9701,7 +9731,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
     HValue* receiver = Top();
 
     SmallMapList* maps;
-    ComputeReceiverTypes(expr, receiver, &maps, zone());
+    ComputeReceiverTypes(expr, receiver, &maps, this);
 
     if (prop->key()->IsPropertyName() && maps->length() > 0) {
       Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();


@@ -332,7 +332,7 @@ DEFINE_INT(max_inlined_nodes_cumulative, 400,
            "maximum cumulative number of AST nodes considered for inlining")
 DEFINE_BOOL(loop_invariant_code_motion, true, "loop invariant code motion")
 DEFINE_BOOL(fast_math, true, "faster (but maybe less accurate) math functions")
-DEFINE_BOOL(collect_megamorphic_maps_from_stub_cache, true,
+DEFINE_BOOL(collect_megamorphic_maps_from_stub_cache, false,
             "crankshaft harvests type feedback from stub cache")
 DEFINE_BOOL(hydrogen_stats, false, "print statistics for hydrogen")
 DEFINE_BOOL(trace_check_elimination, false, "trace check elimination phase")


@@ -112,6 +112,9 @@ void AddInternalFieldAccessor(v8::Isolate* isolate,
 
 // "Fast" accessor that accesses an internal field.
 TEST(FastAccessorWithInternalField) {
+  // Crankshaft support for fast accessors is not implemented; crankshafted
+  // code uses the slow accessor which breaks this test's expectations.
+  v8::internal::FLAG_always_opt = false;
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(isolate);
@@ -142,6 +145,9 @@ TEST(FastAccessorWithInternalField) {
 
 // "Fast" accessor with control flow via ...OrReturnNull methods.
 TEST(FastAccessorOrReturnNull) {
+  // Crankshaft support for fast accessors is not implemented; crankshafted
+  // code uses the slow accessor which breaks this test's expectations.
+  v8::internal::FLAG_always_opt = false;
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(isolate);
@@ -191,6 +197,9 @@ TEST(FastAccessorOrReturnNull) {
 
 // "Fast" accessor with simple control flow via explicit labels.
 TEST(FastAccessorControlFlowWithLabels) {
+  // Crankshaft support for fast accessors is not implemented; crankshafted
+  // code uses the slow accessor which breaks this test's expectations.
+  v8::internal::FLAG_always_opt = false;
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(isolate);
@@ -226,6 +235,9 @@ TEST(FastAccessorControlFlowWithLabels) {
 
 // "Fast" accessor, loading things.
 TEST(FastAccessorLoad) {
+  // Crankshaft support for fast accessors is not implemented; crankshafted
+  // code uses the slow accessor which breaks this test's expectations.
+  v8::internal::FLAG_always_opt = false;
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(isolate);
@@ -305,6 +317,9 @@ void ApiCallbackParam(const v8::FunctionCallbackInfo<v8::Value>& info) {
 
 // "Fast" accessor, callback to embedder
 TEST(FastAccessorCallback) {
+  // Crankshaft support for fast accessors is not implemented; crankshafted
+  // code uses the slow accessor which breaks this test's expectations.
+  v8::internal::FLAG_always_opt = false;
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
   v8::HandleScope scope(isolate);