Optimize array.indexOf, array.includes builtins for frozen, sealed objects

Introduce a FastJSArrayForRead type that additionally covers sealed and frozen elements kinds, and use it in the affected builtins.

In a micro-benchmark, this shows a roughly 8x improvement:

Before:
  ArrayIndexOf
    ArrayIndexOf-Numbers(Score): 0.0780
  ArrayIncludes
    ArrayIncludes-Numbers(Score): 0.0773

After:
  ArrayIndexOf
    ArrayIndexOf-Numbers(Score): 0.621
  ArrayIncludes
    ArrayIncludes-Numbers(Score): 0.608
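
For reference, the pattern that benefits looks roughly like the snippet below; this is an illustrative TypeScript sketch, not the actual benchmark source:

    // Freezing an array changes its elements kind, which previously pushed
    // indexOf/includes onto the slow path even though they only read.
    const arr: number[] = [];
    for (let i = 0; i < 1000; i++) arr.push(i);
    Object.freeze(arr);

    // With FastJSArrayForRead, these read-only builtins stay on the fast path.
    arr.indexOf(999);
    arr.includes(999);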


Bug: v8:6831
Change-Id: Ic79c0ba7e85c40625ecb42faed16816fa066b1d3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1604322
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Z Nguyen-Huu <duongn@microsoft.com>
Cr-Commit-Position: refs/heads/master@{#61474}
Author: Z Duong Nguyen-Huu, 2019-05-13 14:44:59 -07:00 (committed by Commit Bot)
Commit: ea575e8283, parent: 23834cbdfd
3 changed files with 44 additions and 8 deletions

@@ -311,6 +311,11 @@ macro NewJSArray(implicit context: Context)(map: Map, length: Smi): JSArray {
 // A HeapObject with a JSArray map, and either fast packed elements, or fast
 // holey elements when the global NoElementsProtector is not invalidated.
 transient type FastJSArray extends JSArray;
+
+// A HeapObject with a JSArray map, and either fast packed elements, or fast
+// holey elements or frozen, sealed elements when the global NoElementsProtector
+// is not invalidated.
+transient type FastJSArrayForRead extends JSArray;
 
 // A FastJSArray when the global ArraySpeciesProtector is not invalidated.
 transient type FastJSArrayForCopy extends FastJSArray;
@@ -781,6 +786,8 @@ const PACKED_DOUBLE_ELEMENTS:
     constexpr ElementsKind generates 'PACKED_DOUBLE_ELEMENTS';
 const HOLEY_DOUBLE_ELEMENTS:
     constexpr ElementsKind generates 'HOLEY_DOUBLE_ELEMENTS';
+const LAST_FROZEN_ELEMENTS_KIND:
+    constexpr ElementsKind generates 'LAST_FROZEN_ELEMENTS_KIND';
 const DICTIONARY_ELEMENTS:
     constexpr ElementsKind generates 'DICTIONARY_ELEMENTS';
@@ -1749,6 +1756,24 @@ Cast<FastJSArray>(implicit context: Context)(o: HeapObject): FastJSArray
   return %RawDownCast<FastJSArray>(o);
 }
 
+Cast<FastJSArrayForRead>(implicit context: Context)(o: HeapObject):
+    FastJSArrayForRead
+    labels CastError {
+  const map: Map = o.map;
+  if (!IsJSArrayMap(map)) goto CastError;
+
+  // Bailout if receiver has slow elements.
+  const elementsKind: ElementsKind = LoadMapElementsKind(map);
+  if (!IsElementsKindLessThanOrEqual(elementsKind, LAST_FROZEN_ELEMENTS_KIND))
+    goto CastError;
+
+  // Verify that our prototype is the initial array prototype.
+  if (!IsPrototypeInitialArrayPrototype(map)) goto CastError;
+
+  if (IsNoElementsProtectorCellInvalid()) goto CastError;
+  return %RawDownCast<FastJSArrayForRead>(o);
+}
+
 Cast<FastJSArrayForCopy>(implicit context: Context)(o: HeapObject):
     FastJSArrayForCopy
     labels CastError {
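
Read as a predicate, the new cast admits any elements kind up to LAST_FROZEN_ELEMENTS_KIND (the fast tagged and double kinds plus the sealed/frozen kinds), as long as the prototype is the untouched initial Array.prototype and the NoElements protector still holds. A rough TypeScript model of the same check follows; every type, helper, and constant value here is a stand-in for illustration, not the real V8 API:

    // Stand-in types mirroring the Torque names above; values are assumed.
    type ElementsKind = number;
    const LAST_FROZEN_ELEMENTS_KIND: ElementsKind = 9;  // illustrative value
    interface VMap {
      isJSArrayMap: boolean;
      elementsKind: ElementsKind;
      prototypeIsInitialArrayPrototype: boolean;
    }
    interface HeapObject { map: VMap; }
    let noElementsProtectorInvalid = false;

    // Mirrors Cast<FastJSArrayForRead>; returning null stands in for taking
    // the CastError label.
    function castFastJSArrayForRead(o: HeapObject): HeapObject | null {
      const map = o.map;
      if (!map.isJSArrayMap) return null;
      // Slow elements (dictionary etc.) sit above the last frozen kind.
      if (map.elementsKind > LAST_FROZEN_ELEMENTS_KIND) return null;
      // The prototype must be the unmodified initial array prototype...
      if (!map.prototypeIsInitialArrayPrototype) return null;
      // ...and no prototype may have gained elements (protector intact).
      if (noElementsProtectorInvalid) return null;
      return o;
    }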
@@ -2604,6 +2629,14 @@ macro BranchIfFastJSArray(o: Object, context: Context): never
   BranchIf<FastJSArray>(o) otherwise True, False;
 }
 
+macro BranchIfFastJSArrayForRead(o: Object, context: Context): never
+    labels True, False {
+  // Long-term, it's likely not a good idea to have this slow-path test here,
+  // since it fundamentally breaks the type system.
+  GotoIfForceSlowPath() otherwise False;
+  BranchIf<FastJSArrayForRead>(o) otherwise True, False;
+}
+
 macro BranchIfNotFastJSArray(o: Object, context: Context): never
     labels True, False {
   BranchIfNot<FastJSArray>(o) otherwise True, False;

@@ -936,7 +936,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
   // Take slow path if not a JSArray, if retrieving elements requires
   // traversing prototype, or if access checks are required.
-  BranchIfFastJSArray(receiver, context, &init_index, &call_runtime);
+  BranchIfFastJSArrayForRead(receiver, context, &init_index, &call_runtime);
 
   BIND(&init_index);
   VARIABLE(index_var, MachineType::PointerRepresentation(), intptr_zero);
@@ -994,12 +994,16 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
   STATIC_ASSERT(PACKED_ELEMENTS == 2);
   STATIC_ASSERT(HOLEY_ELEMENTS == 3);
-  GotoIf(Uint32LessThanOrEqual(elements_kind, Int32Constant(HOLEY_ELEMENTS)),
+  GotoIf(IsElementsKindLessThanOrEqual(elements_kind, HOLEY_ELEMENTS),
          &if_smiorobjects);
-  GotoIf(Word32Equal(elements_kind, Int32Constant(PACKED_DOUBLE_ELEMENTS)),
-         &if_packed_doubles);
-  GotoIf(Word32Equal(elements_kind, Int32Constant(HOLEY_DOUBLE_ELEMENTS)),
+  GotoIf(
+      ElementsKindEqual(elements_kind, Int32Constant(PACKED_DOUBLE_ELEMENTS)),
+      &if_packed_doubles);
+  GotoIf(ElementsKindEqual(elements_kind, Int32Constant(HOLEY_DOUBLE_ELEMENTS)),
          &if_holey_doubles);
+  GotoIf(
+      IsElementsKindLessThanOrEqual(elements_kind, LAST_FROZEN_ELEMENTS_KIND),
+      &if_smiorobjects);
   Goto(&return_not_found);
 
   BIND(&if_smiorobjects);
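
This dispatch leans on the numeric ordering of elements kinds: the four tagged fast kinds occupy 0 through 3 (per the STATIC_ASSERTs), the double kinds follow, and the sealed/frozen kinds, whose backing stores hold tagged values just like PACKED/HOLEY_ELEMENTS, come after the doubles, so they can reuse the smi-or-objects search loop. A TypeScript sketch of the routing; the kind values past HOLEY_ELEMENTS are assumptions for illustration:

    // Values 0-3 are fixed by the STATIC_ASSERTs above; the rest are
    // illustrative stand-ins for the real ElementsKind ordering.
    enum Kind {
      PACKED_SMI = 0, HOLEY_SMI = 1, PACKED = 2, HOLEY = 3,
      PACKED_DOUBLE = 4, HOLEY_DOUBLE = 5,
      PACKED_SEALED = 6, PACKED_FROZEN = 7,  // assumed values
    }
    const LAST_FROZEN_KIND = Kind.PACKED_FROZEN;

    function pickSearchLoop(kind: Kind): string {
      if (kind <= Kind.HOLEY) return "if_smiorobjects";  // tagged elements
      if (kind === Kind.PACKED_DOUBLE) return "if_packed_doubles";
      if (kind === Kind.HOLEY_DOUBLE) return "if_holey_doubles";
      // Sealed/frozen elements are tagged too; the same loop handles them.
      if (kind <= LAST_FROZEN_KIND) return "if_smiorobjects";
      return "return_not_found";  // anything else: nothing to find here
    }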

@@ -13356,10 +13356,9 @@ TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
 TNode<BoolT> CodeStubAssembler::IsElementsKindInRange(
     TNode<Int32T> target_kind, ElementsKind lower_reference_kind,
     ElementsKind higher_reference_kind) {
-  return Int32LessThanOrEqual(
+  return Uint32LessThanOrEqual(
       Int32Sub(target_kind, Int32Constant(lower_reference_kind)),
-      Int32Sub(Int32Constant(higher_reference_kind),
-               Int32Constant(lower_reference_kind)));
+      Int32Constant(higher_reference_kind - lower_reference_kind));
 }
 
 Node* CodeStubAssembler::IsDebugActive() {
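
The IsElementsKindInRange rewrite is the standard unsigned range-check trick: subtract the lower bound and compare unsigned, which folds lower <= kind && kind <= higher into a single comparison, since any kind below lower wraps around to a large unsigned value. Because both bounds are compile-time ElementsKind constants, higher - lower also collapses into one constant. A small TypeScript demonstration, using >>> 0 to emulate the unsigned 32-bit view:

    // ((kind - lo) >>> 0) <= (hi - lo)  is equivalent to  lo <= kind <= hi.
    function isKindInRange(kind: number, lo: number, hi: number): boolean {
      return ((kind - lo) >>> 0) <= (hi - lo);
    }

    // Example with lo = 2 (PACKED_ELEMENTS), hi = 5 (HOLEY_DOUBLE_ELEMENTS):
    isKindInRange(3, 2, 5);  // true
    isKindInRange(1, 2, 5);  // false: 1 - 2 wraps to 4294967295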