[CSA] Skip write barriers when copying Smi-only FixedArrays.
This CL allows ExtractFixedArray to know whether the JSArray has only Smi elements. In that case, ExtractFixedArray always skips write barriers when copying the Smi-only backing store. When the copy cannot conflict with the concurrent marker, CopyElements additionally uses memcpy for better performance. This improves ExtractFastJSArray and CloneFastJSArray, so copying Smi arrays via slice() and spreading becomes faster, except for spreading holey Smi arrays.

Bug: v8:7980
Change-Id: Ie39f8abf3b5039cc12a8ca7ece81352375e0e9da
Reviewed-on: https://chromium-review.googlesource.com/c/1286340
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Commit-Queue: Hai Dang <dhai@google.com>
Cr-Commit-Position: refs/heads/master@{#56989}
parent 9df7ee392d
commit ff9ba741db
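For context, here is a minimal JavaScript sketch (illustrative only, not part of this CL) of the user-visible operations whose copies become cheaper: slicing and spreading an array whose backing store holds only Smis.

// Both operations below copy a packed Smi backing store, the case where
// write barriers can now be skipped.
var smis = [1, 2, 3, 4, 5];       // small integers only (Smis)
var viaSlice = smis.slice(1, 4);  // [2, 3, 4]
var viaSpread = [...smis];        // [1, 2, 3, 4, 5]

// Spreading a holey Smi array is the exception noted above, because its
// holes must be converted to undefined during the copy.
var holey = [1, 2, 3];
holey[10] = 4;                    // array becomes holey
var spreadHoley = [...holey];     // indices 3..9 become undefined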
@@ -4000,9 +4000,9 @@ Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
   Node* native_context = LoadNativeContext(context);
   Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
 
-  Node* new_elements =
-      ExtractFixedArray(LoadElements(array), begin, count, capacity,
-                        ExtractFixedArrayFlag::kAllFixedArrays, mode);
+  Node* new_elements = ExtractFixedArray(
+      LoadElements(array), begin, count, capacity,
+      ExtractFixedArrayFlag::kAllFixedArrays, mode, nullptr, elements_kind);
 
   Node* result = AllocateUninitializedJSArrayWithoutElements(
       array_map, ParameterToTagged(count, mode), allocation_site);
@@ -4034,10 +4034,11 @@ Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
   }
 
   // Simple extraction that preserves holes.
-  new_elements = ExtractFixedArray(
-      LoadElements(array), IntPtrOrSmiConstant(0, mode),
-      TaggedToParameter(length, mode), nullptr,
-      ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode);
+  new_elements =
+      ExtractFixedArray(LoadElements(array), IntPtrOrSmiConstant(0, mode),
+                        TaggedToParameter(length, mode), nullptr,
+                        ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode,
+                        nullptr, var_elements_kind.value());
   var_new_elements.Bind(new_elements);
   Goto(&allocate_jsarray);
 
@@ -4115,7 +4116,8 @@ TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
     Node* source, Node* first, Node* count, Node* capacity, Node* source_map,
     ElementsKind from_kind, AllocationFlags allocation_flags,
     ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
-    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
+    HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
+    Node* source_elements_kind) {
   DCHECK_NE(first, nullptr);
   DCHECK_NE(count, nullptr);
   DCHECK_NE(capacity, nullptr);
@@ -4202,7 +4204,8 @@ TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
                             RootIndex::kTheHoleValue, parameter_mode);
     CopyElements(to_kind, to_elements, IntPtrConstant(0), CAST(source),
                  ParameterToIntPtr(first, parameter_mode),
-                 ParameterToIntPtr(count, parameter_mode));
+                 ParameterToIntPtr(count, parameter_mode),
+                 SKIP_WRITE_BARRIER);
   } else {
     CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
                            count, capacity, SKIP_WRITE_BARRIER,
@@ -4215,16 +4218,47 @@ TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
     BIND(&old_space);
     {
       Comment("Copy FixedArray old space");
+      Label copy_one_by_one(this);
 
-      to_elements =
-          AllocateFixedArray(to_kind, capacity, parameter_mode,
-                             allocation_flags, var_target_map.value());
-      var_result.Bind(to_elements);
-      CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
-                             count, capacity, UPDATE_WRITE_BARRIER,
-                             parameter_mode, convert_holes,
-                             var_holes_converted);
-      Goto(&done);
+      // Try to use memcpy if we don't need to convert holes to undefined.
+      if (convert_holes == HoleConversionMode::kDontConvert &&
+          source_elements_kind != nullptr) {
+        // Only try memcpy if we're not copying object pointers.
+        GotoIfNot(IsFastSmiElementsKind(source_elements_kind),
+                  &copy_one_by_one);
+
+        const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
+        to_elements =
+            AllocateFixedArray(to_smi_kind, capacity, parameter_mode,
+                               allocation_flags, var_target_map.value());
+        var_result.Bind(to_elements);
+
+        FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
+                                RootIndex::kTheHoleValue, parameter_mode);
+        // CopyElements will try to use memcpy if it's not conflicting with
+        // GC. Otherwise it will copy elements by elements, but skip write
+        // barriers (since we're copying smis to smis).
+        CopyElements(to_smi_kind, to_elements, IntPtrConstant(0),
+                     CAST(source), ParameterToIntPtr(first, parameter_mode),
+                     ParameterToIntPtr(count, parameter_mode),
+                     SKIP_WRITE_BARRIER);
+        Goto(&done);
+      } else {
+        Goto(&copy_one_by_one);
+      }
+
+      BIND(&copy_one_by_one);
+      {
+        to_elements =
+            AllocateFixedArray(to_kind, capacity, parameter_mode,
+                               allocation_flags, var_target_map.value());
+        var_result.Bind(to_elements);
+        CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
+                               count, capacity, UPDATE_WRITE_BARRIER,
+                               parameter_mode, convert_holes,
+                               var_holes_converted);
+        Goto(&done);
+      }
     }
   }
 }
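To make the new dispatch concrete: only sources whose runtime elements kind is a fast Smi kind take the SKIP_WRITE_BARRIER / memcpy path above; arrays holding object pointers fall through to copy_one_by_one with full write barriers. A hedged JavaScript sketch of which arrays land on which side (assumes d8 with --allow-natives-syntax; %HasSmiElements is a debug-only intrinsic and not part of this CL):

// Run with: d8 --allow-natives-syntax sketch.js   (assumed debug setup)
var smiSource = [1, 2, 3, 4];       // Smi-only elements: eligible for the no-barrier/memcpy path
var objSource = [{}, {}, {}];       // object pointers: must keep write barriers
print(%HasSmiElements(smiSource));  // expected: true
print(%HasSmiElements(objSource));  // expected: false
var copied = smiSource.slice();     // exercises ExtractFixedArray on a Smi backing store
print(copied.length);               // 4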
@@ -4322,7 +4356,7 @@ TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
     Node* source, Node* first, Node* count, Node* capacity,
     ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode,
-    TVariable<BoolT>* var_holes_converted) {
+    TVariable<BoolT>* var_holes_converted, Node* source_runtime_kind) {
   DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
          extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
   // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should not
@@ -4372,10 +4406,10 @@ TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
   if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
     // Here we can only get |source| as FixedArray, never FixedDoubleArray.
     // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
-    Node* to_elements =
-        ExtractToFixedArray(source, first, count, capacity, source_map,
-                            PACKED_ELEMENTS, allocation_flags, extract_flags,
-                            parameter_mode, convert_holes, var_holes_converted);
+    Node* to_elements = ExtractToFixedArray(
+        source, first, count, capacity, source_map, PACKED_ELEMENTS,
+        allocation_flags, extract_flags, parameter_mode, convert_holes,
+        var_holes_converted, source_runtime_kind);
     var_result.Bind(to_elements);
     Goto(&done);
   }
@@ -4674,7 +4708,8 @@ void CodeStubAssembler::CopyElements(ElementsKind kind,
                                      TNode<IntPtrT> dst_index,
                                      TNode<FixedArrayBase> src_elements,
                                      TNode<IntPtrT> src_index,
-                                     TNode<IntPtrT> length) {
+                                     TNode<IntPtrT> length,
+                                     WriteBarrierMode write_barrier) {
   Label finished(this);
   Label needs_barrier(this);
   const bool needs_barrier_check = !IsDoubleElementsKind(kind);
@@ -4732,7 +4767,12 @@ void CodeStubAssembler::CopyElements(ElementsKind kind,
       [&](Node* array, Node* offset) {
         Node* const element = Load(MachineType::AnyTagged(), array, offset);
         Node* const delta_offset = IntPtrAdd(offset, delta);
-        Store(dst_elements, delta_offset, element);
+        if (write_barrier == SKIP_WRITE_BARRIER) {
+          StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
+                              delta_offset, element);
+        } else {
+          Store(dst_elements, delta_offset, element);
+        }
       },
       INTPTR_PARAMETERS, ForEachDirection::kForward);
   Goto(&finished);
@@ -1646,10 +1646,15 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   // needs to be the same. Copy from src_elements at
   // [src_index, src_index + length) to dst_elements at
   // [dst_index, dst_index + length).
+  // The function decides whether it can use memcpy. In case it cannot,
+  // |write_barrier| can help it to skip write barrier. SKIP_WRITE_BARRIER is
+  // only safe when copying to new space, or when copying to old space and the
+  // array does not contain object pointers.
   void CopyElements(ElementsKind kind, TNode<FixedArrayBase> dst_elements,
                     TNode<IntPtrT> dst_index,
                     TNode<FixedArrayBase> src_elements,
-                    TNode<IntPtrT> src_index, TNode<IntPtrT> length);
+                    TNode<IntPtrT> src_index, TNode<IntPtrT> length,
+                    WriteBarrierMode write_barrier = UPDATE_WRITE_BARRIER);
 
   TNode<FixedArray> HeapObjectToFixedArray(TNode<HeapObject> base,
                                            Label* cast_fail);
@@ -1710,13 +1715,17 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   // * If |var_holes_converted| is given, any holes will be converted to
   //   undefined and the variable will be set according to whether or not there
   //   were any hole.
+  // * If |source_elements_kind| is given, the function will try to use the
+  //   runtime elements kind of source to make copy faster. More specifically, it
+  //   can skip write barriers.
   TNode<FixedArrayBase> ExtractFixedArray(
       Node* source, Node* first, Node* count = nullptr,
       Node* capacity = nullptr,
       ExtractFixedArrayFlags extract_flags =
           ExtractFixedArrayFlag::kAllFixedArrays,
       ParameterMode parameter_mode = INTPTR_PARAMETERS,
-      TVariable<BoolT>* var_holes_converted = nullptr);
+      TVariable<BoolT>* var_holes_converted = nullptr,
+      Node* source_elements_kind = nullptr);
 
   TNode<FixedArrayBase> ExtractFixedArray(
       TNode<FixedArrayBase> source, TNode<Smi> first, TNode<Smi> count,
@@ -1762,7 +1771,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
           ExtractFixedArrayFlag::kAllFixedArrays,
       ParameterMode parameter_mode = INTPTR_PARAMETERS,
       HoleConversionMode convert_holes = HoleConversionMode::kDontConvert,
-      TVariable<BoolT>* var_holes_converted = nullptr);
+      TVariable<BoolT>* var_holes_converted = nullptr,
+      Node* source_runtime_kind = nullptr);
 
   // Attempt to copy a FixedDoubleArray to another FixedDoubleArray. In the case
   // where the source array has a hole, produce a FixedArray instead where holes
test/mjsunit/es6/array-spread-large-holey.js (new file, 17 lines)
@@ -0,0 +1,17 @@
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Test spreading of large holey arrays, which are supposedly allocated in
+// LargeObjectSpace. Holes should be replaced with undefined.
+
+var arr = new Array(2e5);
+
+for (var i = 0; i < 10; i++) {
+  arr[i] = i;
+}
+
+var arr2 = [...arr];
+assertTrue(arr2.hasOwnProperty(10));
+assertEquals(undefined, arr2[10]);
+assertEquals(9, arr2[9]);
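For comparison, a small companion sketch (illustrative only, not part of the CL's test) of the packed case, where no hole conversion is needed and the spread is a straight Smi-to-Smi copy:

// Packed Smi array: spreading copies the backing store without hole conversion.
var packed = [0, 1, 2, 3, 4];
var packedCopy = [...packed];
assertEquals(5, packedCopy.length);  // mjsunit-style asserts, as in the test above
assertEquals(4, packedCopy[4]);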