[ptr-compr][turbofan] Tagged to Compressed representation and type changes

This is the first CL that aims to eliminate the straggler tagged loads and
stores.

Cq-Include-Trybots: luci.v8.try:v8_linux64_pointer_compression_rel_ng
Cq-Include-Trybots: luci.v8.try:v8_linux64_arm64_pointer_compression_rel_ng
Bug: v8:8977, v8:7703
Change-Id: If3782c0c7047d4c7d8669e12fb423cc0c74bc58a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1587392
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61180}
Santiago Aboy Solanes 2019-04-30 12:54:53 +01:00 committed by Commit Bot
parent bbd740f038
commit afb19c2e06
6 changed files with 132 additions and 47 deletions
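
The recurring pattern in this CL is mechanical: call sites that hard-coded a tagged MachineRepresentation or MachineType now go through the new MachineType::RepCompressed*/TypeCompressed* helpers (added in the last file of this CL), which resolve to the compressed flavor only when V8_COMPRESS_POINTERS is defined. A minimal before/after sketch of a typical call site (illustrative, not a hunk from this CL):

  // Before: unconditionally tagged.
  MachineRepresentation representation = MachineRepresentation::kTagged;

  // After: kCompressed when V8_COMPRESS_POINTERS is defined, kTagged otherwise.
  MachineRepresentation representation = MachineType::RepCompressedTagged();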


@@ -458,7 +458,7 @@ InstructionOperand OperandForDeopt(Isolate* isolate, OperandGenerator* g,
case IrOpcode::kDelayedStringConstant:
return g->UseImmediate(input);
case IrOpcode::kHeapConstant: {
if (!CanBeTaggedPointer(rep)) {
if (!CanBeTaggedOrCompressedPointer(rep)) {
// If we have inconsistent static and dynamic types, e.g. if we
// smi-check a string, we can get here with a heap object that
// says it is a smi. In that case, we return an invalid instruction
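
CanBeTaggedOrCompressedPointer is not defined in this diff; a plausible sketch, assuming it extends the existing CanBeTaggedPointer predicate in machine-type.h to the compressed pointer flavors, is:

  // Sketch (assumption): accept the compressed flavors in addition to the
  // representations CanBeTaggedPointer already allows.
  inline bool CanBeTaggedOrCompressedPointer(MachineRepresentation rep) {
    return CanBeTaggedPointer(rep) ||
           rep == MachineRepresentation::kCompressed ||
           rep == MachineRepresentation::kCompressedPointer;
  }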


@@ -768,9 +768,11 @@ FieldAccess ForPropertyCellValue(MachineRepresentation representation,
Type type, MaybeHandle<Map> map,
NameRef const& name) {
WriteBarrierKind kind = kFullWriteBarrier;
if (representation == MachineRepresentation::kTaggedSigned) {
if (representation == MachineRepresentation::kTaggedSigned ||
representation == MachineRepresentation::kCompressedSigned) {
kind = kNoWriteBarrier;
} else if (representation == MachineRepresentation::kTaggedPointer) {
} else if (representation == MachineRepresentation::kTaggedPointer ||
representation == MachineRepresentation::kCompressedPointer) {
kind = kPointerWriteBarrier;
}
MachineType r = MachineType::TypeForRepresentation(representation);
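
The barrier classification carries over unchanged to the compressed flavors: signed (Smi) representations never reference the heap and need no write barrier, while representations statically known to hold a heap pointer can use the cheaper pointer barrier that skips the Smi check. An illustrative helper (not part of this CL) condensing the logic of the hunk above:

  WriteBarrierKind WriteBarrierForPropertyCellValue(MachineRepresentation rep) {
    if (rep == MachineRepresentation::kTaggedSigned ||
        rep == MachineRepresentation::kCompressedSigned) {
      return kNoWriteBarrier;       // Smis never point into the heap.
    }
    if (rep == MachineRepresentation::kTaggedPointer ||
        rep == MachineRepresentation::kCompressedPointer) {
      return kPointerWriteBarrier;  // Known HeapObject: skip the Smi check.
    }
    return kFullWriteBarrier;       // Could be either; emit the full barrier.
  }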
@@ -882,20 +884,21 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
// Load from constant type cell can benefit from type feedback.
MaybeHandle<Map> map;
Type property_cell_value_type = Type::NonInternal();
MachineRepresentation representation = MachineRepresentation::kTagged;
MachineRepresentation representation =
MachineType::RepCompressedTagged();
if (property_details.cell_type() == PropertyCellType::kConstantType) {
// Compute proper type based on the current value in the cell.
if (property_cell_value.IsSmi()) {
property_cell_value_type = Type::SignedSmall();
representation = MachineRepresentation::kTaggedSigned;
representation = MachineType::RepCompressedTaggedSigned();
} else if (property_cell_value.IsHeapNumber()) {
property_cell_value_type = Type::Number();
representation = MachineRepresentation::kTaggedPointer;
representation = MachineType::RepCompressedTaggedPointer();
} else {
MapRef property_cell_value_map =
property_cell_value.AsHeapObject().map();
property_cell_value_type = Type::For(property_cell_value_map);
representation = MachineRepresentation::kTaggedPointer;
representation = MachineType::RepCompressedTaggedPointer();
// We can only use the property cell value map for map check
// elimination if it's stable, i.e. the HeapObject wasn't
@@ -938,7 +941,8 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
// cell.
dependencies()->DependOnGlobalProperty(property_cell);
Type property_cell_value_type;
MachineRepresentation representation = MachineRepresentation::kTagged;
MachineRepresentation representation =
MachineType::RepCompressedTagged();
if (property_cell_value.IsHeapObject()) {
// We cannot do anything if the {property_cell_value}s map is no
// longer stable.
@@ -957,13 +961,13 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
ZoneHandleSet<Map>(property_cell_value_map.object())),
value, effect, control);
property_cell_value_type = Type::OtherInternal();
representation = MachineRepresentation::kTaggedPointer;
representation = MachineType::RepCompressedTaggedPointer();
} else {
// Check that the {value} is a Smi.
value = effect = graph()->NewNode(
simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
property_cell_value_type = Type::SignedSmall();
representation = MachineRepresentation::kTaggedSigned;
representation = MachineType::RepCompressedTaggedSigned();
}
effect = graph()->NewNode(simplified()->StoreField(ForPropertyCellValue(
representation, property_cell_value_type,
@@ -978,7 +982,7 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
dependencies()->DependOnGlobalProperty(property_cell);
effect = graph()->NewNode(
simplified()->StoreField(ForPropertyCellValue(
MachineRepresentation::kTagged, Type::NonInternal(),
MachineType::RepCompressedTagged(), Type::NonInternal(),
MaybeHandle<Map>(), name)),
jsgraph()->Constant(property_cell), value, effect, control);
break;
@@ -2269,14 +2273,18 @@ JSNativeContextSpecialization::BuildPropertyStore(
value = effect = a.Finish();
field_access.type = Type::Any();
field_access.machine_type = MachineType::TaggedPointer();
field_access.machine_type =
MachineType::TypeCompressedTaggedPointer();
field_access.write_barrier_kind = kPointerWriteBarrier;
} else {
// We just store directly to the MutableHeapNumber.
FieldAccess const storage_access = {
kTaggedBase, field_index.offset(),
name.object(), MaybeHandle<Map>(),
Type::OtherInternal(), MachineType::TaggedPointer(),
kTaggedBase,
field_index.offset(),
name.object(),
MaybeHandle<Map>(),
Type::OtherInternal(),
MachineType::TypeCompressedTaggedPointer(),
kPointerWriteBarrier};
storage = effect =
graph()->NewNode(simplified()->LoadField(storage_access),
@@ -2815,13 +2823,13 @@ JSNativeContextSpecialization::BuildElementAccess(
// Compute the element access.
Type element_type = Type::NonInternal();
MachineType element_machine_type = MachineType::AnyTagged();
MachineType element_machine_type = MachineType::TypeCompressedTagged();
if (IsDoubleElementsKind(elements_kind)) {
element_type = Type::Number();
element_machine_type = MachineType::Float64();
} else if (IsSmiElementsKind(elements_kind)) {
element_type = Type::SignedSmall();
element_machine_type = MachineType::TaggedSigned();
element_machine_type = MachineType::TypeCompressedTaggedSigned();
}
ElementAccess element_access = {
kTaggedBase, FixedArray::kHeaderSize,
@@ -2838,7 +2846,7 @@ JSNativeContextSpecialization::BuildElementAccess(
}
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
element_access.machine_type = MachineType::TypeCompressedTagged();
}
// Check if we can return undefined for out-of-bounds loads.
@@ -2946,7 +2954,7 @@ JSNativeContextSpecialization::BuildElementAccess(
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
element_access.machine_type = MachineType::TypeCompressedTagged();
}
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
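
Taken together, the BuildElementAccess hunks above choose the element machine type as follows; a condensed, illustrative consolidation (the holey fallback is folded in):

  Type element_type = Type::NonInternal();
  MachineType element_machine_type = MachineType::TypeCompressedTagged();
  if (IsDoubleElementsKind(elements_kind)) {
    element_type = Type::Number();
    element_machine_type = MachineType::Float64();  // Unboxed doubles are never compressed.
  } else if (IsSmiElementsKind(elements_kind)) {
    element_type = Type::SignedSmall();
    element_machine_type = MachineType::TypeCompressedTaggedSigned();
  }
  if (elements_kind == HOLEY_ELEMENTS || elements_kind == HOLEY_SMI_ELEMENTS) {
    // Holey loads may observe the hole marker, so fall back to the generic flavor.
    element_machine_type = MachineType::TypeCompressedTagged();
  }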


@@ -798,7 +798,7 @@ Reduction LoadElimination::ReduceLoadField(Node* node,
if (state == nullptr) return NoChange();
if (access.offset == HeapObject::kMapOffset &&
access.base_is_tagged == kTaggedBase) {
DCHECK(IsAnyTagged(access.machine_type.representation()));
DCHECK(IsAnyCompressedTagged(access.machine_type.representation()));
ZoneHandleSet<Map> object_maps;
if (state->LookupMaps(object, &object_maps) && object_maps.size() == 1) {
Node* value = jsgraph()->HeapConstant(object_maps[0]);
@@ -847,7 +847,7 @@ Reduction LoadElimination::ReduceStoreField(Node* node,
if (state == nullptr) return NoChange();
if (access.offset == HeapObject::kMapOffset &&
access.base_is_tagged == kTaggedBase) {
DCHECK(IsAnyTagged(access.machine_type.representation()));
DCHECK(IsAnyCompressedTagged(access.machine_type.representation()));
// Kill all potential knowledge about the {object}s map.
state = state->KillMaps(object, zone());
Type const new_value_type = NodeProperties::GetType(new_value);
@@ -890,12 +890,6 @@ Reduction LoadElimination::ReduceLoadElement(Node* node) {
switch (access.machine_type.representation()) {
case MachineRepresentation::kNone:
case MachineRepresentation::kBit:
// TODO(solanes): Create the code for the compressed values
case MachineRepresentation::kCompressedSigned:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
UNREACHABLE();
break;
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:
@@ -908,6 +902,9 @@ Reduction LoadElimination::ReduceLoadElement(Node* node) {
case MachineRepresentation::kTaggedSigned:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTagged:
case MachineRepresentation::kCompressedSigned:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
if (Node* replacement = state->LookupElement(
object, index, access.machine_type.representation())) {
// Make sure we don't resurrect dead {replacement} nodes.
@@ -948,12 +945,6 @@ Reduction LoadElimination::ReduceStoreElement(Node* node) {
switch (access.machine_type.representation()) {
case MachineRepresentation::kNone:
case MachineRepresentation::kBit:
// TODO(solanes): Create the code for the compressed values
case MachineRepresentation::kCompressedSigned:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
UNREACHABLE();
break;
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:
@@ -966,6 +957,9 @@ Reduction LoadElimination::ReduceStoreElement(Node* node) {
case MachineRepresentation::kTaggedSigned:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTagged:
case MachineRepresentation::kCompressedSigned:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
state = state->AddElement(object, index, new_value,
access.machine_type.representation(), zone());
break;


@@ -467,13 +467,13 @@ void MemoryOptimizer::VisitLoadElement(Node* node,
ElementAccess const& access = ElementAccessOf(node->op());
Node* index = node->InputAt(1);
node->ReplaceInput(1, ComputeIndex(access, index));
MachineType type = access.machine_type;
if (NeedsPoisoning(access.load_sensitivity) &&
access.machine_type.representation() !=
MachineRepresentation::kTaggedPointer) {
NodeProperties::ChangeOp(node,
machine()->PoisonedLoad(access.machine_type));
type.representation() != MachineRepresentation::kTaggedPointer &&
type.representation() != MachineRepresentation::kCompressedPointer) {
NodeProperties::ChangeOp(node, machine()->PoisonedLoad(type));
} else {
NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
NodeProperties::ChangeOp(node, machine()->Load(type));
}
EnqueueUses(node, state);
}
@@ -483,13 +483,13 @@ void MemoryOptimizer::VisitLoadField(Node* node, AllocationState const* state) {
FieldAccess const& access = FieldAccessOf(node->op());
Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
node->InsertInput(graph()->zone(), 1, offset);
MachineType type = access.machine_type;
if (NeedsPoisoning(access.load_sensitivity) &&
access.machine_type.representation() !=
MachineRepresentation::kTaggedPointer) {
NodeProperties::ChangeOp(node,
machine()->PoisonedLoad(access.machine_type));
type.representation() != MachineRepresentation::kTaggedPointer &&
type.representation() != MachineRepresentation::kCompressedPointer) {
NodeProperties::ChangeOp(node, machine()->PoisonedLoad(type));
} else {
NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
NodeProperties::ChangeOp(node, machine()->Load(type));
}
EnqueueUses(node, state);
}
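
Both memory-optimizer hunks above share the same shape: access.machine_type is cached in a local, and the exemption from load poisoning is extended from tagged pointers to compressed pointers. A condensed sketch of the resulting lowering decision:

  MachineType type = access.machine_type;
  bool is_pointer_rep =
      type.representation() == MachineRepresentation::kTaggedPointer ||
      type.representation() == MachineRepresentation::kCompressedPointer;
  if (NeedsPoisoning(access.load_sensitivity) && !is_pointer_rep) {
    NodeProperties::ChangeOp(node, machine()->PoisonedLoad(type));
  } else {
    NodeProperties::ChangeOp(node, machine()->Load(type));
  }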


@@ -2803,9 +2803,10 @@ class RepresentationSelector {
access.machine_type.representation();
// Convert to Smi if possible, such that we can avoid a write barrier.
if (field_representation == MachineRepresentation::kTagged &&
if ((field_representation == MachineRepresentation::kTagged ||
field_representation == MachineRepresentation::kCompressed) &&
TypeOf(value_node).Is(Type::SignedSmall())) {
field_representation = MachineRepresentation::kTaggedSigned;
field_representation = MachineType::RepCompressedTaggedSigned();
}
WriteBarrierKind write_barrier_kind = WriteBarrierKindFor(
access.base_is_tagged, field_representation, access.offset,
@@ -2845,9 +2846,10 @@ class RepresentationSelector {
access.machine_type.representation();
// Convert to Smi if possible, such that we can avoid a write barrier.
if (element_representation == MachineRepresentation::kTagged &&
if ((element_representation == MachineRepresentation::kTagged ||
element_representation == MachineRepresentation::kCompressed) &&
TypeOf(value_node).Is(Type::SignedSmall())) {
element_representation = MachineRepresentation::kTaggedSigned;
element_representation = MachineType::RepCompressedTaggedSigned();
}
WriteBarrierKind write_barrier_kind = WriteBarrierKindFor(
access.base_is_tagged, element_representation, access.type,
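
In both RepresentationSelector hunks the idea is the same: if the stored value is statically known to be a SignedSmall, the generic tagged or compressed field/element representation is narrowed to its signed flavor, which lets WriteBarrierKindFor drop the write barrier entirely. Condensed sketch:

  if ((field_representation == MachineRepresentation::kTagged ||
       field_representation == MachineRepresentation::kCompressed) &&
      TypeOf(value_node).Is(Type::SignedSmall())) {
    // A statically-known Smi store needs no write barrier; narrow to the
    // signed flavor (kCompressedSigned under V8_COMPRESS_POINTERS).
    field_representation = MachineType::RepCompressedTaggedSigned();
  }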


@@ -235,6 +235,77 @@ class MachineType {
return MachineType(MachineRepresentation::kBit, MachineSemantic::kNone);
}
// These methods return compressed representations when the compressed
// pointer flag is enabled. Otherwise, they return the corresponding tagged
// one.
constexpr static MachineRepresentation RepCompressedTagged() {
#ifdef V8_COMPRESS_POINTERS
return MachineRepresentation::kCompressed;
#else
return MachineRepresentation::kTagged;
#endif
}
constexpr static MachineRepresentation RepCompressedTaggedSigned() {
#ifdef V8_COMPRESS_POINTERS
return MachineRepresentation::kCompressedSigned;
#else
return MachineRepresentation::kTaggedSigned;
#endif
}
constexpr static MachineRepresentation RepCompressedTaggedPointer() {
#ifdef V8_COMPRESS_POINTERS
return MachineRepresentation::kCompressedPointer;
#else
return MachineRepresentation::kTaggedPointer;
#endif
}
constexpr static MachineType TypeCompressedTagged() {
#ifdef V8_COMPRESS_POINTERS
return MachineType::AnyCompressed();
#else
return MachineType::AnyTagged();
#endif
}
constexpr static MachineType TypeCompressedTaggedSigned() {
#ifdef V8_COMPRESS_POINTERS
return MachineType::CompressedSigned();
#else
return MachineType::TaggedSigned();
#endif
}
constexpr static MachineType TypeCompressedTaggedPointer() {
#ifdef V8_COMPRESS_POINTERS
return MachineType::CompressedPointer();
#else
return MachineType::TaggedPointer();
#endif
}
constexpr bool IsCompressedTagged() const {
#ifdef V8_COMPRESS_POINTERS
return IsCompressed();
#else
return IsTagged();
#endif
}
constexpr bool IsCompressedTaggedSigned() const {
#ifdef V8_COMPRESS_POINTERS
return IsCompressedSigned();
#else
return IsTaggedSigned();
#endif
}
constexpr bool IsCompressedTaggedPointer() const {
#ifdef V8_COMPRESS_POINTERS
return IsCompressedPointer();
#else
return IsTaggedPointer();
#endif
}
static MachineType TypeForRepresentation(const MachineRepresentation& rep,
bool isSigned = true) {
switch (rep) {
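
All of these helpers are constexpr and dispatch purely on the V8_COMPRESS_POINTERS preprocessor flag, so the choice is made at compile time and call sites pay no runtime cost. A self-contained, non-V8 illustration of the same pattern (hypothetical names, compilable on its own):

  #include <cstdio>

  enum class Rep { kTagged, kCompressed };

  constexpr Rep RepCompressedTagged() {
  #ifdef V8_COMPRESS_POINTERS
    return Rep::kCompressed;  // Chosen when pointer compression is enabled.
  #else
    return Rep::kTagged;      // Regular build.
  #endif
  }

  int main() {
    std::printf("%s\n", RepCompressedTagged() == Rep::kCompressed ? "compressed"
                                                                  : "tagged");
    return 0;
  }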
@@ -333,6 +404,16 @@ inline bool IsAnyCompressed(MachineRepresentation rep) {
rep == MachineRepresentation::kCompressedSigned;
}
// TODO(solanes): remove '|| IsAnyTagged(rep)' when all the representation
// changes are in place
inline bool IsAnyCompressedTagged(MachineRepresentation rep) {
#ifdef V8_COMPRESS_POINTERS
return IsAnyCompressed(rep) || IsAnyTagged(rep);
#else
return IsAnyTagged(rep);
#endif
}
// Gets the log2 of the element size in bytes of the machine type.
V8_EXPORT_PRIVATE inline int ElementSizeLog2Of(MachineRepresentation rep) {
switch (rep) {