Add LoadFromObject and StoreToObject nodes, injected via Torque

R=tebbi@chromium.org

Change-Id: I30aab2663180382a078901c10e39cd1ad6c906f5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1627541
Commit-Queue: Georg Schmid <gsps@google.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61897}
@@ -887,37 +887,36 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                          std::is_convertible<TNode<T>, TNode<Object>>::value,
                          int>::type = 0>
   TNode<T> LoadReference(Reference reference) {
-    return CAST(LoadObjectField(reference.object, reference.offset,
-                                MachineTypeOf<T>::value));
+    return CAST(LoadFromObject(MachineTypeOf<T>::value, reference.object,
+                               reference.offset));
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
                          int>::type = 0>
   TNode<T> LoadReference(Reference reference) {
-    return UncheckedCast<T>(LoadObjectField(reference.object, reference.offset,
-                                            MachineTypeOf<T>::value));
+    return UncheckedCast<T>(LoadFromObject(MachineTypeOf<T>::value,
+                                           reference.object, reference.offset));
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<Object>>::value,
                          int>::type = 0>
   void StoreReference(Reference reference, TNode<T> value) {
-    int const_offset;
+    MachineRepresentation rep = MachineRepresentationOf<T>::value;
+    StoreToObjectWriteBarrier write_barrier = StoreToObjectWriteBarrier::kFull;
     if (std::is_same<T, Smi>::value) {
-      StoreObjectFieldNoWriteBarrier(reference.object, reference.offset, value);
-    } else if (std::is_same<T, Map>::value &&
-               ToInt32Constant(reference.offset, const_offset) &&
-               const_offset == HeapObject::kMapOffset) {
-      StoreMap(reference.object, value);
-    } else {
-      StoreObjectField(reference.object, reference.offset, value);
+      write_barrier = StoreToObjectWriteBarrier::kNone;
+    } else if (std::is_same<T, Map>::value) {
+      write_barrier = StoreToObjectWriteBarrier::kMap;
     }
+    StoreToObject(rep, reference.object, reference.offset, value,
+                  write_barrier);
   }
   template <class T, typename std::enable_if<
                          std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
                          int>::type = 0>
   void StoreReference(Reference reference, TNode<T> value) {
-    StoreObjectFieldNoWriteBarrier<T>(reference.object, reference.offset,
-                                      value);
+    StoreToObject(MachineRepresentationOf<T>::value, reference.object,
+                  reference.offset, value, StoreToObjectWriteBarrier::kNone);
   }

   // Tag a smi and store it.
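Note how StoreReference folds the old three-way store dispatch (Smi store, map store, generic field store) into a single StoreToObject call whose write-barrier hint is derived statically from T. The old path only used StoreMap when the offset was a compile-time constant equal to HeapObject::kMapOffset; the new hint applies to any Map-typed store. A minimal standalone sketch of the selection logic, using stand-in types rather than V8's (illustration only):

    #include <type_traits>

    // Stand-ins for illustration; the enum mirrors the one in this diff.
    struct Smi {};
    struct Map {};
    enum class StoreToObjectWriteBarrier { kNone, kMap, kFull };

    template <class T>
    constexpr StoreToObjectWriteBarrier SelectBarrierHint() {
      // Smis are immediates, so no barrier is ever needed; map words get
      // the dedicated map barrier; everything else conservatively takes
      // the full barrier.
      return std::is_same<T, Smi>::value   ? StoreToObjectWriteBarrier::kNone
             : std::is_same<T, Map>::value ? StoreToObjectWriteBarrier::kMap
                                           : StoreToObjectWriteBarrier::kFull;
    }

    static_assert(SelectBarrierHint<Smi>() == StoreToObjectWriteBarrier::kNone,
                  "Smi store needs no barrier");
    static_assert(SelectBarrierHint<Map>() == StoreToObjectWriteBarrier::kMap,
                  "Map store uses the map barrier");
    static_assert(SelectBarrierHint<int>() == StoreToObjectWriteBarrier::kFull,
                  "default is the full barrier");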
@@ -971,6 +971,11 @@ Node* CodeAssembler::AtomicLoad(MachineType type, Node* base, Node* offset) {
   return raw_assembler()->AtomicLoad(type, base, offset);
 }
 
+Node* CodeAssembler::LoadFromObject(MachineType type, TNode<HeapObject> object,
+                                    TNode<IntPtrT> offset) {
+  return raw_assembler()->LoadFromObject(type, object, offset);
+}
+
 TNode<Object> CodeAssembler::LoadRoot(RootIndex root_index) {
   if (RootsTable::IsImmortalImmovable(root_index)) {
     Handle<Object> root = isolate()->root_handle(root_index);
@@ -996,6 +1001,30 @@ Node* CodeAssembler::Store(Node* base, Node* value) {
                                 kFullWriteBarrier);
 }
 
+void CodeAssembler::StoreToObject(MachineRepresentation rep,
+                                  TNode<HeapObject> object,
+                                  TNode<IntPtrT> offset, Node* value,
+                                  StoreToObjectWriteBarrier write_barrier) {
+  WriteBarrierKind write_barrier_kind;
+  switch (write_barrier) {
+    case StoreToObjectWriteBarrier::kFull:
+      write_barrier_kind = WriteBarrierKind::kFullWriteBarrier;
+      break;
+    case StoreToObjectWriteBarrier::kMap:
+      write_barrier_kind = WriteBarrierKind::kMapWriteBarrier;
+      break;
+    case StoreToObjectWriteBarrier::kNone:
+      if (CanBeTaggedPointer(rep)) {
+        write_barrier_kind = WriteBarrierKind::kAssertNoWriteBarrier;
+      } else {
+        write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
+      }
+      break;
+  }
+  raw_assembler()->StoreToObject(rep, object, offset, value,
+                                 write_barrier_kind);
+}
+
 void CodeAssembler::OptimizedStoreField(MachineRepresentation rep,
                                         TNode<HeapObject> object, int offset,
                                         Node* value) {
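The hint-to-kind mapping above is not a plain renaming: kNone lowers to kAssertNoWriteBarrier whenever the representation could hold a tagged pointer, so debug builds can verify that skipping the barrier was actually safe. A standalone sketch of that mapping with stand-in enums (not the real V8 definitions):

    enum class Hint { kNone, kMap, kFull };           // StoreToObjectWriteBarrier
    enum class Kind { kNo, kAssertNo, kMap, kFull };  // WriteBarrierKind

    // can_be_tagged mirrors CanBeTaggedPointer(rep): only then is a missing
    // barrier a claim worth asserting in debug builds.
    constexpr Kind ToKind(Hint hint, bool can_be_tagged) {
      return hint == Hint::kFull ? Kind::kFull
             : hint == Hint::kMap ? Kind::kMap
             : can_be_tagged      ? Kind::kAssertNo
                                  : Kind::kNo;
    }

    static_assert(ToKind(Hint::kNone, true) == Kind::kAssertNo,
                  "tagged reps assert instead of silently skipping");
    static_assert(ToKind(Hint::kNone, false) == Kind::kNo,
                  "untagged reps simply take no barrier");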
@@ -298,6 +298,8 @@ inline bool NeedsBoundsCheck(CheckBounds check_bounds) {
   }
 }
 
+enum class StoreToObjectWriteBarrier { kNone, kMap, kFull };
+
 class AccessCheckNeeded;
 class BigIntWrapper;
 class ClassBoilerplate;
@@ -966,6 +968,9 @@ class V8_EXPORT_PRIVATE CodeAssembler {
              Node* base, Node* offset,
              LoadSensitivity needs_poisoning = LoadSensitivity::kSafe);
 
+  Node* LoadFromObject(MachineType type, TNode<HeapObject> object,
+                       TNode<IntPtrT> offset);
+
   // Load a value from the root array.
   TNode<Object> LoadRoot(RootIndex root_index);
 
@@ -991,6 +996,9 @@ class V8_EXPORT_PRIVATE CodeAssembler {
   TNode<HeapObject> OptimizedAllocate(TNode<IntPtrT> size,
                                       AllocationType allocation,
                                       AllowLargeObjects allow_large_objects);
+  void StoreToObject(MachineRepresentation rep, TNode<HeapObject> object,
+                     TNode<IntPtrT> offset, Node* value,
+                     StoreToObjectWriteBarrier write_barrier);
   void OptimizedStoreField(MachineRepresentation rep, TNode<HeapObject> object,
                            int offset, Node* value);
   void OptimizedStoreFieldAssertNoWriteBarrier(MachineRepresentation rep,
@@ -227,10 +227,14 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
       return VisitCall(node, state);
     case IrOpcode::kCallWithCallerSavedRegisters:
       return VisitCallWithCallerSavedRegisters(node, state);
+    case IrOpcode::kLoadFromObject:
+      return VisitLoadFromObject(node, state);
     case IrOpcode::kLoadElement:
       return VisitLoadElement(node, state);
     case IrOpcode::kLoadField:
       return VisitLoadField(node, state);
+    case IrOpcode::kStoreToObject:
+      return VisitStoreToObject(node, state);
     case IrOpcode::kStoreElement:
       return VisitStoreElement(node, state);
     case IrOpcode::kStoreField:
@@ -475,6 +479,32 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
   node->Kill();
 }
 
+void MemoryOptimizer::VisitLoadFromObject(Node* node,
+                                          AllocationState const* state) {
+  DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
+  ObjectAccess const& access = ObjectAccessOf(node->op());
+  Node* offset = node->InputAt(1);
+  node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
+  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
+  EnqueueUses(node, state);
+}
+
+void MemoryOptimizer::VisitStoreToObject(Node* node,
+                                         AllocationState const* state) {
+  DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
+  ObjectAccess const& access = ObjectAccessOf(node->op());
+  Node* object = node->InputAt(0);
+  Node* offset = node->InputAt(1);
+  Node* value = node->InputAt(2);
+  node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
+  WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
+      node, object, value, state, access.write_barrier_kind);
+  NodeProperties::ChangeOp(
+      node, machine()->Store(StoreRepresentation(
+                access.machine_type.representation(), write_barrier_kind)));
+  EnqueueUses(node, state);
+}
+
 #undef __
 
 void MemoryOptimizer::VisitCall(Node* node, AllocationState const* state) {
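Both lowerings rewrite input 1 so the object-relative access becomes an ordinary machine Load/Store: field offsets are given from the object's true start, while the base value is the tagged HeapObject pointer (true start plus kHeapObjectTag), so subtracting the tag from the offset makes the addresses line up. A standalone sketch of the arithmetic (kHeapObjectTag is 1 in V8):

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;  // low bit set on HeapObject pointers

    // After lowering, the machine load computes
    //   base + (offset - kHeapObjectTag)
    // where base is the tagged pointer; the tag and the untagging cancel.
    intptr_t EffectiveAddress(intptr_t tagged_base, intptr_t field_offset) {
      return tagged_base + (field_offset - kHeapObjectTag);
    }

    int main() {
      intptr_t raw_object = 0x1000;                   // hypothetical address
      intptr_t tagged = raw_object + kHeapObjectTag;  // 0x1001
      assert(EffectiveAddress(tagged, 8) == raw_object + 8);
      return 0;
    }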
@@ -116,8 +116,10 @@ class MemoryOptimizer final {
   void VisitAllocateRaw(Node*, AllocationState const*);
   void VisitCall(Node*, AllocationState const*);
   void VisitCallWithCallerSavedRegisters(Node*, AllocationState const*);
+  void VisitLoadFromObject(Node*, AllocationState const*);
   void VisitLoadElement(Node*, AllocationState const*);
   void VisitLoadField(Node*, AllocationState const*);
+  void VisitStoreToObject(Node*, AllocationState const*);
   void VisitStoreElement(Node*, AllocationState const*);
   void VisitStoreField(Node*, AllocationState const*);
   void VisitStore(Node*, AllocationState const*);
@@ -412,10 +412,12 @@
   V(LoadField)                 \
   V(LoadElement)               \
   V(LoadTypedElement)          \
+  V(LoadFromObject)            \
   V(LoadDataViewElement)       \
   V(StoreField)                \
   V(StoreElement)              \
   V(StoreTypedElement)         \
+  V(StoreToObject)             \
   V(StoreDataViewElement)      \
   V(StoreSignedSmallElement)   \
   V(TransitionAndStoreElement) \
@@ -127,32 +127,37 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
   }
 
   // Memory Operations.
+  std::pair<MachineType, const Operator*> InsertDecompressionIfNeeded(
+      MachineType type) {
+    const Operator* decompress_op = nullptr;
+    if (COMPRESS_POINTERS_BOOL) {
+      switch (type.representation()) {
+        case MachineRepresentation::kTaggedPointer:
+          type = MachineType::CompressedPointer();
+          decompress_op = machine()->ChangeCompressedPointerToTaggedPointer();
+          break;
+        case MachineRepresentation::kTaggedSigned:
+          type = MachineType::CompressedSigned();
+          decompress_op = machine()->ChangeCompressedSignedToTaggedSigned();
+          break;
+        case MachineRepresentation::kTagged:
+          type = MachineType::AnyCompressed();
+          decompress_op = machine()->ChangeCompressedToTagged();
+          break;
+        default:
+          break;
+      }
+    }
+    return std::make_pair(type, decompress_op);
+  }
   Node* Load(MachineType type, Node* base,
              LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
     return Load(type, base, IntPtrConstant(0), needs_poisoning);
   }
   Node* Load(MachineType type, Node* base, Node* index,
              LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
-    // change_op is used below to change to the correct Tagged representation
-    const Operator* change_op = nullptr;
-    if (COMPRESS_POINTERS_BOOL) {
-      switch (type.representation()) {
-        case MachineRepresentation::kTaggedPointer:
-          type = MachineType::CompressedPointer();
-          change_op = machine()->ChangeCompressedPointerToTaggedPointer();
-          break;
-        case MachineRepresentation::kTaggedSigned:
-          type = MachineType::CompressedSigned();
-          change_op = machine()->ChangeCompressedSignedToTaggedSigned();
-          break;
-        case MachineRepresentation::kTagged:
-          type = MachineType::AnyCompressed();
-          change_op = machine()->ChangeCompressedToTagged();
-          break;
-        default:
-          break;
-      }
-    }
+    const Operator* decompress_op;
+    std::tie(type, decompress_op) = InsertDecompressionIfNeeded(type);
     const Operator* op = machine()->Load(type);
     CHECK_NE(PoisoningMitigationLevel::kPoisonAll, poisoning_level_);
     if (needs_poisoning == LoadSensitivity::kCritical &&
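This hunk factors the pre-existing pointer-compression handling out of Load into InsertDecompressionIfNeeded, so the new LoadFromObject in the next hunk can share it: under COMPRESS_POINTERS_BOOL the load is issued at the narrower compressed representation and a ChangeCompressed*ToTagged* operator widens the result afterwards. A toy model of that load-narrow-then-widen pattern (the offset-from-heap-base scheme below is an assumption for illustration only; the real decompression is whatever the Change* operators implement):

    #include <cassert>
    #include <cstdint>

    // Toy model: a "compressed" tagged value is a 32-bit offset from a
    // heap base (assumed scheme, illustration only).
    uint64_t DecompressTagged(uint64_t heap_base, uint32_t compressed) {
      return heap_base + compressed;  // widen back to a full tagged value
    }

    int main() {
      const uint64_t heap_base = 0x100000000ULL;  // hypothetical cage base
      const uint32_t compressed = 0x1001;         // narrow value actually loaded
      assert(DecompressTagged(heap_base, compressed) == 0x100001001ULL);
      return 0;
    }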
@@ -161,11 +166,25 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
     }
 
     Node* load = AddNode(op, base, index);
-    if (change_op != nullptr) {
-      load = AddNode(change_op, load);
+    if (decompress_op != nullptr) {
+      load = AddNode(decompress_op, load);
     }
     return load;
   }
+  Node* LoadFromObject(
+      MachineType type, Node* base, Node* offset,
+      LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
+    const Operator* decompress_op;
+    std::tie(type, decompress_op) = InsertDecompressionIfNeeded(type);
+    CHECK_EQ(needs_poisoning, LoadSensitivity::kSafe);
+    ObjectAccess access = {type, WriteBarrierKind::kNoWriteBarrier};
+    Node* load = AddNode(simplified()->LoadFromObject(access), base, offset);
+    if (decompress_op != nullptr) {
+      load = AddNode(decompress_op, load);
+    }
+    return load;
+  }
+
   std::pair<MachineRepresentation, Node*> InsertCompressionIfNeeded(
       MachineRepresentation rep, Node* value) {
     if (COMPRESS_POINTERS_BOOL) {
@@ -113,7 +113,6 @@ size_t hash_value(ElementAccess const& access) {
                             access.machine_type);
 }
 
-
 std::ostream& operator<<(std::ostream& os, ElementAccess const& access) {
   os << access.base_is_tagged << ", " << access.header_size << ", "
      << access.type << ", " << access.machine_type << ", "
@@ -124,6 +123,20 @@ std::ostream& operator<<(std::ostream& os, ElementAccess const& access) {
   return os;
 }
 
+bool operator==(ObjectAccess const& lhs, ObjectAccess const& rhs) {
+  return lhs.machine_type == rhs.machine_type &&
+         lhs.write_barrier_kind == rhs.write_barrier_kind;
+}
+
+size_t hash_value(ObjectAccess const& access) {
+  return base::hash_combine(access.machine_type, access.write_barrier_kind);
+}
+
+std::ostream& operator<<(std::ostream& os, ObjectAccess const& access) {
+  os << access.machine_type << ", " << access.write_barrier_kind;
+  return os;
+}
+
 const FieldAccess& FieldAccessOf(const Operator* op) {
   DCHECK_NOT_NULL(op);
   DCHECK(op->opcode() == IrOpcode::kLoadField ||
@@ -131,7 +144,6 @@ const FieldAccess& FieldAccessOf(const Operator* op) {
   return OpParameter<FieldAccess>(op);
 }
 
-
 const ElementAccess& ElementAccessOf(const Operator* op) {
   DCHECK_NOT_NULL(op);
   DCHECK(op->opcode() == IrOpcode::kLoadElement ||
@@ -139,6 +151,13 @@ const ElementAccess& ElementAccessOf(const Operator* op) {
   return OpParameter<ElementAccess>(op);
 }
 
+const ObjectAccess& ObjectAccessOf(const Operator* op) {
+  DCHECK_NOT_NULL(op);
+  DCHECK(op->opcode() == IrOpcode::kLoadFromObject ||
+         op->opcode() == IrOpcode::kStoreToObject);
+  return OpParameter<ObjectAccess>(op);
+}
+
 ExternalArrayType ExternalArrayTypeOf(const Operator* op) {
   DCHECK(op->opcode() == IrOpcode::kLoadTypedElement ||
          op->opcode() == IrOpcode::kLoadDataViewElement ||
@@ -1684,7 +1703,9 @@ SPECULATIVE_NUMBER_BINOP_LIST(SPECULATIVE_NUMBER_BINOP)
   V(LoadElement, ElementAccess, Operator::kNoWrite, 2, 1, 1)             \
   V(StoreElement, ElementAccess, Operator::kNoRead, 3, 1, 0)             \
   V(LoadTypedElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1)    \
+  V(LoadFromObject, ObjectAccess, Operator::kNoWrite, 2, 1, 1)           \
   V(StoreTypedElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)    \
+  V(StoreToObject, ObjectAccess, Operator::kNoRead, 3, 1, 0)             \
   V(LoadDataViewElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1) \
   V(StoreDataViewElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)
 
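The trailing counts in these V() entries give each operator's value-input, control-input, and value-output counts, so the list above pins down the node shapes of the two new operators. Read off the entries (an annotation, not code from the CL):

    // LoadFromObject (2, 1, 1): value inputs (base, offset)        -> 1 value output
    // StoreToObject  (3, 1, 0): value inputs (base, offset, value) -> no value output
    // Operator::kNoWrite / kNoRead mark the load as not writing and the
    // store as not reading memory state, matching the other access operators.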
@@ -142,6 +142,30 @@ V8_EXPORT_PRIVATE ElementAccess const& ElementAccessOf(const Operator* op)
 
 ExternalArrayType ExternalArrayTypeOf(const Operator* op) V8_WARN_UNUSED_RESULT;
 
+// An access descriptor for loads/stores of CSA-accessible structures.
+struct ObjectAccess {
+  MachineType machine_type;             // machine type of the field.
+  WriteBarrierKind write_barrier_kind;  // write barrier hint.
+
+  ObjectAccess()
+      : machine_type(MachineType::None()),
+        write_barrier_kind(kFullWriteBarrier) {}
+
+  ObjectAccess(MachineType machine_type, WriteBarrierKind write_barrier_kind)
+      : machine_type(machine_type), write_barrier_kind(write_barrier_kind) {}
+
+  int tag() const { return kHeapObjectTag; }
+};
+
+V8_EXPORT_PRIVATE bool operator==(ObjectAccess const&, ObjectAccess const&);
+
+size_t hash_value(ObjectAccess const&);
+
+V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, ObjectAccess const&);
+
+V8_EXPORT_PRIVATE ObjectAccess const& ObjectAccessOf(const Operator* op)
+    V8_WARN_UNUSED_RESULT;
+
 // The ConvertReceiverMode is used as parameter by ConvertReceiver operators.
 ConvertReceiverMode ConvertReceiverModeOf(Operator const* op)
     V8_WARN_UNUSED_RESULT;
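Compared with FieldAccess, the new descriptor is deliberately minimal: there is no static offset, field type, or map, because the offset arrives as a value input of the node and no static type is tracked (the Typer hunks below mark these operators UNREACHABLE). A compilable sketch of the descriptor's shape using stand-in types (illustration, not V8's headers):

    #include <cassert>

    // Stand-ins so the sketch compiles outside V8; the real MachineType
    // and WriteBarrierKind live in V8's headers.
    enum WriteBarrierKind { kNoWriteBarrier, kMapWriteBarrier, kFullWriteBarrier };
    struct MachineType {};
    const int kHeapObjectTag = 1;

    // Just a machine type plus a write-barrier hint; the offset is a
    // runtime value input of the LoadFromObject/StoreToObject node.
    struct ObjectAccess {
      MachineType machine_type;
      WriteBarrierKind write_barrier_kind;
      int tag() const { return kHeapObjectTag; }  // always a tagged base
    };

    int main() {
      ObjectAccess access = {MachineType(), kFullWriteBarrier};
      assert(access.tag() == kHeapObjectTag);
      return 0;
    }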
@@ -829,6 +853,12 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
   const Operator* TransitionAndStoreNonNumberElement(Handle<Map> fast_map,
                                                      Type value_type);
 
+  // load-from-object [base + offset]
+  const Operator* LoadFromObject(ObjectAccess const&);
+
+  // store-to-object [base + offset], value
+  const Operator* StoreToObject(ObjectAccess const&);
+
   // load-typed-element buffer, [base + external + index]
   const Operator* LoadTypedElement(ExternalArrayType const&);
 
@@ -2168,6 +2168,8 @@ Type Typer::Visitor::TypeLoadElement(Node* node) {
   return ElementAccessOf(node->op()).type;
 }
 
+Type Typer::Visitor::TypeLoadFromObject(Node* node) { UNREACHABLE(); }
+
 Type Typer::Visitor::TypeLoadTypedElement(Node* node) {
   switch (ExternalArrayTypeOf(node->op())) {
 #define TYPED_ARRAY_CASE(ElemType, type, TYPE, ctype) \
@@ -2194,6 +2196,8 @@ Type Typer::Visitor::TypeStoreField(Node* node) { UNREACHABLE(); }
 
 Type Typer::Visitor::TypeStoreElement(Node* node) { UNREACHABLE(); }
 
+Type Typer::Visitor::TypeStoreToObject(Node* node) { UNREACHABLE(); }
+
 Type Typer::Visitor::TypeTransitionAndStoreElement(Node* node) {
   UNREACHABLE();
 }
@@ -1563,6 +1563,9 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       // CheckValueInputIs(node, 0, Type::Object());
       // CheckTypeIs(node, ElementAccessOf(node->op()).type));
       break;
+    case IrOpcode::kLoadFromObject:
+      // TODO(gsps): Can we check some types here?
+      break;
     case IrOpcode::kLoadTypedElement:
       break;
     case IrOpcode::kLoadDataViewElement:
@@ -1581,6 +1584,9 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       // CheckValueInputIs(node, 1, ElementAccessOf(node->op()).type));
       CheckNotTyped(node);
       break;
+    case IrOpcode::kStoreToObject:
+      // TODO(gsps): Can we check some types here?
+      break;
     case IrOpcode::kTransitionAndStoreElement:
       CheckNotTyped(node);
       break;