Revert "[ptr-compr][x64][compiler] Support load map in compressed form"

This reverts commit 6ca3adb94c.

Reason for revert: Build failed with V8_MAP_PACKING

Original change's description:
> [ptr-compr][x64][compiler] Support load map in compressed form
>
> ...to allow pointer decompression at use-site.
>
> Bug: v8:13056, v8:7703
> Change-Id: If369286814c76340a945cc2a9fd863888a813080
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3811737
> Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> Commit-Queue: Hao A Xu <hao.a.xu@intel.com>
> Cr-Commit-Position: refs/heads/main@{#82242}

Bug: v8:13056, v8:7703
Change-Id: I01b6ea880c656b66392cb8eb47f7c80d8c0e4936
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3815777
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82244}
parent 031d76a48a
commit eb568ceba9
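
For context on what is being reverted: under pointer compression, tagged fields hold 32-bit offsets from a per-heap "cage" base. The reverted CL let a freshly loaded map stay in that compressed form (V8's TaggedRegister wrapper) and deferred decompression to the use site, e.g. folding the cage-base add into the addressing mode of the next field load instead of materializing a full 64-bit pointer first. The LoadMapBitField helper removed in the hunks below packaged LoadMap + LoadWord8Field so the x64 port could do exactly that. The following is a minimal standalone sketch of the idea, not V8's API; Cage, CompressedTagged, and LoadByteField are illustrative names:

// Minimal model of "decompress at use-site" under pointer compression.
// Illustrative sketch only -- Cage, CompressedTagged, and LoadByteField
// are made-up names, not V8 interfaces.
#include <cstdint>
#include <cstdio>
#include <vector>

using CompressedTagged = uint32_t;  // 32-bit offset from the cage base

struct Cage {
  std::vector<uint8_t> memory;  // stands in for the pointer-compression cage
};

// Rather than first materializing base + offset into a full pointer register
// ("decompress on load"), fold the cage base into the memory operand of the
// field access itself. On x64 this is a single [base + compressed + disp]
// addressing mode, so the compressed value never needs its own decompression.
uint8_t LoadByteField(const Cage& cage, CompressedTagged obj, int disp) {
  return cage.memory[static_cast<size_t>(obj) + disp];
}

int main() {
  Cage cage{std::vector<uint8_t>(64, 0)};
  cage.memory[16 + 3] = 0xAB;     // "object" at offset 16, byte field at +3
  CompressedTagged map = 16;      // compressed map pointer: offset only
  std::printf("%#x\n", LoadByteField(cage, map, 3));  // prints 0xab
  return 0;
}

Why this interacts badly with map packing: V8_MAP_PACKING builds store the map word in a packed (XORed) form, and the removed LoadMap(TaggedRegister) overload had to call UnpackMapWord on the still-compressed value (visible in the macro-assembler hunk below); V8_MAP_PACKING is the configuration the revert cites as broken.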
@@ -169,13 +169,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
   JumpIf(cc, type, Operand(instance_type), target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -537,12 +530,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ add(lhs, lhs, Operand(rhs));
 }
@@ -180,13 +180,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ Ldrh(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
   JumpIf(cc, type, instance_type, target);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -607,12 +600,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (SmiValuesAre31Bits()) {
     __ Add(lhs.W(), lhs.W(), Immediate(rhs));
@@ -67,11 +67,6 @@ class BaselineAssembler {
                                InstanceType instance_type, Register map,
                                Label* target,
                                Label::Distance distance = Label::kFar);
-  inline void JumpIfObjectType(Condition cc, Register object,
-                               InstanceType instance_type,
-                               ScratchRegisterScope* scratch_scope,
-                               Label* target,
-                               Label::Distance distance = Label::kFar);
   inline void JumpIfInstanceType(Condition cc, Register map,
                                  InstanceType instance_type, Label* target,
                                  Label::Distance distance = Label::kFar);
@@ -189,13 +184,6 @@ class BaselineAssembler {
                                     int32_t index);
   inline void LoadFixedArrayElement(TaggedRegister output, TaggedRegister array,
                                     int32_t index);
-  inline void LoadWord8Field(Register output, TaggedRegister source,
-                             int offset);
-  inline void LoadMap(TaggedRegister output, Register value);
-  inline void JumpIfObjectType(Condition cc, Register object,
-                               InstanceType instance_type, TaggedRegister map,
-                               Label* target,
-                               Label::Distance distance = Label::kFar);
 #endif
 
   // Falls through and sets scratch_and_result to 0 on failure, jumps to
@@ -220,8 +208,6 @@ class BaselineAssembler {
   inline void StaModuleVariable(Register context, Register value,
                                 int cell_index, uint32_t depth);
 
-  inline void LoadMapBitField(Register map_bit_field, Register object);
-
   inline void AddSmi(Register lhs, Smi rhs);
   inline void SmiUntag(Register value);
   inline void SmiUntag(Register output, Register value);
@@ -1493,7 +1493,8 @@ void BaselineCompiler::VisitTestUndetectable() {
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
 
   Register map_bit_field = kInterpreterAccumulatorRegister;
-  __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
                    Condition::kZero, &not_undetectable, Label::kNear);
 
@@ -1536,8 +1537,8 @@ void BaselineCompiler::VisitTestTypeOf() {
   Label is_smi, is_heap_number;
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
   __ JumpIfObjectType(Condition::kEqual, kInterpreterAccumulatorRegister,
-                      HEAP_NUMBER_TYPE, &scratch_scope, &is_heap_number,
-                      Label::kNear);
+                      HEAP_NUMBER_TYPE, scratch_scope.AcquireScratch(),
+                      &is_heap_number, Label::kNear);
 
   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
   __ Jump(&done, Label::kNear);
@@ -1553,7 +1554,8 @@
   static_assert(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
   __ JumpIfObjectType(Condition::kGreaterThanEqual,
                       kInterpreterAccumulatorRegister, FIRST_NONSTRING_TYPE,
-                      &scratch_scope, &bad_instance_type, Label::kNear);
+                      scratch_scope.AcquireScratch(), &bad_instance_type,
+                      Label::kNear);
 
   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
   __ Jump(&done, Label::kNear);
@@ -1567,8 +1569,8 @@
   Label is_smi, bad_instance_type;
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
   __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
-                      SYMBOL_TYPE, &scratch_scope, &bad_instance_type,
-                      Label::kNear);
+                      SYMBOL_TYPE, scratch_scope.AcquireScratch(),
+                      &bad_instance_type, Label::kNear);
 
   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
   __ Jump(&done, Label::kNear);
@@ -1597,8 +1599,8 @@
   Label is_smi, bad_instance_type;
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
   __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
-                      BIGINT_TYPE, &scratch_scope, &bad_instance_type,
-                      Label::kNear);
+                      BIGINT_TYPE, scratch_scope.AcquireScratch(),
+                      &bad_instance_type, Label::kNear);
 
   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
   __ Jump(&done, Label::kNear);
@@ -1618,7 +1620,8 @@
 
   // All other undetectable maps are typeof undefined.
   Register map_bit_field = kInterpreterAccumulatorRegister;
-  __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
                    Condition::kZero, &not_undetectable, Label::kNear);
 
@@ -1637,7 +1640,8 @@
 
   // Check if the map is callable but not undetectable.
   Register map_bit_field = kInterpreterAccumulatorRegister;
-  __ LoadMapBitField(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask,
                    Condition::kZero, &not_callable, Label::kNear);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
@@ -2020,7 +2024,8 @@ void BaselineCompiler::VisitJumpIfJSReceiver() {
   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
 
   __ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
-                      FIRST_JS_RECEIVER_TYPE, &scratch_scope, &dont_jump);
+                      FIRST_JS_RECEIVER_TYPE, scratch_scope.AcquireScratch(),
+                      &dont_jump);
   UpdateInterruptBudgetAndDoInterpreterJump();
 
   __ Bind(&is_smi);
@@ -2157,7 +2162,8 @@ void BaselineCompiler::VisitThrowIfNotSuperConstructor() {
   Register reg = scratch_scope.AcquireScratch();
   LoadRegister(reg, 0);
   Register map_bit_field = scratch_scope.AcquireScratch();
-  __ LoadMapBitField(map_bit_field, reg);
+  __ LoadMap(map_bit_field, reg);
+  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
   __ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask,
                    Condition::kNotZero, &done, Label::kNear);
 
@@ -167,14 +167,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ CmpObjectType(object, instance_type, map);
   __ j(AsMasmCondition(cc), target, distance);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target,
-                                         Label::Distance distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target, distance);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target,
@@ -498,12 +490,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ add(lhs, Immediate(rhs));
@@ -155,13 +155,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -505,12 +498,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Add_d(lhs, lhs, Operand(rhs));
 }
@@ -157,13 +157,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -480,12 +473,6 @@ void BaselineAssembler::StaContextSlot(Register context, Register value,
                                    value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Addu(lhs, lhs, Operand(rhs));
 }
@@ -155,13 +155,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -515,12 +508,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   __ Daddu(lhs, lhs, Operand(rhs));
 }
@@ -274,14 +274,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   JumpIf(cc, type, Operand(instance_type), target);
 }
 
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
-
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -678,12 +670,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ LoadSmiLiteral(r0, rhs);
@@ -153,13 +153,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ GetObjectType(object, map, type);
   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -513,12 +506,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   ASM_CODE_COMMENT(masm_);
   if (SmiValuesAre31Bits()) {
@@ -274,14 +274,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   JumpIf(cc, type, Operand(instance_type), target);
 }
 
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target, Label::Distance) {
-  JumpIfObjectType(cc, object, instance_type, scratch_scope->AcquireScratch(),
-                   target);
-}
-
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target, Label::Distance) {
@@ -676,12 +668,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  LoadMap(map_bit_field, object);
-  LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   __ LoadSmiLiteral(r0, rhs);
@@ -168,23 +168,6 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
   __ CmpObjectType(object, instance_type, map);
   __ j(AsMasmCondition(cc), target, distance);
 }
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         TaggedRegister map, Label* target,
-                                         Label::Distance distance) {
-  __ AssertNotSmi(object);
-  __ CmpObjectType(object, instance_type, map);
-  __ j(AsMasmCondition(cc), target, distance);
-}
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         ScratchRegisterScope* scratch_scope,
-                                         Label* target,
-                                         Label::Distance distance) {
-  JumpIfObjectType(cc, object, instance_type,
-                   TaggedRegister(scratch_scope->AcquireScratch()), target,
-                   distance);
-}
 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                            InstanceType instance_type,
                                            Label* target,
@@ -372,10 +355,6 @@ void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                        int offset) {
   __ movb(output, FieldOperand(source, offset));
 }
-void BaselineAssembler::LoadWord8Field(Register output, TaggedRegister source,
-                                       int offset) {
-  __ movb(output, FieldOperand(source, offset));
-}
 void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                                Smi value) {
   __ StoreTaggedSignedField(FieldOperand(target, offset), value);
@@ -436,10 +415,6 @@ void BaselineAssembler::LoadFixedArrayElement(TaggedRegister output,
                      FixedArray::kHeaderSize + index * kTaggedSize);
 }
 
-void BaselineAssembler::LoadMap(TaggedRegister output, Register value) {
-  __ LoadMap(output, value);
-}
-
 void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
                                                 Register feedback_vector,
                                                 FeedbackSlot slot,
@@ -597,13 +572,6 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
   StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
 }
 
-void BaselineAssembler::LoadMapBitField(Register map_bit_field,
-                                        Register object) {
-  TaggedRegister map(map_bit_field);
-  LoadMap(map, object);
-  LoadWord8Field(map_bit_field, map, Map::kBitFieldOffset);
-}
-
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
   if (rhs.value() == 0) return;
   if (SmiValuesAre31Bits()) {
@@ -324,8 +324,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   // If the type of the result (stored in its map) is less than
   // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
   static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
-  TaggedRegister map(rcx);
-  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, map);
+  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
   __ j(above_equal, &leave_and_return, Label::kNear);
   __ jmp(&use_receiver);
 
@@ -1192,7 +1191,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // if so, call into CompileLazy.
   Label compile_lazy;
   __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
-                   TaggedRegister(kScratchRegister));
+                   kScratchRegister);
   __ j(not_equal, &compile_lazy);
 
   // Load the feedback vector from the closure.
@@ -1205,9 +1204,8 @@
   Label push_stack_frame;
   // Check if feedback vector is valid. If valid, check for optimized code
   // and update invocation count. Otherwise, setup the stack frame.
-  TaggedRegister map(rcx);
-  __ LoadMap(map, feedback_vector);
-  __ CmpInstanceType(map, FEEDBACK_VECTOR_TYPE);
+  __ LoadMap(rcx, feedback_vector);
+  __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
   __ j(not_equal, &push_stack_frame);
 
   // Check the tiering state.
@@ -1373,9 +1371,8 @@
   Label install_baseline_code;
   // Check if feedback vector is valid. If not, call prepare for baseline to
   // allocate it.
-  TaggedRegister map(rcx);
-  __ LoadMap(map, feedback_vector);
-  __ CmpInstanceType(map, FEEDBACK_VECTOR_TYPE);
+  __ LoadMap(rcx, feedback_vector);
+  __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
   __ j(not_equal, &install_baseline_code);
 
   // Check the tiering state.
@@ -1570,8 +1567,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   __ LoadTaggedPointerField(
       rbx, FieldOperand(shared_function_info,
                         SharedFunctionInfo::kFunctionDataOffset));
-  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE,
-                   TaggedRegister(kScratchRegister));
+  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
   __ j(not_equal, &builtin_trampoline, Label::kNear);
 
   __ LoadTaggedPointerField(
@@ -1605,7 +1601,7 @@
   // Check function data field is actually a BytecodeArray object.
   __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
   __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
-                   TaggedRegister(rbx));
+                   rbx);
   __ Assert(
       equal,
       AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
@@ -1707,8 +1703,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
   __ LoadTaggedPointerField(feedback_vector,
                             FieldOperand(feedback_cell, Cell::kValueOffset));
   if (FLAG_debug_code) {
-    __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
-                     TaggedRegister(kScratchRegister));
+    __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, kScratchRegister);
     __ Assert(equal, AbortReason::kExpectedFeedbackVector);
   }
 
@@ -2257,9 +2252,8 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
   if (mode == CallOrConstructMode::kConstruct) {
     Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
-    TaggedRegister map(rbx);
-    __ LoadMap(map, rdx);
-    __ testb(FieldOperand(map, Map::kBitFieldOffset),
+    __ LoadMap(rbx, rdx);
+    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(Map::Bits1::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
@@ -2374,7 +2368,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
     __ movq(rcx, args.GetReceiverOperand());
     __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
     static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
-    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, TaggedRegister(rbx));
+    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
     __ j(above_equal, &done_convert);
     if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
       Label convert_global_proxy;
@@ -5171,7 +5165,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
   // always have baseline code.
   if (!is_osr) {
     Label start_with_baseline;
-    __ CmpObjectType(code_obj, CODET_TYPE, TaggedRegister(kScratchRegister));
+    __ CmpObjectType(code_obj, CODET_TYPE, kScratchRegister);
     __ j(equal, &start_with_baseline);
 
     // Start with bytecode as there is no baseline code.
@@ -5184,7 +5178,7 @@
     // Start with baseline code.
     __ bind(&start_with_baseline);
   } else if (FLAG_debug_code) {
-    __ CmpObjectType(code_obj, CODET_TYPE, TaggedRegister(kScratchRegister));
+    __ CmpObjectType(code_obj, CODET_TYPE, kScratchRegister);
     __ Assert(equal, AbortReason::kExpectedBaselineData);
   }
 
@@ -5207,8 +5201,7 @@
   Label install_baseline_code;
   // Check if feedback vector is valid. If not, call prepare for baseline to
   // allocate it.
-  __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
-                   TaggedRegister(kScratchRegister));
+  __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, kScratchRegister);
   __ j(not_equal, &install_baseline_code);
 
   // Save BytecodeOffset from the stack frame.
@@ -205,14 +205,6 @@ void TurboAssembler::LoadMap(Register destination, Register object) {
 #endif
 }
 
-void TurboAssembler::LoadMap(TaggedRegister destination, Register object) {
-  LoadTaggedPointerField(destination,
-                         FieldOperand(object, HeapObject::kMapOffset));
-#ifdef V8_MAP_PACKING
-  UnpackMapWord(destination);
-#endif
-}
-
 void TurboAssembler::LoadTaggedPointerField(Register destination,
                                             Operand field_operand) {
   if (COMPRESS_POINTERS_BOOL) {
@@ -2436,20 +2428,10 @@ void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type,
   CmpInstanceType(map, type);
 }
 
-void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type,
-                                   TaggedRegister map) {
-  LoadMap(map, heap_object);
-  CmpInstanceType(map, type);
-}
-
 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
   cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
 }
 
-void MacroAssembler::CmpInstanceType(TaggedRegister map, InstanceType type) {
-  cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
-}
-
 void MacroAssembler::CmpInstanceTypeRange(Register map,
                                           Register instance_type_out,
                                           InstanceType lower_limit,
@@ -2465,10 +2447,9 @@ void MacroAssembler::TestCodeTIsMarkedForDeoptimization(Register codet,
     testl(FieldOperand(codet, CodeDataContainer::kKindSpecificFlagsOffset),
           Immediate(1 << Code::kMarkedForDeoptimizationBit));
   } else {
-    TaggedRegister container(scratch);
-    LoadTaggedPointerField(container,
+    LoadTaggedPointerField(scratch,
                            FieldOperand(codet, Code::kCodeDataContainerOffset));
-    testl(FieldOperand(container, CodeDataContainer::kKindSpecificFlagsOffset),
+    testl(FieldOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset),
           Immediate(1 << Code::kMarkedForDeoptimizationBit));
   }
 }
@@ -2650,11 +2631,10 @@ void MacroAssembler::InvokeFunction(Register function, Register new_target,
                                     Register actual_parameter_count,
                                     InvokeType type) {
   ASM_CODE_COMMENT(this);
-  TaggedRegister sfi(rbx);
   LoadTaggedPointerField(
-      sfi, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+      rbx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   movzxwq(rbx,
-          FieldOperand(sfi, SharedFunctionInfo::kFormalParameterCountOffset));
+          FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));
 
   InvokeFunction(function, new_target, rbx, actual_parameter_count, type);
 }
@@ -3101,20 +3081,12 @@ void MacroAssembler::LeaveExitFrameEpilogue() {
 void MacroAssembler::LoadNativeContextSlot(Register dst, int index) {
   ASM_CODE_COMMENT(this);
   // Load native context.
-  TaggedRegister context(dst);
-  LoadMap(context, rsi);
+  LoadMap(dst, rsi);
   LoadTaggedPointerField(
-      context,
-      FieldOperand(context,
-                   Map::kConstructorOrBackPointerOrNativeContextOffset));
+      dst,
+      FieldOperand(dst, Map::kConstructorOrBackPointerOrNativeContextOffset));
   // Load value from native context.
-  if (COMPRESS_POINTERS_BOOL) {
-    LoadTaggedPointerField(
-        dst, Operand(kPtrComprCageBaseRegister, context.reg(),
-                     ScaleFactor::times_1, Context::SlotOffset(index)));
-  } else {
-    LoadTaggedPointerField(dst, Operand(dst, Context::SlotOffset(index)));
-  }
+  LoadTaggedPointerField(dst, Operand(dst, Context::SlotOffset(index)));
 }
 
 int TurboAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
@@ -3270,10 +3242,9 @@ void TurboAssembler::ComputeCodeStartAddress(Register dst) {
 // 3. if it is not zero then it jumps to the builtin.
 void TurboAssembler::BailoutIfDeoptimized(Register scratch) {
   int offset = Code::kCodeDataContainerOffset - Code::kHeaderSize;
-  TaggedRegister container(scratch);
-  LoadTaggedPointerField(container,
+  LoadTaggedPointerField(scratch,
                          Operand(kJavaScriptCallCodeStartRegister, offset));
-  testl(FieldOperand(container, CodeDataContainer::kKindSpecificFlagsOffset),
+  testl(FieldOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset),
         Immediate(1 << Code::kMarkedForDeoptimizationBit));
   Jump(BUILTIN_CODE(isolate(), CompileLazyDeoptimizedCode),
        RelocInfo::CODE_TARGET, not_zero);
@@ -288,7 +288,6 @@ class V8_EXPORT_PRIVATE TurboAssembler
 #endif
 
   void LoadMap(Register destination, Register object);
-  void LoadMap(TaggedRegister destination, Register object);
 
   void Move(Register dst, intptr_t x) {
     if (x == 0) {
@@ -806,13 +805,10 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
   // Incoming register is heap_object and outgoing register is map.
   // They may be the same register, and may be kScratchRegister.
   void CmpObjectType(Register heap_object, InstanceType type, Register map);
-  void CmpObjectType(Register heap_object, InstanceType type,
-                     TaggedRegister map);
 
   // Compare instance type for map.
   // Always use unsigned comparisons: above and below, not less and greater.
   void CmpInstanceType(Register map, InstanceType type);
-  void CmpInstanceType(TaggedRegister map, InstanceType type);
 
   // Compare instance type ranges for a map (low and high inclusive)
   // Always use unsigned comparisons: below_equal for a positive result.