[baseline][cleanup] Use cross platform condition

Bug: v8:11461
Change-Id: I34f377ade988f4ad36693fd65634ff509a043775
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4197345
Auto-Submit: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Toon Verwaest <verwaest@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85505}
This commit is contained in:
Victor Gomes 2023-01-26 14:12:43 +01:00 committed by V8 LUCI CQ
parent f4900cf92b
commit aeeab6305d
12 changed files with 137 additions and 455 deletions

View File

@ -39,35 +39,6 @@ class BaselineAssembler::ScratchRegisterScope {
UseScratchRegisterScope wrapped_scope_;
};
// Baseline-tier condition aliases for arm. Each enumerator's value IS the
// arm-encoded internal condition code (eq, ne, lo, hi, ...), so converting
// to internal::Condition is a representation-preserving cast.
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual = static_cast<uint32_t>(eq),
kNotEqual = static_cast<uint32_t>(ne),
kLessThan = static_cast<uint32_t>(lt),
kGreaterThan = static_cast<uint32_t>(gt),
kLessThanEqual = static_cast<uint32_t>(le),
kGreaterThanEqual = static_cast<uint32_t>(ge),
// Unsigned comparisons use the carry-based arm codes (lo/hi/ls/hs).
kUnsignedLessThan = static_cast<uint32_t>(lo),
kUnsignedGreaterThan = static_cast<uint32_t>(hi),
kUnsignedLessThanEqual = static_cast<uint32_t>(ls),
kUnsignedGreaterThanEqual = static_cast<uint32_t>(hs),
kOverflow = static_cast<uint32_t>(vs),
kNoOverflow = static_cast<uint32_t>(vc),
// kZero/kNotZero deliberately alias eq/ne: a preceding tst sets the flags
// such that eq/ne mean "all tested bits clear"/"some bit set".
kZero = static_cast<uint32_t>(eq),
kNotZero = static_cast<uint32_t>(ne),
};
// Converts a baseline Condition to the MacroAssembler's condition type.
inline internal::Condition AsMasmCondition(Condition cond) {
// This matters on arm, where each internal::Condition value is an encoded
// bit-field: the cast must preserve the exact representation, which the
// static_assert below guards.
static_assert(sizeof(internal::Condition) == sizeof(Condition));
return static_cast<internal::Condition>(cond);
}
namespace detail {
#ifdef DEBUG
@ -132,13 +103,13 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
Label* target, Label::Distance) {
__ tst(value, Operand(mask));
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance) {
__ cmp(lhs, Operand(rhs));
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -536,8 +507,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
// Mostly copied from code-generator-arm.cc
ScratchRegisterScope scope(this);
JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
&fallthrough);
JumpIf(kUnsignedGreaterThanEqual, reg, Operand(num_labels), &fallthrough);
// Ensure to emit the constant pool first if necessary.
__ CheckConstPool(true, true);
__ BlockConstPoolFor(num_labels);
@ -591,8 +561,8 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
__ JumpIf(Condition::kGreaterThanEqual, params_size,
Operand(actual_params_size), &corrected_args_count);
__ JumpIf(kGreaterThanEqual, params_size, Operand(actual_params_size),
&corrected_args_count);
__ masm()->mov(params_size, actual_params_size);
__ Bind(&corrected_args_count);

View File

@ -38,32 +38,6 @@ class BaselineAssembler::ScratchRegisterScope {
UseScratchRegisterScope wrapped_scope_;
};
// Baseline-tier condition aliases for arm64: each enumerator takes the value
// of the corresponding arm64 internal condition code, making the conversion
// below a plain value-preserving cast.
// TODO(v8:11461): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual = eq,
kNotEqual = ne,
kLessThan = lt,
kGreaterThan = gt,
kLessThanEqual = le,
kGreaterThanEqual = ge,
// Unsigned comparisons map to the carry-based codes (lo/hi/ls/hs).
kUnsignedLessThan = lo,
kUnsignedGreaterThan = hi,
kUnsignedLessThanEqual = ls,
kUnsignedGreaterThanEqual = hs,
kOverflow = vs,
kNoOverflow = vc,
// kZero/kNotZero alias eq/ne; they are meaningful after a flag-setting test.
kZero = eq,
kNotZero = ne,
};
// Converts a baseline Condition to the MacroAssembler's condition type.
inline internal::Condition AsMasmCondition(Condition cond) {
return static_cast<internal::Condition>(cond);
}
namespace detail {
#ifdef DEBUG
@ -125,19 +99,19 @@ void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
Label* target, Label::Distance) {
if (cc == Condition::kZero) {
if (cc == kZero) {
__ TestAndBranchIfAllClear(value, mask, target);
} else if (cc == Condition::kNotZero) {
} else if (cc == kNotZero) {
__ TestAndBranchIfAnySet(value, mask, target);
} else {
__ Tst(value, Immediate(mask));
__ B(AsMasmCondition(cc), target);
__ B(cc, target);
}
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance) {
__ CompareAndBranch(lhs, rhs, AsMasmCondition(cc), target);
__ CompareAndBranch(lhs, rhs, cc, target);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -173,14 +147,14 @@ void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Label* target, Label::Distance distance) {
__ AssertSmi(value);
__ CompareTaggedAndBranch(value, smi, AsMasmCondition(cc), target);
__ CompareTaggedAndBranch(value, smi, cc, target);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance) {
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ CompareTaggedAndBranch(lhs, rhs, AsMasmCondition(cc), target);
__ CompareTaggedAndBranch(lhs, rhs, cc, target);
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
@ -188,7 +162,7 @@ void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register tmp = temps.AcquireScratch();
__ Ldr(tmp, operand);
__ CompareTaggedAndBranch(value, tmp, AsMasmCondition(cc), target);
__ CompareTaggedAndBranch(value, tmp, cc, target);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
@ -196,7 +170,7 @@ void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
ScratchRegisterScope temps(this);
Register tmp = temps.AcquireScratch();
__ Ldr(tmp, operand);
__ CompareTaggedAndBranch(tmp, value, AsMasmCondition(cc), target);
__ CompareTaggedAndBranch(tmp, value, cc, target);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance) {
@ -601,7 +575,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
ScratchRegisterScope scope(this);
Register temp = scope.AcquireScratch();
Label table;
JumpIf(Condition::kUnsignedGreaterThanEqual, reg, num_labels, &fallthrough);
JumpIf(kUnsignedGreaterThanEqual, reg, num_labels, &fallthrough);
__ Adr(temp, &table);
int entry_size_log2 = 2;
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
@ -666,7 +640,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
__ JumpIf(Condition::kGreaterThanEqual, params_size, actual_params_size,
__ JumpIf(kGreaterThanEqual, params_size, actual_params_size,
&corrected_args_count);
__ masm()->Mov(params_size, actual_params_size);
__ Bind(&corrected_args_count);

View File

@ -17,8 +17,6 @@ namespace v8 {
namespace internal {
namespace baseline {
enum class Condition : uint32_t;
class BaselineAssembler {
public:
class ScratchRegisterScope;

View File

@ -561,8 +561,8 @@ void BaselineCompiler::VerifyFrame() {
__ Move(scratch, __ FeedbackVectorOperand());
Label is_smi, is_ok;
__ JumpIfSmi(scratch, &is_smi);
__ JumpIfObjectType(Condition::kEqual, scratch, FEEDBACK_VECTOR_TYPE,
scratch, &is_ok);
__ JumpIfObjectType(kEqual, scratch, FEEDBACK_VECTOR_TYPE, scratch,
&is_ok);
__ Bind(&is_smi);
__ masm()->Abort(AbortReason::kExpectedFeedbackVector);
__ Bind(&is_ok);
@ -669,8 +669,8 @@ void BaselineCompiler::JumpIfToBoolean(bool do_jump_if_true, Label* label,
// the original value into kInterpreterAccumulatorRegister, so we don't have
// to worry about it getting clobbered.
static_assert(kReturnRegister0 == kInterpreterAccumulatorRegister);
__ JumpIfSmi(do_jump_if_true ? Condition::kNotEqual : Condition::kEqual,
kReturnRegister1, Smi::FromInt(0), label, distance);
__ JumpIfSmi(do_jump_if_true ? kNotEqual : kEqual, kReturnRegister1,
Smi::FromInt(0), label, distance);
}
void BaselineCompiler::VisitLdaZero() {
@ -1490,8 +1490,7 @@ void BaselineCompiler::VisitTestReferenceEqual() {
SelectBooleanConstant(
kInterpreterAccumulatorRegister,
[&](Label* is_true, Label::Distance distance) {
__ JumpIfTagged(Condition::kEqual,
__ RegisterFrameOperand(RegisterOperand(0)),
__ JumpIfTagged(kEqual, __ RegisterFrameOperand(RegisterOperand(0)),
kInterpreterAccumulatorRegister, is_true, distance);
});
}
@ -1521,8 +1520,8 @@ void BaselineCompiler::VisitTestUndetectable() {
Register map_bit_field = kInterpreterAccumulatorRegister;
__ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
__ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
__ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
Condition::kZero, &not_undetectable, Label::kNear);
__ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask, kZero,
&not_undetectable, Label::kNear);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
__ Jump(&done, Label::kNear);
@ -1562,7 +1561,7 @@ void BaselineCompiler::VisitTestTypeOf() {
case interpreter::TestTypeOfFlags::LiteralFlag::kNumber: {
Label is_smi, is_heap_number;
__ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
__ JumpIfObjectType(Condition::kEqual, kInterpreterAccumulatorRegister,
__ JumpIfObjectType(kEqual, kInterpreterAccumulatorRegister,
HEAP_NUMBER_TYPE, scratch_scope.AcquireScratch(),
&is_heap_number, Label::kNear);
@ -1578,10 +1577,9 @@ void BaselineCompiler::VisitTestTypeOf() {
Label is_smi, bad_instance_type;
__ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
static_assert(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
__ JumpIfObjectType(Condition::kGreaterThanEqual,
kInterpreterAccumulatorRegister, FIRST_NONSTRING_TYPE,
scratch_scope.AcquireScratch(), &bad_instance_type,
Label::kNear);
__ JumpIfObjectType(kGreaterThanEqual, kInterpreterAccumulatorRegister,
FIRST_NONSTRING_TYPE, scratch_scope.AcquireScratch(),
&bad_instance_type, Label::kNear);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
__ Jump(&done, Label::kNear);
@ -1594,7 +1592,7 @@ void BaselineCompiler::VisitTestTypeOf() {
case interpreter::TestTypeOfFlags::LiteralFlag::kSymbol: {
Label is_smi, bad_instance_type;
__ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
__ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
__ JumpIfObjectType(kNotEqual, kInterpreterAccumulatorRegister,
SYMBOL_TYPE, scratch_scope.AcquireScratch(),
&bad_instance_type, Label::kNear);
@ -1624,7 +1622,7 @@ void BaselineCompiler::VisitTestTypeOf() {
case interpreter::TestTypeOfFlags::LiteralFlag::kBigInt: {
Label is_smi, bad_instance_type;
__ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
__ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
__ JumpIfObjectType(kNotEqual, kInterpreterAccumulatorRegister,
BIGINT_TYPE, scratch_scope.AcquireScratch(),
&bad_instance_type, Label::kNear);
@ -1649,7 +1647,7 @@ void BaselineCompiler::VisitTestTypeOf() {
__ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
__ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
__ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
Condition::kZero, &not_undetectable, Label::kNear);
kZero, &not_undetectable, Label::kNear);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
__ Jump(&done, Label::kNear);
@ -1668,10 +1666,10 @@ void BaselineCompiler::VisitTestTypeOf() {
Register map_bit_field = kInterpreterAccumulatorRegister;
__ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
__ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
__ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask,
Condition::kZero, &not_callable, Label::kNear);
__ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask, kZero,
&not_callable, Label::kNear);
__ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
Condition::kNotZero, &undetectable, Label::kNear);
kNotZero, &undetectable, Label::kNear);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
__ Jump(&done, Label::kNear);
@ -1693,7 +1691,7 @@ void BaselineCompiler::VisitTestTypeOf() {
// If the object's instance type isn't within the range, return false.
static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
Register map = scratch_scope.AcquireScratch();
__ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
__ JumpIfObjectType(kLessThan, kInterpreterAccumulatorRegister,
FIRST_JS_RECEIVER_TYPE, map, &bad_instance_type,
Label::kNear);
@ -1703,8 +1701,7 @@ void BaselineCompiler::VisitTestTypeOf() {
__ TestAndBranch(map_bit_field,
Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask,
Condition::kNotZero, &undetectable_or_callable,
Label::kNear);
kNotZero, &undetectable_or_callable, Label::kNear);
__ Bind(&is_null);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
@ -1918,8 +1915,8 @@ void BaselineCompiler::VisitJumpLoop() {
FeedbackVector::kOsrStateOffset);
static_assert(FeedbackVector::MaybeHasOptimizedOsrCodeBit::encode(true) >
FeedbackVector::kMaxOsrUrgency);
__ JumpIfByte(Condition::kUnsignedGreaterThan, osr_state, loop_depth,
&osr_armed, Label::kNear);
__ JumpIfByte(kUnsignedGreaterThan, osr_state, loop_depth, &osr_armed,
Label::kNear);
}
__ Bind(&osr_not_armed);
@ -1946,8 +1943,8 @@ void BaselineCompiler::VisitJumpLoop() {
iterator().GetSlotOperand(2), &osr,
Label::kNear);
__ DecodeField<FeedbackVector::OsrUrgencyBits>(scratch1);
__ JumpIfByte(Condition::kUnsignedLessThanEqual, scratch1, loop_depth,
&osr_not_armed, Label::kNear);
__ JumpIfByte(kUnsignedLessThanEqual, scratch1, loop_depth, &osr_not_armed,
Label::kNear);
__ Bind(&osr);
CallBuiltin<Builtin::kBaselineOnStackReplacement>(maybe_target_code);
@ -2049,7 +2046,7 @@ void BaselineCompiler::VisitJumpIfJSReceiver() {
Label is_smi, dont_jump;
__ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
__ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
__ JumpIfObjectType(kLessThan, kInterpreterAccumulatorRegister,
FIRST_JS_RECEIVER_TYPE, scratch_scope.AcquireScratch(),
&dont_jump);
UpdateInterruptBudgetAndDoInterpreterJump();
@ -2097,8 +2094,7 @@ void BaselineCompiler::VisitForInContinue() {
[&](Label* is_true, Label::Distance distance) {
LoadRegister(kInterpreterAccumulatorRegister, 0);
__ JumpIfTagged(
Condition::kNotEqual,
kInterpreterAccumulatorRegister,
kNotEqual, kInterpreterAccumulatorRegister,
__ RegisterFrameOperand(RegisterOperand(1)),
is_true, distance);
});
@ -2190,8 +2186,8 @@ void BaselineCompiler::VisitThrowIfNotSuperConstructor() {
Register map_bit_field = scratch_scope.AcquireScratch();
__ LoadMap(map_bit_field, reg);
__ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
__ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask,
Condition::kNotZero, &done, Label::kNear);
__ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask, kNotZero,
&done, Label::kNear);
CallRuntime(Runtime::kThrowNotSuperConstructor, reg, __ FunctionOperand());

View File

@ -44,32 +44,6 @@ class BaselineAssembler::ScratchRegisterScope {
int registers_used_;
};
// Baseline-tier condition aliases for the x86 family: each enumerator takes
// the value of the corresponding internal condition code (equal, below,
// above, ...), so the conversion below is a plain cast.
// TODO(v8:11461): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual = equal,
kNotEqual = not_equal,
kLessThan = less,
kGreaterThan = greater,
kLessThanEqual = less_equal,
kGreaterThanEqual = greater_equal,
// Unsigned comparisons use the below/above family of x86 conditions.
kUnsignedLessThan = below,
kUnsignedGreaterThan = above,
kUnsignedLessThanEqual = below_equal,
kUnsignedGreaterThanEqual = above_equal,
kOverflow = overflow,
kNoOverflow = no_overflow,
kZero = zero,
kNotZero = not_zero,
};
// Converts a baseline Condition to the MacroAssembler's condition type.
inline internal::Condition AsMasmCondition(Condition cond) {
return static_cast<internal::Condition>(cond);
}
namespace detail {
#define __ masm_->
@ -124,7 +98,7 @@ void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
Label* target,
Label::Distance distance) {
__ cmp(left, Immediate(right));
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
@ -139,13 +113,13 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
} else {
__ test(value, Immediate(mask));
}
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance distance) {
__ cmp(lhs, rhs);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -153,7 +127,7 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
Label::Distance distance) {
__ AssertNotSmi(object);
__ CmpObjectType(object, instance_type, map);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@ -167,7 +141,7 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ movd(eax, xmm0);
}
__ CmpInstanceType(map, instance_type);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
MemOperand operand, Label* target,
@ -181,31 +155,31 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
} else {
__ cmp(value, Immediate(smi));
}
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance distance) {
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ cmp(lhs, rhs);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
Label::Distance distance) {
__ cmp(operand, value);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
Label::Distance distance) {
__ cmp(operand, value);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance distance) {
__ cmpb(value, Immediate(byte));
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {
return __ mov(RegisterFrameOperand(output), source);

View File

@ -37,32 +37,6 @@ class BaselineAssembler::ScratchRegisterScope {
UseScratchRegisterScope wrapped_scope_;
};
// Baseline-tier condition aliases (loong64 port): enumerator values are the
// corresponding internal condition codes, so conversion is a direct cast.
enum class Condition : uint32_t {
kEqual = eq,
kNotEqual = ne,
kLessThan = lt,
kGreaterThan = gt,
kLessThanEqual = le,
kGreaterThanEqual = ge,
// Unsigned comparisons use the dedicated U-prefixed condition codes.
kUnsignedLessThan = Uless,
kUnsignedGreaterThan = Ugreater,
kUnsignedLessThanEqual = Uless_equal,
kUnsignedGreaterThanEqual = Ugreater_equal,
kOverflow = overflow,
kNoOverflow = no_overflow,
// kZero/kNotZero alias eq/ne: callers compare a masked value against
// zero_reg, so equality against zero expresses the zero test.
kZero = eq,
kNotZero = ne,
};
// Converts a baseline Condition to the MacroAssembler's condition type; the
// static_assert guards that the cast cannot truncate.
inline internal::Condition AsMasmCondition(Condition cond) {
static_assert(sizeof(internal::Condition) == sizeof(Condition));
return static_cast<internal::Condition>(cond);
}
namespace detail {
#ifdef DEBUG
@ -123,12 +97,12 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ And(scratch, value, Operand(mask));
__ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
__ Branch(target, cc, scratch, Operand(zero_reg));
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -137,7 +111,7 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
ScratchRegisterScope temps(this);
Register type = temps.AcquireScratch();
__ GetObjectType(object, map, type);
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@ -150,7 +124,7 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
}
__ Ld_d(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Label* target, Label::Distance) {
@ -158,13 +132,13 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Register scratch = temps.AcquireScratch();
__ li(scratch, Operand(smi));
__ SmiUntag(scratch);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance) {
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
@ -172,7 +146,7 @@ void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ Ld_d(scratch, operand);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
@ -180,11 +154,11 @@ void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ Ld_d(scratch, operand);
__ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
__ Branch(target, cc, scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), value, Operand(byte));
__ Branch(target, cc, value, Operand(byte));
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {
Move(RegisterFrameOperand(output), source);
@ -504,8 +478,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
__ Sub_d(reg, reg, Operand(case_value_base));
}
__ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
reg, Operand(num_labels));
__ Branch(&fallthrough, kUnsignedGreaterThanEqual, reg, Operand(num_labels));
__ GenerateSwitchTable(reg, num_labels,
[labels](size_t i) { return labels[i]; });

View File

@ -37,32 +37,6 @@ class BaselineAssembler::ScratchRegisterScope {
UseScratchRegisterScope wrapped_scope_;
};
// Baseline-tier condition aliases (mips64 port): enumerator values are the
// corresponding internal condition codes, so conversion is a direct cast.
enum class Condition : uint32_t {
kEqual = eq,
kNotEqual = ne,
kLessThan = lt,
kGreaterThan = gt,
kLessThanEqual = le,
kGreaterThanEqual = ge,
// Unsigned comparisons use the dedicated U-prefixed condition codes.
kUnsignedLessThan = Uless,
kUnsignedGreaterThan = Ugreater,
kUnsignedLessThanEqual = Uless_equal,
kUnsignedGreaterThanEqual = Ugreater_equal,
kOverflow = overflow,
kNoOverflow = no_overflow,
// kZero/kNotZero alias eq/ne: callers compare a masked value against
// zero_reg, so equality against zero expresses the zero test.
kZero = eq,
kNotZero = ne,
};
// Converts a baseline Condition to the MacroAssembler's condition type; the
// static_assert guards that the cast cannot truncate.
inline internal::Condition AsMasmCondition(Condition cond) {
static_assert(sizeof(internal::Condition) == sizeof(Condition));
return static_cast<internal::Condition>(cond);
}
namespace detail {
#ifdef DEBUG
@ -123,12 +97,12 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ And(scratch, value, Operand(mask));
__ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
__ Branch(target, cc, scratch, Operand(zero_reg));
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -137,7 +111,7 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
ScratchRegisterScope temps(this);
Register type = temps.AcquireScratch();
__ GetObjectType(object, map, type);
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@ -150,7 +124,7 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
}
__ Ld(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
MemOperand operand, Label* target,
@ -158,7 +132,7 @@ void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ Ld(scratch, operand);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Label* target, Label::Distance) {
@ -166,13 +140,13 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Register scratch = temps.AcquireScratch();
__ li(scratch, Operand(smi));
__ SmiUntag(scratch);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance) {
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
@ -180,7 +154,7 @@ void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ Ld(scratch, operand);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
@ -188,11 +162,11 @@ void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ Ld(scratch, operand);
__ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
__ Branch(target, cc, scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), value, Operand(byte));
__ Branch(target, cc, value, Operand(byte));
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {
@ -514,8 +488,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
__ Dsubu(reg, reg, Operand(case_value_base));
}
__ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
reg, Operand(num_labels));
__ Branch(&fallthrough, kUnsignedGreaterThanEqual, reg, Operand(num_labels));
__ GenerateSwitchTable(reg, num_labels,
[labels](size_t i) { return labels[i]; });

View File

@ -49,85 +49,24 @@ class BaselineAssembler::ScratchRegisterScope {
int registers_used_;
};
// Baseline-tier conditions for this port. Unlike the other architectures,
// the enumerators carry no explicit values, so the conversion below must go
// through an exhaustive switch rather than a cast.
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual,
kNotEqual,
kLessThan,
kGreaterThan,
kLessThanEqual,
kGreaterThanEqual,
kUnsignedLessThan,
kUnsignedGreaterThan,
kUnsignedLessThanEqual,
kUnsignedGreaterThanEqual,
kOverflow,
kNoOverflow,
kZero,
kNotZero
};
// Maps a baseline Condition to the MacroAssembler's condition code.
// NOTE(review): the unsigned conditions map to the same codes as the signed
// ones (e.g. kUnsignedLessThan -> lt) — presumably signedness is expressed by
// choosing the unsigned vs. signed compare instruction (cf. CmpU32 in the
// JumpIfHelper hunk below and IsSignedCondition), not by the branch
// condition. Confirm against the port's MacroAssembler before relying on it.
inline internal::Condition AsMasmCondition(Condition cond) {
static_assert(sizeof(internal::Condition) == sizeof(Condition));
switch (cond) {
case Condition::kEqual:
return eq;
case Condition::kNotEqual:
return ne;
case Condition::kLessThan:
return lt;
case Condition::kGreaterThan:
return gt;
case Condition::kLessThanEqual:
return le;
case Condition::kGreaterThanEqual:
return ge;
case Condition::kUnsignedLessThan:
return lt;
case Condition::kUnsignedGreaterThan:
return gt;
case Condition::kUnsignedLessThanEqual:
return le;
case Condition::kUnsignedGreaterThanEqual:
return ge;
case Condition::kOverflow:
return overflow;
case Condition::kNoOverflow:
return nooverflow;
// kZero/kNotZero are equality-against-zero after a flag-setting AND.
case Condition::kZero:
return eq;
case Condition::kNotZero:
return ne;
default:
UNREACHABLE();
}
}
inline bool IsSignedCondition(Condition cond) {
switch (cond) {
case Condition::kEqual:
case Condition::kNotEqual:
case Condition::kLessThan:
case Condition::kGreaterThan:
case Condition::kLessThanEqual:
case Condition::kGreaterThanEqual:
case Condition::kOverflow:
case Condition::kNoOverflow:
case Condition::kZero:
case Condition::kNotZero:
case kEqual:
case kNotEqual:
case kLessThan:
case kGreaterThan:
case kLessThanEqual:
case kGreaterThanEqual:
case kOverflow:
case kNoOverflow:
case kZero:
case kNotZero:
return true;
case Condition::kUnsignedLessThan:
case Condition::kUnsignedGreaterThan:
case Condition::kUnsignedLessThanEqual:
case Condition::kUnsignedGreaterThanEqual:
case kUnsignedLessThan:
case kUnsignedGreaterThan:
case kUnsignedLessThanEqual:
case kUnsignedGreaterThanEqual:
return false;
default:
@ -155,7 +94,7 @@ static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,
__ CmpU32(lhs, rhs);
}
}
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
#undef __
@ -221,7 +160,7 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
Label* target, Label::Distance) {
ASM_CODE_COMMENT(masm_);
__ AndU64(r0, value, Operand(mask), ip, SetRC);
__ b(AsMasmCondition(cc), target, cr0);
__ b(cc, target, cr0);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
@ -232,7 +171,7 @@ void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
} else {
__ CmpU64(lhs, rhs, r0);
}
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
@ -674,8 +613,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
}
// Mostly copied from code-generator-arm.cc
JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
&fallthrough);
JumpIf(kUnsignedGreaterThanEqual, reg, Operand(num_labels), &fallthrough);
// Ensure to emit the constant pool first if necessary.
int entry_size_log2 = 3;
__ ShiftLeftU32(reg, reg, Operand(entry_size_log2));
@ -737,8 +675,8 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
JumpIfHelper(__ masm(), Condition::kGreaterThanEqual, params_size,
actual_params_size, &corrected_args_count);
JumpIfHelper(__ masm(), kGreaterThanEqual, params_size, actual_params_size,
&corrected_args_count);
__ masm()->mr(params_size, actual_params_size);
__ Bind(&corrected_args_count);

View File

@ -36,31 +36,6 @@ class BaselineAssembler::ScratchRegisterScope {
UseScratchRegisterScope wrapped_scope_;
};
enum class Condition : uint32_t {
kEqual = eq,
kNotEqual = ne,
kLessThan = lt,
kGreaterThan = gt,
kLessThanEqual = le,
kGreaterThanEqual = ge,
kUnsignedLessThan = Uless,
kUnsignedGreaterThan = Ugreater,
kUnsignedLessThanEqual = Uless_equal,
kUnsignedGreaterThanEqual = Ugreater_equal,
kOverflow = overflow,
kNoOverflow = no_overflow,
kZero = eq,
kNotZero = ne,
};
inline internal::Condition AsMasmCondition(Condition cond) {
return static_cast<internal::Condition>(cond);
}
namespace detail {
#ifdef DEBUG
@ -121,12 +96,12 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
ScratchRegisterScope temps(this);
Register tmp = temps.AcquireScratch();
__ And(tmp, value, Operand(mask));
__ Branch(target, AsMasmCondition(cc), tmp, Operand(zero_reg));
__ Branch(target, cc, tmp, Operand(zero_reg));
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -135,7 +110,7 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
ScratchRegisterScope temps(this);
Register type = temps.AcquireScratch();
__ GetObjectType(object, map, type);
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@ -148,7 +123,7 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
}
__ LoadWord(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
__ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
__ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
MemOperand operand, Label* target,
@ -156,7 +131,7 @@ void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register temp = temps.AcquireScratch();
__ LoadWord(temp, operand);
__ Branch(target, AsMasmCondition(cc), value, Operand(temp));
__ Branch(target, cc, value, Operand(temp));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Label* target, Label::Distance) {
@ -164,14 +139,14 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
Register temp = temps.AcquireScratch();
__ li(temp, Operand(smi));
__ SmiUntag(temp);
__ Branch(target, AsMasmCondition(cc), value, Operand(temp));
__ Branch(target, cc, value, Operand(temp));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance) {
// todo: compress pointer
__ AssertSmi(lhs);
__ AssertSmi(rhs);
__ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
__ Branch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
@ -180,7 +155,7 @@ void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ LoadWord(scratch, operand);
__ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
__ Branch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
@ -189,11 +164,11 @@ void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
ScratchRegisterScope temps(this);
Register scratch = temps.AcquireScratch();
__ LoadWord(scratch, operand);
__ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
__ Branch(target, cc, scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance) {
__ Branch(target, AsMasmCondition(cc), value, Operand(byte));
__ Branch(target, cc, value, Operand(byte));
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {
@ -519,8 +494,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
// Mostly copied from code-generator-riscv64.cc
ScratchRegisterScope scope(this);
Label table;
__ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
reg, Operand(num_labels));
__ Branch(&fallthrough, kUnsignedGreaterThanEqual, reg, Operand(num_labels));
int64_t imm64;
imm64 = __ branch_long_offset(&table);
CHECK(is_int32(imm64 + 0x800));

View File

@ -48,85 +48,24 @@ class BaselineAssembler::ScratchRegisterScope {
int registers_used_;
};
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual,
kNotEqual,
kLessThan,
kGreaterThan,
kLessThanEqual,
kGreaterThanEqual,
kUnsignedLessThan,
kUnsignedGreaterThan,
kUnsignedLessThanEqual,
kUnsignedGreaterThanEqual,
kOverflow,
kNoOverflow,
kZero,
kNotZero
};
inline internal::Condition AsMasmCondition(Condition cond) {
static_assert(sizeof(internal::Condition) == sizeof(Condition));
switch (cond) {
case Condition::kEqual:
return eq;
case Condition::kNotEqual:
return ne;
case Condition::kLessThan:
return lt;
case Condition::kGreaterThan:
return gt;
case Condition::kLessThanEqual:
return le;
case Condition::kGreaterThanEqual:
return ge;
case Condition::kUnsignedLessThan:
return lt;
case Condition::kUnsignedGreaterThan:
return gt;
case Condition::kUnsignedLessThanEqual:
return le;
case Condition::kUnsignedGreaterThanEqual:
return ge;
case Condition::kOverflow:
return overflow;
case Condition::kNoOverflow:
return nooverflow;
case Condition::kZero:
return eq;
case Condition::kNotZero:
return ne;
default:
UNREACHABLE();
}
}
inline bool IsSignedCondition(Condition cond) {
switch (cond) {
case Condition::kEqual:
case Condition::kNotEqual:
case Condition::kLessThan:
case Condition::kGreaterThan:
case Condition::kLessThanEqual:
case Condition::kGreaterThanEqual:
case Condition::kOverflow:
case Condition::kNoOverflow:
case Condition::kZero:
case Condition::kNotZero:
case kEqual:
case kNotEqual:
case kLessThan:
case kGreaterThan:
case kLessThanEqual:
case kGreaterThanEqual:
case kOverflow:
case kNoOverflow:
case kZero:
case kNotZero:
return true;
case Condition::kUnsignedLessThan:
case Condition::kUnsignedGreaterThan:
case Condition::kUnsignedLessThanEqual:
case Condition::kUnsignedGreaterThanEqual:
case kUnsignedLessThan:
case kUnsignedGreaterThan:
case kUnsignedLessThanEqual:
case kUnsignedGreaterThanEqual:
return false;
default:
@ -154,7 +93,7 @@ static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,
__ CmpU32(lhs, rhs);
}
}
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
#undef __
@ -220,7 +159,7 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
Label* target, Label::Distance) {
ASM_CODE_COMMENT(masm_);
__ AndP(r0, value, Operand(mask));
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
@ -231,7 +170,7 @@ void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
} else {
__ CmpU64(lhs, rhs);
}
__ b(AsMasmCondition(cc), target);
__ b(cc, target);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
@ -687,8 +626,7 @@ void BaselineAssembler::Switch(Register reg, int case_value_base,
// Mostly copied from code-generator-arm.cc
ScratchRegisterScope scope(this);
JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
&fallthrough);
JumpIf(kUnsignedGreaterThanEqual, reg, Operand(num_labels), &fallthrough);
// Ensure to emit the constant pool first if necessary.
int entry_size_log2 = 3;
__ ShiftLeftU32(reg, reg, Operand(entry_size_log2));
@ -745,8 +683,8 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
JumpIfHelper(__ masm(), Condition::kGreaterThanEqual, params_size,
actual_params_size, &corrected_args_count);
JumpIfHelper(__ masm(), kGreaterThanEqual, params_size, actual_params_size,
&corrected_args_count);
__ masm()->mov(params_size, actual_params_size);
__ Bind(&corrected_args_count);

View File

@ -46,32 +46,6 @@ class BaselineAssembler::ScratchRegisterScope {
int registers_used_;
};
// TODO(v8:11461): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
kEqual = equal,
kNotEqual = not_equal,
kLessThan = less,
kGreaterThan = greater,
kLessThanEqual = less_equal,
kGreaterThanEqual = greater_equal,
kUnsignedLessThan = below,
kUnsignedGreaterThan = above,
kUnsignedLessThanEqual = below_equal,
kUnsignedGreaterThanEqual = above_equal,
kOverflow = overflow,
kNoOverflow = no_overflow,
kZero = zero,
kNotZero = not_zero,
};
inline internal::Condition AsMasmCondition(Condition cond) {
return static_cast<internal::Condition>(cond);
}
namespace detail {
#define __ masm_->
@ -130,13 +104,13 @@ void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
} else {
__ testl(value, Immediate(mask));
}
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
Label* target, Label::Distance distance) {
__ cmpq(lhs, rhs);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
InstanceType instance_type,
@ -144,7 +118,7 @@ void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
Label::Distance distance) {
__ AssertNotSmi(object);
__ CmpObjectType(object, instance_type, map);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
InstanceType instance_type,
@ -156,30 +130,30 @@ void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
__ Assert(equal, AbortReason::kUnexpectedValue);
}
__ CmpInstanceType(map, instance_type);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
MemOperand operand, Label* target,
Label::Distance distance) {
__ cmpq(value, operand);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Smi smi,
Label* target, Label::Distance distance) {
__ SmiCompare(lhs, smi);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
Label* target, Label::Distance distance) {
__ SmiCompare(lhs, rhs);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
Label* target,
Label::Distance distance) {
__ cmpq(left, Immediate(right));
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
// cmp_tagged
@ -187,18 +161,18 @@ void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
MemOperand operand, Label* target,
Label::Distance distance) {
__ cmp_tagged(value, operand);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
Register value, Label* target,
Label::Distance distance) {
__ cmp_tagged(operand, value);
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
Label* target, Label::Distance distance) {
__ cmpb(value, Immediate(byte));
__ j(AsMasmCondition(cc), target, distance);
__ j(cc, target, distance);
}
void BaselineAssembler::Move(interpreter::Register output, Register source) {

View File

@ -2482,8 +2482,8 @@ void AttemptOnStackReplacement(MaglevAssembler* masm,
__ LoadByte(scratch0,
FieldMemOperand(scratch0, FeedbackVector::kOsrStateOffset));
__ DecodeField<FeedbackVector::OsrUrgencyBits>(scratch0);
basm.JumpIfByte(baseline::Condition::kUnsignedLessThanEqual, scratch0,
loop_depth, *no_code_for_osr, Label::kNear);
basm.JumpIfByte(kUnsignedLessThanEqual, scratch0, loop_depth,
*no_code_for_osr, Label::kNear);
// The osr_urgency exceeds the current loop_depth, signaling an OSR
// request. Call into runtime to compile.