s390x: rename Cmp operations
Change-Id: Idb8948c3ff9209a6a41d0793cd2f5c1557b417df
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2598697
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Commit-Queue: Junliang Yan <junyan@redhat.com>
Cr-Commit-Position: refs/heads/master@{#71851}
parent 94bf7d9f79
commit 20703d7f99
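This change renames the s390 TurboAssembler compare helpers so that operand width and signedness are spelled out in the name, replacing the old pointer-size ("P") and "Logical" spellings. A minimal caller-side sketch of the mapping (the operands shown are illustrative, not taken from any single call site):

    __ Cmp32(lhs, rhs);          ->  __ CmpS32(lhs, rhs);   // signed 32-bit compare
    __ CmpP(lhs, rhs);           ->  __ CmpS64(lhs, rhs);   // signed 64-bit (pointer-sized) compare
    __ CmpLogical32(lhs, rhs);   ->  __ CmpU32(lhs, rhs);   // unsigned ("logical") 32-bit compare
    __ CmpLogicalP(lhs, rhs);    ->  __ CmpU64(lhs, rhs);   // unsigned ("logical") 64-bit compare

The Register, Operand and MemOperand overloads follow the same pattern, and the hunks below apply the rename mechanically at every call site.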
@@ -363,7 +363,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
 __ Move(scratch, debug_suspended_generator);
 __ LoadP(scratch, MemOperand(scratch));
- __ CmpP(scratch, r3);
+ __ CmpS64(scratch, r3);
 __ beq(&prepare_step_in_suspended_generator);
 __ bind(&stepping_prepared);

@@ -371,7 +371,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
 // (i.e. debug break and preemption) here, so check the "real stack limit".
 Label stack_overflow;
 __ LoadP(scratch, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
- __ CmpLogicalP(sp, scratch);
+ __ CmpU64(sp, scratch);
 __ blt(&stack_overflow);

 // ----------- S t a t e -------------

@@ -649,7 +649,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
 // Check if the current stack frame is marked as the outermost JS frame.
 Label non_outermost_js_2;
 __ pop(r7);
- __ CmpP(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
+ __ CmpS64(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
 __ bne(&non_outermost_js_2, Label::kNear);
 __ mov(scrach, Operand::Zero());
 __ Move(r7, js_entry_sp);

@@ -872,7 +872,7 @@ static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
 // If actual is bigger than formal, then we should use it to free up the stack
 // arguments.
 Label corrected_args_count;
- __ CmpP(params_size, actual_params_size);
+ __ CmpS64(params_size, actual_params_size);
 __ bge(&corrected_args_count);
 __ mov(params_size, actual_params_size);
 __ bind(&corrected_args_count);

@@ -890,7 +890,7 @@ static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
 OptimizationMarker expected_marker,
 Runtime::FunctionId function_id) {
 Label no_match;
- __ CmpP(actual_marker, Operand(expected_marker));
+ __ CmpS64(actual_marker, Operand(expected_marker));
 __ bne(&no_match);
 GenerateTailCallToReturnedCode(masm, function_id);
 __ bind(&no_match);

@@ -1003,7 +1003,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
 STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
 STATIC_ASSERT(3 ==
 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
- __ CmpP(bytecode, Operand(0x3));
+ __ CmpS64(bytecode, Operand(0x3));
 __ bgt(&process_bytecode);
 __ tmll(bytecode, Operand(0x1));
 __ bne(&extra_wide);

@@ -1026,9 +1026,9 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
 __ bind(&process_bytecode);

 // Bailout to the return label if this is a return bytecode.
- #define JUMP_IF_EQUAL(NAME) \
-   __ CmpP(bytecode, \
-           Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
+ #define JUMP_IF_EQUAL(NAME) \
+   __ CmpS64(bytecode, \
+             Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
 __ beq(if_return);
 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
 #undef JUMP_IF_EQUAL

@@ -1036,8 +1036,8 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
 // If this is a JumpLoop, re-execute it to perform the jump to the beginning
 // of the loop.
 Label end, not_jump_loop;
- __ CmpP(bytecode,
-         Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
+ __ CmpS64(bytecode,
+           Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
 __ bne(&not_jump_loop);
 // We need to restore the original bytecode_offset since we might have
 // increased it to skip the wide / extra-wide prefix bytecode.

@@ -1103,7 +1103,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
 __ LoadTaggedPointerField(
 r6, FieldMemOperand(feedback_vector, HeapObject::kMapOffset));
 __ LoadU16(r6, FieldMemOperand(r6, Map::kInstanceTypeOffset));
- __ CmpP(r6, Operand(FEEDBACK_VECTOR_TYPE));
+ __ CmpS64(r6, Operand(FEEDBACK_VECTOR_TYPE));
 __ bne(&push_stack_frame);

 Register optimization_state = r6;

@@ -1165,8 +1165,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {

 // Do a stack check to ensure we don't go over the limit.
 __ SubS64(r8, sp, r4);
- __ CmpLogicalP(r8,
-                __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
+ __ CmpU64(r8, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
 __ blt(&stack_overflow);

 // If ok, push undefined as the initial value for all register file entries.

@@ -1190,7 +1189,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
 __ LoadS32(r8, FieldMemOperand(
 kInterpreterBytecodeArrayRegister,
 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
- __ CmpP(r8, Operand::Zero());
+ __ CmpS64(r8, Operand::Zero());
 __ beq(&no_incoming_new_target_or_generator_register);
 __ ShiftLeftU64(r8, r8, Operand(kSystemPointerSizeLog2));
 __ StoreU64(r5, MemOperand(fp, r8));

@@ -1200,7 +1199,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
 // TODO(solanes): Merge with the real stack limit check above.
 Label stack_check_interrupt, after_stack_check_interrupt;
 __ LoadP(r0, __ StackLimitAsMemOperand(StackLimitKind::kInterruptStackLimit));
- __ CmpLogicalP(sp, r0);
+ __ CmpU64(sp, r0);
 __ blt(&stack_check_interrupt);
 __ bind(&after_stack_check_interrupt);

@@ -1497,8 +1496,8 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {

 if (FLAG_debug_code) {
 Label okay;
- __ CmpP(kInterpreterBytecodeOffsetRegister,
-         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
+ __ CmpS64(kInterpreterBytecodeOffsetRegister,
+           Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
 __ bge(&okay);
 __ bkpt(0);
 __ bind(&okay);

@@ -1524,9 +1523,9 @@ void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
 __ SmiUntag(kInterpreterBytecodeOffsetRegister);

 Label enter_bytecode, function_entry_bytecode;
- __ CmpP(kInterpreterBytecodeOffsetRegister,
-         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
-                 kFunctionEntryBytecodeOffset));
+ __ CmpS64(kInterpreterBytecodeOffsetRegister,
+           Operand(BytecodeArray::kHeaderSize - kHeapObjectTag +
+                   kFunctionEntryBytecodeOffset));
 __ beq(&function_entry_bytecode);

 // Load the current bytecode.

@@ -1936,11 +1935,11 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
 FieldMemOperand(r4, HeapObject::kMapOffset));
 __ LoadS16(scratch,
 FieldMemOperand(scratch, Map::kInstanceTypeOffset));
- __ CmpP(scratch, Operand(FIXED_ARRAY_TYPE));
+ __ CmpS64(scratch, Operand(FIXED_ARRAY_TYPE));
 __ beq(&ok);
- __ CmpP(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
+ __ CmpS64(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
 __ bne(&fail);
- __ CmpP(r6, Operand::Zero());
+ __ CmpS64(r6, Operand::Zero());
 __ beq(&ok);
 // Fall through.
 __ bind(&fail);

@@ -1978,7 +1977,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
 // Push arguments onto the stack (thisArgument is already on the stack).
 {
 Label loop, no_args, skip;
- __ CmpP(r6, Operand::Zero());
+ __ CmpS64(r6, Operand::Zero());
 __ beq(&no_args);
 __ AddS64(r4, r4,
 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kTaggedSize));

@@ -2046,8 +2045,8 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
 __ LoadP(scratch,
 MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
- __ CmpP(scratch,
-         Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ CmpS64(scratch,
+           Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
 __ beq(&arguments_adaptor);
 {
 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kFunctionOffset));

@@ -2129,7 +2128,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
 __ ShiftLeftU64(r1, r7, Operand(kSystemPointerSizeLog2));
 __ LoadP(scratch, MemOperand(r6, r1));
 __ StoreU64(scratch, MemOperand(r4, r1));
- __ CmpP(r7, Operand::Zero());
+ __ CmpS64(r7, Operand::Zero());
 __ bne(&loop);
 }
 }

@@ -2280,8 +2279,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
 // Check the stack for overflow. We are not trying to catch interruptions
 // (i.e. debug break and preemption) here, so check the "real stack
 // limit".
- __ CmpLogicalP(
-     r1, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
+ __ CmpU64(r1, __ StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
 __ bgt(&done); // Signed comparison.
 // Restore the stack pointer.
 {

@@ -2355,7 +2353,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
 __ Jump(masm->isolate()->builtins()->CallFunction(mode),
 RelocInfo::CODE_TARGET, eq);
- __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
+ __ CmpS64(r7, Operand(JS_BOUND_FUNCTION_TYPE));
 __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
 RelocInfo::CODE_TARGET, eq);

@@ -2365,7 +2363,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
 __ beq(&non_callable);

 // Check if target is a proxy and call CallProxy external builtin
- __ CmpP(r7, Operand(JS_PROXY_TYPE));
+ __ CmpS64(r7, Operand(JS_PROXY_TYPE));
 __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET, eq);

 // 2. Call to something else, which might have a [[Call]] internal method (if

@@ -2471,12 +2469,12 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {

 // Only dispatch to bound functions after checking whether they are
 // constructors.
- __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
+ __ CmpS64(r7, Operand(JS_BOUND_FUNCTION_TYPE));
 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
 RelocInfo::CODE_TARGET, eq);

 // Only dispatch to proxies after checking whether they are constructors.
- __ CmpP(r7, Operand(JS_PROXY_TYPE));
+ __ CmpS64(r7, Operand(JS_PROXY_TYPE));
 __ bne(&non_proxy);
 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
 RelocInfo::CODE_TARGET);

@@ -2519,7 +2517,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 // -------------------------------------------
 {
 Label under_application, over_application, invoke;
- __ CmpP(r2, r4);
+ __ CmpS64(r2, r4);
 __ blt(&under_application);

 // Enough parameters: actual >= expected

@@ -2551,7 +2549,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 __ bind(&copy);
 __ LoadP(r0, MemOperand(r2, 0));
 __ push(r0);
- __ CmpP(r2, r6); // Compare before moving to next argument.
+ __ CmpS64(r2, r6); // Compare before moving to next argument.
 __ lay(r2, MemOperand(r2, -kSystemPointerSize));
 __ bne(&copy);

@@ -2582,7 +2580,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 Label fill;
 __ bind(&fill);
 __ push(r7);
- __ CmpP(sp, r6);
+ __ CmpS64(sp, r6);
 __ b(ne, &fill);

 // Calculate copy start address into r0 and copy end address is fp.

@@ -2605,7 +2603,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 __ LoadP(r7, MemOperand(r2, 2 * kSystemPointerSize));
 __ push(r7);

- __ CmpP(r2, fp); // Compare before moving to next argument.
+ __ CmpS64(r2, fp); // Compare before moving to next argument.
 __ lay(r2, MemOperand(r2, -kSystemPointerSize));
 __ b(ne, &copy);
 }

@@ -2858,7 +2856,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
 // If the handler is a JS frame, restore the context to the frame. Note that
 // the context will be set to (cp == 0) for non-JS frames.
 Label skip;
- __ CmpP(cp, Operand::Zero());
+ __ CmpS64(cp, Operand::Zero());
 __ beq(&skip, Label::kNear);
 __ StoreU64(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 __ bind(&skip);

@@ -2920,7 +2918,7 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
 // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
 // the result is 0.
 // Compare exponent with 84 (compare exponent - 1 with 83).
- __ CmpP(scratch, Operand(83));
+ __ CmpS64(scratch, Operand(83));
 __ bge(&out_of_range, Label::kNear);

 // If we reach this code, 31 <= exponent <= 83.

@@ -2930,7 +2928,7 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
 __ mov(r0, Operand(51));
 __ SubS64(scratch, r0, scratch);
- __ CmpP(scratch, Operand::Zero());
+ __ CmpS64(scratch, Operand::Zero());
 __ ble(&only_low, Label::kNear);
 // 21 <= exponent <= 51, shift scratch_low and scratch_high
 // to generate the result.

@@ -3023,13 +3021,13 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,

 __ Move(scratch, ExternalReference::is_profiling_address(isolate));
 __ LoadU8(scratch, MemOperand(scratch, 0));
- __ CmpP(scratch, Operand::Zero());
+ __ CmpS64(scratch, Operand::Zero());

 Label profiler_enabled, end_profiler_check;
 __ bne(&profiler_enabled, Label::kNear);
 __ Move(scratch, ExternalReference::address_of_runtime_stats_flag());
 __ LoadU32(scratch, MemOperand(scratch, 0));
- __ CmpP(scratch, Operand::Zero());
+ __ CmpS64(scratch, Operand::Zero());
 __ bne(&profiler_enabled, Label::kNear);
 {
 // Call the api function directly.

@@ -3070,12 +3068,12 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
 __ StoreU64(r6, MemOperand(r9, kNextOffset));
 if (__ emit_debug_code()) {
 __ LoadU32(r3, MemOperand(r9, kLevelOffset));
- __ CmpP(r3, r8);
+ __ CmpS64(r3, r8);
 __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
 }
 __ SubS64(r8, Operand(1));
 __ StoreU32(r8, MemOperand(r9, kLevelOffset));
- __ CmpP(r7, MemOperand(r9, kLimitOffset));
+ __ CmpS64(r7, MemOperand(r9, kLimitOffset));
 __ bne(&delete_allocated_handles, Label::kNear);

 // Leave the API exit frame.

@@ -3476,7 +3474,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
 __ StoreU64(r6, MemOperand(r5, 0));
 __ la(r5, MemOperand(r5, kSystemPointerSize));
 __ bind(&pop_loop_header);
- __ CmpP(r4, sp);
+ __ CmpS64(r4, sp);
 __ bne(&pop_loop);

 // Compute the output frame in the deoptimizer.

@@ -3515,12 +3513,12 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
 __ push(r8);

 __ bind(&inner_loop_header);
- __ CmpP(r5, Operand::Zero());
+ __ CmpS64(r5, Operand::Zero());
 __ bne(&inner_push_loop); // test for gt?

 __ AddS64(r6, r6, Operand(kSystemPointerSize));
 __ bind(&outer_loop_header);
- __ CmpP(r6, r3);
+ __ CmpS64(r6, r3);
 __ blt(&outer_push_loop);

 __ LoadP(r3, MemOperand(r2, Deoptimizer::input_offset()));

@@ -3610,7 +3608,7 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
 RelocInfo::CODE_TARGET);

 Label deopt, bailout;
- __ CmpP(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
+ __ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
 __ bne(&deopt);

 __ RestoreRegisters(registers);

@@ -3618,11 +3616,11 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
 __ Ret();

 __ bind(&deopt);
- __ CmpP(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
+ __ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
 __ beq(&bailout);

 if (FLAG_debug_code) {
- __ CmpP(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
+ __ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
 __ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus);
 }
 __ RestoreRegisters(registers);
@@ -549,7 +549,7 @@ void TurboAssembler::PushArray(Register array, Register size, Register scratch,
 ShiftLeftU64(scratch, size, Operand(kSystemPointerSizeLog2));
 lay(scratch, MemOperand(array, scratch));
 bind(&loop);
- CmpP(array, scratch);
+ CmpS64(array, scratch);
 bge(&done);
 lay(scratch, MemOperand(scratch, -kSystemPointerSize));
 lay(sp, MemOperand(sp, -kSystemPointerSize));

@@ -562,7 +562,7 @@ void TurboAssembler::PushArray(Register array, Register size, Register scratch,
 lay(scratch, MemOperand(array, scratch));
 mov(scratch2, array);
 bind(&loop);
- CmpP(scratch2, scratch);
+ CmpS64(scratch2, scratch);
 bge(&done);
 lay(sp, MemOperand(sp, -kSystemPointerSize));
 MoveChar(MemOperand(sp), MemOperand(scratch2), Operand(kSystemPointerSize));

@@ -885,7 +885,7 @@ void MacroAssembler::RecordWrite(Register object, Register address,
 DCHECK(object != value);
 if (emit_debug_code()) {
 LoadTaggedPointerField(r0, MemOperand(address));
- CmpP(value, r0);
+ CmpS64(value, r0);
 Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
 }

@@ -1449,7 +1449,7 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
 AddS64(src_reg, src_reg, Operand(kSystemPointerSize));

 if (FLAG_debug_code) {
- CmpLogicalP(src_reg, dst_reg);
+ CmpU64(src_reg, dst_reg);
 Check(lt, AbortReason::kStackAccessBelowStackPointer);
 }

@@ -1503,7 +1503,7 @@ void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch,
 SubS64(scratch, sp, scratch);
 // Check if the arguments will overflow the stack.
 ShiftLeftU64(r0, num_args, Operand(kSystemPointerSizeLog2));
- CmpP(scratch, r0);
+ CmpS64(scratch, r0);
 ble(stack_overflow); // Signed comparison.
 }

@@ -1522,7 +1522,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
 #ifdef V8_NO_ARGUMENTS_ADAPTOR
 // If the expected parameter count is equal to the adaptor sentinel, no need
 // to push undefined value as arguments.
- CmpP(expected_parameter_count, Operand(kDontAdaptArgumentsSentinel));
+ CmpS64(expected_parameter_count, Operand(kDontAdaptArgumentsSentinel));
 beq(&regular_invoke);

 // If overapplication or if the actual argument count is equal to the

@@ -1579,7 +1579,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
 #else
 // Check whether the expected and actual arguments count match. If not,
 // setup registers according to contract with ArgumentsAdaptorTrampoline.
- CmpP(expected_parameter_count, actual_parameter_count);
+ CmpS64(expected_parameter_count, actual_parameter_count);
 beq(&regular_invoke);

 Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);

@@ -1719,7 +1719,7 @@ void MacroAssembler::MaybeDropFrames() {
 ExternalReference::debug_restart_fp_address(isolate());
 Move(r3, restart_fp);
 LoadP(r3, MemOperand(r3));
- CmpP(r3, Operand::Zero());
+ CmpS64(r3, Operand::Zero());
 Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
 ne);
 }

@@ -1773,7 +1773,7 @@ void MacroAssembler::CompareInstanceType(Register map, Register type_reg,
 STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
 STATIC_ASSERT(LAST_TYPE <= 0xFFFF);
 LoadS16(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
- CmpP(type_reg, Operand(type));
+ CmpS64(type_reg, Operand(type));
 }

 void MacroAssembler::CompareRoot(Register obj, RootIndex index) {

@@ -1791,9 +1791,9 @@ void MacroAssembler::JumpIfIsInRange(Register value, unsigned lower_limit,
 Register scratch = r0;
 mov(scratch, value);
 slgfi(scratch, Operand(lower_limit));
- CmpLogicalP(scratch, Operand(higher_limit - lower_limit));
+ CmpU64(scratch, Operand(higher_limit - lower_limit));
 } else {
- CmpLogicalP(value, Operand(higher_limit));
+ CmpU64(value, Operand(higher_limit));
 }
 ble(on_in_range);
 }

@@ -1884,7 +1884,7 @@ void MacroAssembler::JumpToInstructionStream(Address entry) {

 void MacroAssembler::LoadWeakValue(Register out, Register in,
 Label* target_if_cleared) {
- Cmp32(in, Operand(kClearedWeakHeapObjectLower32));
+ CmpS32(in, Operand(kClearedWeakHeapObjectLower32));
 beq(target_if_cleared);

 AndP(out, in, Operand(~kWeakHeapObjectMask));

@@ -2053,11 +2053,11 @@ void MacroAssembler::AssertGeneratorObject(Register object) {
 beq(&do_check);

 // Check if JSAsyncFunctionObject (See MacroAssembler::CompareInstanceType)
- CmpP(instance_type, Operand(JS_ASYNC_FUNCTION_OBJECT_TYPE));
+ CmpS64(instance_type, Operand(JS_ASYNC_FUNCTION_OBJECT_TYPE));
 beq(&do_check);

 // Check if JSAsyncGeneratorObject (See MacroAssembler::CompareInstanceType)
- CmpP(instance_type, Operand(JS_ASYNC_GENERATOR_OBJECT_TYPE));
+ CmpS64(instance_type, Operand(JS_ASYNC_GENERATOR_OBJECT_TYPE));

 bind(&do_check);
 // Restore generator object to register and perform assertion

@@ -3253,20 +3253,14 @@ void TurboAssembler::LoadPositive32(Register result, Register input) {
 //-----------------------------------------------------------------------------

 // Compare 32-bit Register vs Register
- void TurboAssembler::Cmp32(Register src1, Register src2) { cr_z(src1, src2); }
+ void TurboAssembler::CmpS32(Register src1, Register src2) { cr_z(src1, src2); }

 // Compare Pointer Sized Register vs Register
- void TurboAssembler::CmpP(Register src1, Register src2) {
- #if V8_TARGET_ARCH_S390X
-   cgr(src1, src2);
- #else
-   Cmp32(src1, src2);
- #endif
- }
+ void TurboAssembler::CmpS64(Register src1, Register src2) { cgr(src1, src2); }

 // Compare 32-bit Register vs Immediate
 // This helper will set up proper relocation entries if required.
- void TurboAssembler::Cmp32(Register dst, const Operand& opnd) {
+ void TurboAssembler::CmpS32(Register dst, const Operand& opnd) {
 if (opnd.rmode() == RelocInfo::NONE) {
 intptr_t value = opnd.immediate();
 if (is_int16(value))

@@ -3282,21 +3276,17 @@ void TurboAssembler::Cmp32(Register dst, const Operand& opnd) {

 // Compare Pointer Sized Register vs Immediate
 // This helper will set up proper relocation entries if required.
- void TurboAssembler::CmpP(Register dst, const Operand& opnd) {
- #if V8_TARGET_ARCH_S390X
+ void TurboAssembler::CmpS64(Register dst, const Operand& opnd) {
 if (opnd.rmode() == RelocInfo::NONE) {
 cgfi(dst, opnd);
 } else {
 mov(r0, opnd); // Need to generate 64-bit relocation
 cgr(dst, r0);
 }
- #else
-   Cmp32(dst, opnd);
- #endif
 }

 // Compare 32-bit Register vs Memory
- void TurboAssembler::Cmp32(Register dst, const MemOperand& opnd) {
+ void TurboAssembler::CmpS32(Register dst, const MemOperand& opnd) {
 // make sure offset is within 20 bit range
 DCHECK(is_int20(opnd.offset()));
 if (is_uint12(opnd.offset()))

@@ -3306,14 +3296,10 @@ void TurboAssembler::Cmp32(Register dst, const MemOperand& opnd) {
 }

 // Compare Pointer Size Register vs Memory
- void TurboAssembler::CmpP(Register dst, const MemOperand& opnd) {
+ void TurboAssembler::CmpS64(Register dst, const MemOperand& opnd) {
 // make sure offset is within 20 bit range
 DCHECK(is_int20(opnd.offset()));
- #if V8_TARGET_ARCH_S390X
 cg(dst, opnd);
- #else
-   Cmp32(dst, opnd);
- #endif
 }

 // Using cs or scy based on the offset

@@ -3337,34 +3323,34 @@ void TurboAssembler::CmpAndSwap64(Register old_val, Register new_val,
 //-----------------------------------------------------------------------------

 // Compare Logical 32-bit Register vs Register
- void TurboAssembler::CmpLogical32(Register dst, Register src) { clr(dst, src); }
+ void TurboAssembler::CmpU32(Register dst, Register src) { clr(dst, src); }

 // Compare Logical Pointer Sized Register vs Register
- void TurboAssembler::CmpLogicalP(Register dst, Register src) {
+ void TurboAssembler::CmpU64(Register dst, Register src) {
 #ifdef V8_TARGET_ARCH_S390X
 clgr(dst, src);
 #else
- CmpLogical32(dst, src);
+ CmpU32(dst, src);
 #endif
 }

 // Compare Logical 32-bit Register vs Immediate
- void TurboAssembler::CmpLogical32(Register dst, const Operand& opnd) {
+ void TurboAssembler::CmpU32(Register dst, const Operand& opnd) {
 clfi(dst, opnd);
 }

 // Compare Logical Pointer Sized Register vs Immediate
- void TurboAssembler::CmpLogicalP(Register dst, const Operand& opnd) {
+ void TurboAssembler::CmpU64(Register dst, const Operand& opnd) {
 #if V8_TARGET_ARCH_S390X
 DCHECK_EQ(static_cast<uint32_t>(opnd.immediate() >> 32), 0);
 clgfi(dst, opnd);
 #else
- CmpLogical32(dst, opnd);
+ CmpU32(dst, opnd);
 #endif
 }

 // Compare Logical 32-bit Register vs Memory
- void TurboAssembler::CmpLogical32(Register dst, const MemOperand& opnd) {
+ void TurboAssembler::CmpU32(Register dst, const MemOperand& opnd) {
 // make sure offset is within 20 bit range
 DCHECK(is_int20(opnd.offset()));
 if (is_uint12(opnd.offset()))

@@ -3374,25 +3360,16 @@ void TurboAssembler::CmpLogical32(Register dst, const MemOperand& opnd) {
 }

 // Compare Logical Pointer Sized Register vs Memory
- void TurboAssembler::CmpLogicalP(Register dst, const MemOperand& opnd) {
+ void TurboAssembler::CmpU64(Register dst, const MemOperand& opnd) {
 // make sure offset is within 20 bit range
 DCHECK(is_int20(opnd.offset()));
 #if V8_TARGET_ARCH_S390X
 clg(dst, opnd);
 #else
- CmpLogical32(dst, opnd);
+ CmpU32(dst, opnd);
 #endif
 }
-
- // Compare Logical Byte (Mem - Imm)
- void TurboAssembler::CmpLogicalByte(const MemOperand& mem, const Operand& imm) {
-   DCHECK(is_uint8(imm.immediate()));
-   if (is_uint12(mem.offset()))
-     cli(mem, imm);
-   else
-     cliy(mem, imm);
- }

 void TurboAssembler::Branch(Condition c, const Operand& opnd) {
 intptr_t value = opnd.immediate();
 if (is_int16(value))

@@ -4290,12 +4267,12 @@ void TurboAssembler::LoadPC(Register dst) {
 }

 void TurboAssembler::JumpIfEqual(Register x, int32_t y, Label* dest) {
- Cmp32(x, Operand(y));
+ CmpS32(x, Operand(y));
 beq(dest);
 }

 void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) {
- Cmp32(x, Operand(y));
+ CmpS32(x, Operand(y));
 blt(dest);
 }
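As the definitions above show, the rename also simplifies the signed helpers: CmpS64 now emits the 64-bit compare unconditionally, while the unsigned CmpU64 forms keep their 31-bit fallback behind #ifdef, with only the fallback call renamed. A short sketch restating the register-register forms, with the underlying z/Architecture mnemonics added here for orientation (the mnemonic comments are not part of the change):

    void TurboAssembler::CmpS32(Register a, Register b) { cr_z(a, b); }  // CR  - compare, 32-bit signed
    void TurboAssembler::CmpS64(Register a, Register b) { cgr(a, b); }   // CGR - compare, 64-bit signed
    void TurboAssembler::CmpU32(Register a, Register b) { clr(a, b); }   // CLR - compare logical, 32-bit
    // CmpU64 emits CLGR on V8_TARGET_ARCH_S390X and falls back to CmpU32 otherwise.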
@@ -304,25 +304,27 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
 void Sqrt(DoubleRegister result, const MemOperand& input);

 // Compare
- void Cmp32(Register src1, Register src2);
- void CmpP(Register src1, Register src2);
- void Cmp32(Register dst, const Operand& opnd);
- void CmpP(Register dst, const Operand& opnd);
- void Cmp32(Register dst, const MemOperand& opnd);
- void CmpP(Register dst, const MemOperand& opnd);
+ void CmpS32(Register src1, Register src2);
+ void CmpS64(Register src1, Register src2);
+ void CmpS32(Register dst, const Operand& opnd);
+ void CmpS64(Register dst, const Operand& opnd);
+ void CmpS32(Register dst, const MemOperand& opnd);
+ void CmpS64(Register dst, const MemOperand& opnd);
 void CmpAndSwap(Register old_val, Register new_val, const MemOperand& opnd);
 void CmpAndSwap64(Register old_val, Register new_val, const MemOperand& opnd);
+ // TODO(john.yan): remove this
+ template <class T>
+ void CmpP(Register src1, T src2) {
+   CmpS64(src1, src2);
+ }

 // Compare Logical
- void CmpLogical32(Register src1, Register src2);
- void CmpLogicalP(Register src1, Register src2);
- void CmpLogical32(Register src1, const Operand& opnd);
- void CmpLogicalP(Register src1, const Operand& opnd);
- void CmpLogical32(Register dst, const MemOperand& opnd);
- void CmpLogicalP(Register dst, const MemOperand& opnd);
-
- // Compare Logical Byte (CLI/CLIY)
- void CmpLogicalByte(const MemOperand& mem, const Operand& imm);
+ void CmpU32(Register src1, Register src2);
+ void CmpU64(Register src1, Register src2);
+ void CmpU32(Register src1, const Operand& opnd);
+ void CmpU64(Register src1, const Operand& opnd);
+ void CmpU32(Register dst, const MemOperand& opnd);
+ void CmpU64(Register dst, const MemOperand& opnd);

 // Load 32bit
 void LoadS32(Register dst, const MemOperand& opnd, Register scratch = no_reg);

@@ -1080,9 +1082,9 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
 template <class T>
 void CompareTagged(Register src1, T src2) {
 if (COMPRESS_POINTERS_BOOL) {
- Cmp32(src1, src2);
+ CmpS32(src1, src2);
 } else {
- CmpP(src1, src2);
+ CmpS64(src1, src2);
 }
 }
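The header keeps a templated CmpP shim (marked with a TODO to remove) so that call sites not converted in this change keep compiling; it simply forwards to the renamed 64-bit signed compare. A hedged sketch of how a leftover caller resolves (the call site shown is hypothetical):

    // Transitional shim from the header hunk above:
    template <class T>
    void CmpP(Register src1, T src2) {
      CmpS64(src1, src2);
    }

    // An unconverted call such as
    __ CmpP(r7, Operand(JS_PROXY_TYPE));
    // therefore assembles exactly as
    __ CmpS64(r7, Operand(JS_PROXY_TYPE));

CompareTagged likewise now picks CmpS32 or CmpS64 depending on pointer compression, so tagged-field comparisons follow the new names automatically.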
@@ -1013,8 +1013,8 @@ void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,

 // Check if current frame is an arguments adaptor frame.
 __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
- __ CmpP(scratch1,
-         Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ CmpS64(scratch1,
+           Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
 __ bne(&done);

 // Load arguments count from current arguments adaptor frame (note, it

@@ -1133,7 +1133,7 @@ void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr,
 void CodeGenerator::AssembleCodeStartRegisterCheck() {
 Register scratch = r1;
 __ ComputeCodeStartAddress(scratch);
- __ CmpP(scratch, kJavaScriptCallCodeStartRegister);
+ __ CmpS64(scratch, kJavaScriptCallCodeStartRegister);
 __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
 }

@@ -1148,7 +1148,7 @@ void CodeGenerator::BailoutIfDeoptimized() {
 if (FLAG_debug_code) {
 // Check that {kJavaScriptCallCodeStartRegister} is correct.
 __ ComputeCodeStartAddress(ip);
- __ CmpP(ip, kJavaScriptCallCodeStartRegister);
+ __ CmpS64(ip, kJavaScriptCallCodeStartRegister);
 __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
 }

@@ -1171,7 +1171,7 @@ void CodeGenerator::GenerateSpeculationPoisonFromCodeStartRegister() {
 // bits cleared if we are speculatively executing the wrong PC.
 __ mov(kSpeculationPoisonRegister, Operand::Zero());
 __ mov(r0, Operand(-1));
- __ CmpP(kJavaScriptCallCodeStartRegister, scratch);
+ __ CmpS64(kJavaScriptCallCodeStartRegister, scratch);
 __ LoadOnConditionP(eq, kSpeculationPoisonRegister, r0);
 }

@@ -1293,7 +1293,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
 // Check the function's context matches the context argument.
 __ LoadTaggedPointerField(
 kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
- __ CmpP(cp, kScratchReg);
+ __ CmpS64(cp, kScratchReg);
 __ Assert(eq, AbortReason::kWrongFunctionContext);
 }
 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");

@@ -1441,7 +1441,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(

 constexpr size_t kValueIndex = 0;
 DCHECK(instr->InputAt(kValueIndex)->IsRegister());
- __ CmpLogicalP(lhs_register, i.InputRegister(kValueIndex));
+ __ CmpU64(lhs_register, i.InputRegister(kValueIndex));
 break;
 }
 case kArchStackCheckOffset:

@@ -1944,11 +1944,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
 break;
 #endif
 case kS390_Cmp32:
- ASSEMBLE_COMPARE32(Cmp32, CmpLogical32);
+ ASSEMBLE_COMPARE32(CmpS32, CmpU32);
 break;
 #if V8_TARGET_ARCH_S390X
 case kS390_Cmp64:
- ASSEMBLE_COMPARE(CmpP, CmpLogicalP);
+ ASSEMBLE_COMPARE(CmpS64, CmpU64);
 break;
 #endif
 case kS390_CmpFloat:

@@ -4380,7 +4380,7 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 cases[index] = GetLabel(i.InputRpo(index + 2));
 }
 Label* const table = AddJumpTable(cases, case_count);
- __ CmpLogicalP(input, Operand(case_count));
+ __ CmpU64(input, Operand(case_count));
 __ bge(GetLabel(i.InputRpo(1)));
 __ larl(kScratchReg, table);
 __ ShiftLeftU64(r1, input, Operand(kSystemPointerSizeLog2));

@@ -4493,7 +4493,7 @@ void CodeGenerator::AssembleConstructFrame() {
 __ LoadP(scratch, MemOperand(scratch));
 __ AddS64(scratch, scratch,
 Operand(required_slots * kSystemPointerSize));
- __ CmpLogicalP(sp, scratch);
+ __ CmpU64(sp, scratch);
 __ bge(&done);
 }

@@ -4570,7 +4570,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
 if (additional_pop_count->IsImmediate()) {
 DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0);
 } else if (__ emit_debug_code()) {
- __ CmpP(g.ToRegister(additional_pop_count), Operand(0));
+ __ CmpS64(g.ToRegister(additional_pop_count), Operand(0));
 __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue);
 }
 }

@@ -4614,7 +4614,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
 __ AddS64(argc_reg, argc_reg, Operand(1)); // Also pop the receiver.
 if (parameter_count > 1) {
 Label skip;
- __ CmpP(argc_reg, Operand(parameter_count));
+ __ CmpS64(argc_reg, Operand(parameter_count));
 __ bgt(&skip);
 __ mov(argc_reg, Operand(parameter_count));
 __ bind(&skip);
@@ -172,7 +172,7 @@ void RegExpMacroAssemblerS390::Backtrack() {
 __ LoadP(r2, MemOperand(frame_pointer(), kBacktrackCount), r0);
 __ AddS64(r2, r2, Operand(1));
 __ StoreU64(r2, MemOperand(frame_pointer(), kBacktrackCount), r0);
- __ CmpLogicalP(r2, Operand(backtrack_limit()));
+ __ CmpU64(r2, Operand(backtrack_limit()));
 __ bne(&next);

 // Backtrack limit exceeded.

@@ -194,12 +194,12 @@ void RegExpMacroAssemblerS390::Backtrack() {
 void RegExpMacroAssemblerS390::Bind(Label* label) { __ bind(label); }

 void RegExpMacroAssemblerS390::CheckCharacter(uint32_t c, Label* on_equal) {
- __ CmpLogicalP(current_character(), Operand(c));
+ __ CmpU64(current_character(), Operand(c));
 BranchOrBacktrack(eq, on_equal);
 }

 void RegExpMacroAssemblerS390::CheckCharacterGT(uc16 limit, Label* on_greater) {
- __ CmpLogicalP(current_character(), Operand(limit));
+ __ CmpU64(current_character(), Operand(limit));
 BranchOrBacktrack(gt, on_greater);
 }

@@ -207,7 +207,7 @@ void RegExpMacroAssemblerS390::CheckAtStart(int cp_offset, Label* on_at_start) {
 __ LoadP(r3, MemOperand(frame_pointer(), kStringStartMinusOne));
 __ AddS64(r2, current_input_offset(),
 Operand(-char_size() + cp_offset * char_size()));
- __ CmpP(r2, r3);
+ __ CmpS64(r2, r3);
 BranchOrBacktrack(eq, on_at_start);
 }

@@ -216,18 +216,18 @@ void RegExpMacroAssemblerS390::CheckNotAtStart(int cp_offset,
 __ LoadP(r3, MemOperand(frame_pointer(), kStringStartMinusOne));
 __ AddS64(r2, current_input_offset(),
 Operand(-char_size() + cp_offset * char_size()));
- __ CmpP(r2, r3);
+ __ CmpS64(r2, r3);
 BranchOrBacktrack(ne, on_not_at_start);
 }

 void RegExpMacroAssemblerS390::CheckCharacterLT(uc16 limit, Label* on_less) {
- __ CmpLogicalP(current_character(), Operand(limit));
+ __ CmpU64(current_character(), Operand(limit));
 BranchOrBacktrack(lt, on_less);
 }

 void RegExpMacroAssemblerS390::CheckGreedyLoop(Label* on_equal) {
 Label backtrack_non_equal;
- __ CmpP(current_input_offset(), MemOperand(backtrack_stackpointer(), 0));
+ __ CmpS64(current_input_offset(), MemOperand(backtrack_stackpointer(), 0));
 __ bne(&backtrack_non_equal);
 __ AddS64(backtrack_stackpointer(), Operand(kSystemPointerSize));

@@ -252,7 +252,7 @@ void RegExpMacroAssemblerS390::CheckNotBackReferenceIgnoreCase(
 if (read_backward) {
 __ LoadP(r5, MemOperand(frame_pointer(), kStringStartMinusOne));
 __ AddS64(r5, r5, r3);
- __ CmpP(current_input_offset(), r5);
+ __ CmpS64(current_input_offset(), r5);
 BranchOrBacktrack(le, on_no_match);
 } else {
 __ AddS64(r0, r3, current_input_offset());

@@ -282,27 +282,27 @@ void RegExpMacroAssemblerS390::CheckNotBackReferenceIgnoreCase(
 __ LoadU8(r5, MemOperand(r2, r1));
 __ LoadU8(r6, MemOperand(r4, r1));

- __ CmpP(r6, r5);
+ __ CmpS64(r6, r5);
 __ beq(&loop_check);

 // Mismatch, try case-insensitive match (converting letters to lower-case).
 __ Or(r5, Operand(0x20)); // Convert capture character to lower-case.
 __ Or(r6, Operand(0x20)); // Also convert input character.
- __ CmpP(r6, r5);
+ __ CmpS64(r6, r5);
 __ bne(&fail);
 __ SubS64(r5, Operand('a'));
- __ CmpLogicalP(r5, Operand('z' - 'a')); // Is r5 a lowercase letter?
+ __ CmpU64(r5, Operand('z' - 'a')); // Is r5 a lowercase letter?
 __ ble(&loop_check); // In range 'a'-'z'.
 // Latin-1: Check for values in range [224,254] but not 247.
 __ SubS64(r5, Operand(224 - 'a'));
- __ CmpLogicalP(r5, Operand(254 - 224));
+ __ CmpU64(r5, Operand(254 - 224));
 __ bgt(&fail); // Weren't Latin-1 letters.
- __ CmpLogicalP(r5, Operand(247 - 224)); // Check for 247.
+ __ CmpU64(r5, Operand(247 - 224)); // Check for 247.
 __ beq(&fail);

 __ bind(&loop_check);
 __ la(r1, MemOperand(r1, char_size()));
- __ CmpP(r1, r3);
+ __ CmpS64(r1, r3);
 __ blt(&loop);
 __ b(&success);

@@ -360,7 +360,7 @@ void RegExpMacroAssemblerS390::CheckNotBackReferenceIgnoreCase(
 }

 // Check if function returned non-zero for success or zero for failure.
- __ CmpP(r2, Operand::Zero());
+ __ CmpS64(r2, Operand::Zero());
 BranchOrBacktrack(eq, on_no_match);

 // On success, advance position by length of capture.

@@ -393,7 +393,7 @@ void RegExpMacroAssemblerS390::CheckNotBackReference(int start_reg,
 if (read_backward) {
 __ LoadP(r5, MemOperand(frame_pointer(), kStringStartMinusOne));
 __ AddS64(r5, r5, r3);
- __ CmpP(current_input_offset(), r5);
+ __ CmpS64(current_input_offset(), r5);
 BranchOrBacktrack(le, on_no_match);
 } else {
 __ AddS64(r0, r3, current_input_offset());

@@ -420,9 +420,9 @@ void RegExpMacroAssemblerS390::CheckNotBackReference(int start_reg,
 __ LoadU16(r6, MemOperand(r4, r1));
 }
 __ la(r1, MemOperand(r1, char_size()));
- __ CmpP(r5, r6);
+ __ CmpS64(r5, r6);
 BranchOrBacktrack(ne, on_no_match);
- __ CmpP(r1, r3);
+ __ CmpS64(r1, r3);
 __ blt(&loop);

 // Move current character position to position after match.

@@ -440,7 +440,7 @@ void RegExpMacroAssemblerS390::CheckNotBackReference(int start_reg,

 void RegExpMacroAssemblerS390::CheckNotCharacter(unsigned c,
 Label* on_not_equal) {
- __ CmpLogicalP(current_character(), Operand(c));
+ __ CmpU64(current_character(), Operand(c));
 BranchOrBacktrack(ne, on_not_equal);
 }

@@ -448,7 +448,7 @@ void RegExpMacroAssemblerS390::CheckCharacterAfterAnd(uint32_t c, uint32_t mask,
 Label* on_equal) {
 __ AndP(r2, current_character(), Operand(mask));
 if (c != 0) {
- __ CmpLogicalP(r2, Operand(c));
+ __ CmpU64(r2, Operand(c));
 }
 BranchOrBacktrack(eq, on_equal);
 }

@@ -458,7 +458,7 @@ void RegExpMacroAssemblerS390::CheckNotCharacterAfterAnd(unsigned c,
 Label* on_not_equal) {
 __ AndP(r2, current_character(), Operand(mask));
 if (c != 0) {
- __ CmpLogicalP(r2, Operand(c));
+ __ CmpU64(r2, Operand(c));
 }
 BranchOrBacktrack(ne, on_not_equal);
 }

@@ -469,7 +469,7 @@ void RegExpMacroAssemblerS390::CheckNotCharacterAfterMinusAnd(
 __ lay(r2, MemOperand(current_character(), -minus));
 __ And(r2, Operand(mask));
 if (c != 0) {
- __ CmpLogicalP(r2, Operand(c));
+ __ CmpU64(r2, Operand(c));
 }
 BranchOrBacktrack(ne, on_not_equal);
 }

@@ -477,14 +477,14 @@ void RegExpMacroAssemblerS390::CheckNotCharacterAfterMinusAnd(
 void RegExpMacroAssemblerS390::CheckCharacterInRange(uc16 from, uc16 to,
 Label* on_in_range) {
 __ lay(r2, MemOperand(current_character(), -from));
- __ CmpLogicalP(r2, Operand(to - from));
+ __ CmpU64(r2, Operand(to - from));
 BranchOrBacktrack(le, on_in_range); // Unsigned lower-or-same condition.
 }

 void RegExpMacroAssemblerS390::CheckCharacterNotInRange(
 uc16 from, uc16 to, Label* on_not_in_range) {
 __ lay(r2, MemOperand(current_character(), -from));
- __ CmpLogicalP(r2, Operand(to - from));
+ __ CmpU64(r2, Operand(to - from));
 BranchOrBacktrack(gt, on_not_in_range); // Unsigned higher condition.
 }

@@ -498,7 +498,7 @@ void RegExpMacroAssemblerS390::CheckBitInTable(Handle<ByteArray> table,
 }
 __ LoadU8(r2,
 MemOperand(r2, index, (ByteArray::kHeaderSize - kHeapObjectTag)));
- __ CmpP(r2, Operand::Zero());
+ __ CmpS64(r2, Operand::Zero());
 BranchOrBacktrack(ne, on_bit_set);
 }

@@ -512,14 +512,14 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 if (mode_ == LATIN1) {
 // One byte space characters are '\t'..'\r', ' ' and \u00a0.
 Label success;
- __ CmpP(current_character(), Operand(' '));
+ __ CmpS64(current_character(), Operand(' '));
 __ beq(&success);
 // Check range 0x09..0x0D
 __ SubS64(r2, current_character(), Operand('\t'));
- __ CmpLogicalP(r2, Operand('\r' - '\t'));
+ __ CmpU64(r2, Operand('\r' - '\t'));
 __ ble(&success);
 // \u00a0 (NBSP).
- __ CmpLogicalP(r2, Operand(0x00A0 - '\t'));
+ __ CmpU64(r2, Operand(0x00A0 - '\t'));
 BranchOrBacktrack(ne, on_no_match);
 __ bind(&success);
 return true;

@@ -531,13 +531,13 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 case 'd':
 // Match ASCII digits ('0'..'9')
 __ SubS64(r2, current_character(), Operand('0'));
- __ CmpLogicalP(r2, Operand('9' - '0'));
+ __ CmpU64(r2, Operand('9' - '0'));
 BranchOrBacktrack(gt, on_no_match);
 return true;
 case 'D':
 // Match non ASCII-digits
 __ SubS64(r2, current_character(), Operand('0'));
- __ CmpLogicalP(r2, Operand('9' - '0'));
+ __ CmpU64(r2, Operand('9' - '0'));
 BranchOrBacktrack(le, on_no_match);
 return true;
 case '.': {

@@ -545,14 +545,14 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 __ XorP(r2, current_character(), Operand(0x01));
 // See if current character is '\n'^1 or '\r'^1, i.e., 0x0B or 0x0C
 __ SubS64(r2, Operand(0x0B));
- __ CmpLogicalP(r2, Operand(0x0C - 0x0B));
+ __ CmpU64(r2, Operand(0x0C - 0x0B));
 BranchOrBacktrack(le, on_no_match);
 if (mode_ == UC16) {
 // Compare original value to 0x2028 and 0x2029, using the already
 // computed (current_char ^ 0x01 - 0x0B). I.e., check for
 // 0x201D (0x2028 - 0x0B) or 0x201E.
 __ SubS64(r2, Operand(0x2028 - 0x0B));
- __ CmpLogicalP(r2, Operand(1));
+ __ CmpU64(r2, Operand(1));
 BranchOrBacktrack(le, on_no_match);
 }
 return true;

@@ -562,7 +562,7 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 __ XorP(r2, current_character(), Operand(0x01));
 // See if current character is '\n'^1 or '\r'^1, i.e., 0x0B or 0x0C
 __ SubS64(r2, Operand(0x0B));
- __ CmpLogicalP(r2, Operand(0x0C - 0x0B));
+ __ CmpU64(r2, Operand(0x0C - 0x0B));
 if (mode_ == LATIN1) {
 BranchOrBacktrack(gt, on_no_match);
 } else {

@@ -572,7 +572,7 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 // computed (current_char ^ 0x01 - 0x0B). I.e., check for
 // 0x201D (0x2028 - 0x0B) or 0x201E.
 __ SubS64(r2, Operand(0x2028 - 0x0B));
- __ CmpLogicalP(r2, Operand(1));
+ __ CmpU64(r2, Operand(1));
 BranchOrBacktrack(gt, on_no_match);
 __ bind(&done);
 }

@@ -581,14 +581,14 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 case 'w': {
 if (mode_ != LATIN1) {
 // Table is 1256 entries, so all LATIN1 characters can be tested.
- __ CmpP(current_character(), Operand('z'));
+ __ CmpS64(current_character(), Operand('z'));
 BranchOrBacktrack(gt, on_no_match);
 }
 ExternalReference map =
 ExternalReference::re_word_character_map(isolate());
 __ mov(r2, Operand(map));
 __ LoadU8(r2, MemOperand(r2, current_character()));
- __ CmpLogicalP(r2, Operand::Zero());
+ __ CmpU64(r2, Operand::Zero());
 BranchOrBacktrack(eq, on_no_match);
 return true;
 }

@@ -596,14 +596,14 @@ bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass(uc16 type,
 Label done;
 if (mode_ != LATIN1) {
 // Table is 256 entries, so all LATIN characters can be tested.
- __ CmpLogicalP(current_character(), Operand('z'));
+ __ CmpU64(current_character(), Operand('z'));
 __ bgt(&done);
 }
 ExternalReference map =
 ExternalReference::re_word_character_map(isolate());
 __ mov(r2, Operand(map));
 __ LoadU8(r2, MemOperand(r2, current_character()));
- __ CmpLogicalP(r2, Operand::Zero());
+ __ CmpU64(r2, Operand::Zero());
 BranchOrBacktrack(ne, on_no_match);
 if (mode_ != LATIN1) {
 __ bind(&done);

@@ -697,7 +697,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {
 __ ble(&stack_limit_hit);
 // Check if there is room for the variable number of registers above
 // the stack limit.
- __ CmpLogicalP(r2, Operand(num_registers_ * kSystemPointerSize));
+ __ CmpU64(r2, Operand(num_registers_ * kSystemPointerSize));
 __ bge(&stack_ok);
 // Exit with OutOfMemory exception. There is not enough space on the stack
 // for our working registers.

@@ -706,7 +706,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {

 __ bind(&stack_limit_hit);
 CallCheckStackGuardState(r2);
- __ CmpP(r2, Operand::Zero());
+ __ CmpS64(r2, Operand::Zero());
 // If returned value is non-zero, we exit with the returned value as result.
 __ bne(&return_r2);

@@ -740,7 +740,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {

 Label load_char_start_regexp, start_regexp;
 // Load newline if index is at start, previous character otherwise.
- __ CmpP(r3, Operand::Zero());
+ __ CmpS64(r3, Operand::Zero());
 __ bne(&load_char_start_regexp);
 __ mov(current_character(), Operand('\n'));
 __ b(&start_regexp);

@@ -856,7 +856,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {
 // output registers is reduced by the number of stored captures.
 __ SubS64(r3, Operand(num_saved_registers_));
 // Check whether we have enough room for another set of capture results.
- __ CmpP(r3, Operand(num_saved_registers_));
+ __ CmpS64(r3, Operand(num_saved_registers_));
 __ blt(&return_r2);

 __ StoreU64(r3, MemOperand(frame_pointer(), kNumOutputRegisters));

@@ -870,11 +870,11 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {
 if (global_with_zero_length_check()) {
 // Special case for zero-length matches.
 // r6: capture start index
- __ CmpP(current_input_offset(), r6);
+ __ CmpS64(current_input_offset(), r6);
 // Not a zero-length match, restart.
 __ bne(&load_char_start_regexp);
 // Offset from the end is zero if we already reached the end.
- __ CmpP(current_input_offset(), Operand::Zero());
+ __ CmpS64(current_input_offset(), Operand::Zero());
 __ beq(&exit_label_);
 // Advance current position after a zero-length match.
 Label advance;

@@ -916,7 +916,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {
 SafeCallTarget(&check_preempt_label_);

 CallCheckStackGuardState(r2);
- __ CmpP(r2, Operand::Zero());
+ __ CmpS64(r2, Operand::Zero());
 // If returning non-zero, we should end execution with the given
 // result as return value.
 __ bne(&return_r2);

@@ -941,7 +941,7 @@ Handle<HeapObject> RegExpMacroAssemblerS390::GetCode(Handle<String> source) {
 __ CallCFunction(grow_stack, num_arguments);
 // If return nullptr, we have failed to grow the stack, and
 // must exit with a stack-overflow exception.
- __ CmpP(r2, Operand::Zero());
+ __ CmpS64(r2, Operand::Zero());
 __ beq(&exit_with_exception);
 // Otherwise use return value as new stack pointer.
 __ mov(backtrack_stackpointer(), r2);

@@ -979,20 +979,20 @@ void RegExpMacroAssemblerS390::GoTo(Label* to) { BranchOrBacktrack(al, to); }
 void RegExpMacroAssemblerS390::IfRegisterGE(int reg, int comparand,
 Label* if_ge) {
 __ LoadP(r2, register_location(reg), r0);
- __ CmpP(r2, Operand(comparand));
+ __ CmpS64(r2, Operand(comparand));
 BranchOrBacktrack(ge, if_ge);
 }

 void RegExpMacroAssemblerS390::IfRegisterLT(int reg, int comparand,
 Label* if_lt) {
 __ LoadP(r2, register_location(reg), r0);
- __ CmpP(r2, Operand(comparand));
+ __ CmpS64(r2, Operand(comparand));
 BranchOrBacktrack(lt, if_lt);
 }

 void RegExpMacroAssemblerS390::IfRegisterEqPos(int reg, Label* if_eq) {
 __ LoadP(r2, register_location(reg), r0);
- __ CmpP(r2, current_input_offset());
+ __ CmpS64(r2, current_input_offset());
 BranchOrBacktrack(eq, if_eq);
 }

@@ -1044,7 +1044,7 @@ void RegExpMacroAssemblerS390::ReadStackPointerFromRegister(int reg) {

 void RegExpMacroAssemblerS390::SetCurrentPositionFromEnd(int by) {
 Label after_position;
- __ CmpP(current_input_offset(), Operand(-by * char_size()));
+ __ CmpS64(current_input_offset(), Operand(-by * char_size()));
 __ bge(&after_position);
 __ mov(current_input_offset(), Operand(-by * char_size()));
 // On RegExp code entry (where this operation is used), the character before

@@ -1163,12 +1163,12 @@ MemOperand RegExpMacroAssemblerS390::register_location(int register_index) {
 void RegExpMacroAssemblerS390::CheckPosition(int cp_offset,
 Label* on_outside_input) {
 if (cp_offset >= 0) {
- __ CmpP(current_input_offset(), Operand(-cp_offset * char_size()));
+ __ CmpS64(current_input_offset(), Operand(-cp_offset * char_size()));
 BranchOrBacktrack(ge, on_outside_input);
 } else {
 __ LoadP(r3, MemOperand(frame_pointer(), kStringStartMinusOne));
 __ AddS64(r2, current_input_offset(), Operand(cp_offset * char_size()));
- __ CmpP(r2, r3);
+ __ CmpS64(r2, r3);
 BranchOrBacktrack(le, on_outside_input);
 }
 }

@@ -1233,7 +1233,7 @@ void RegExpMacroAssemblerS390::CheckPreemption() {
 ExternalReference stack_limit =
 ExternalReference::address_of_jslimit(isolate());
 __ mov(r2, Operand(stack_limit));
- __ CmpLogicalP(sp, MemOperand(r2));
+ __ CmpU64(sp, MemOperand(r2));
 SafeCall(&check_preempt_label_, le);
 }

@@ -1241,7 +1241,7 @@ void RegExpMacroAssemblerS390::CheckStackLimit() {
 ExternalReference stack_limit =
 ExternalReference::address_of_regexp_stack_limit_address(isolate());
 __ mov(r2, Operand(stack_limit));
- __ CmpLogicalP(backtrack_stackpointer(), MemOperand(r2));
+ __ CmpU64(backtrack_stackpointer(), MemOperand(r2));
 SafeCall(&stack_overflow_label_, le);
 }
@@ -316,7 +316,7 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
 bind(&loop);
 StoreU64(r0, MemOperand(r0));
 la(r0, MemOperand(r0, kSystemPointerSize));
- CmpLogicalP(r3, r4);
+ CmpU64(r3, r4);
 bne(&loop);

 pop(r4);

@@ -604,30 +604,30 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
 if (type.kind() == ValueType::kI32) {
 if (rhs == no_reg) {
 if (use_signed) {
- Cmp32(lhs, Operand::Zero());
+ CmpS32(lhs, Operand::Zero());
 } else {
- CmpLogical32(lhs, Operand::Zero());
+ CmpU32(lhs, Operand::Zero());
 }
 } else {
 if (use_signed) {
- Cmp32(lhs, rhs);
+ CmpS32(lhs, rhs);
 } else {
- CmpLogical32(lhs, rhs);
+ CmpU32(lhs, rhs);
 }
 }
 } else {
 CHECK_EQ(type.kind(), ValueType::kI64);
 if (rhs == no_reg) {
 if (use_signed) {
- CmpP(lhs, Operand::Zero());
+ CmpS64(lhs, Operand::Zero());
 } else {
- CmpLogicalP(lhs, Operand::Zero());
+ CmpU64(lhs, Operand::Zero());
 }
 } else {
 if (use_signed) {
- CmpP(lhs, rhs);
+ CmpS64(lhs, rhs);
 } else {
- CmpLogicalP(lhs, rhs);
+ CmpU64(lhs, rhs);
 }
 }
 }

@@ -658,9 +658,9 @@ void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
 Register rhs) {
 bool use_signed = liftoff::UseSignedOp(liftoff_cond);
 if (use_signed) {
- Cmp32(lhs, rhs);
+ CmpS32(lhs, rhs);
 } else {
- CmpLogical32(lhs, rhs);
+ CmpU32(lhs, rhs);
 }

 EMIT_SET_CONDITION(dst, liftoff::ToCondition(liftoff_cond));

@@ -675,9 +675,9 @@ void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
 LiftoffRegister rhs) {
 bool use_signed = liftoff::UseSignedOp(liftoff_cond);
 if (use_signed) {
- CmpP(lhs.gp(), rhs.gp());
+ CmpS64(lhs.gp(), rhs.gp());
 } else {
- CmpLogicalP(lhs.gp(), rhs.gp());
+ CmpU64(lhs.gp(), rhs.gp());
 }
 EMIT_SET_CONDITION(dst, liftoff::ToCondition(liftoff_cond));
@@ -47,7 +47,7 @@ TEST_F(TurboAssemblerTest, TestCheck) {

 // Fail if the first parameter is 17.
 __ lgfi(r3, Operand(17));
- __ CmpP(r2, r3); // 1st parameter is in {r2}.
+ __ CmpS64(r2, r3); // 1st parameter is in {r2}.
 __ Check(Condition::ne, AbortReason::kNoReason);
 __ Ret();