Rename RelocInfo::NONE to RelocInfo::NONE32.
This CL only does renaming, nothing else. Followup to:
https://chromiumcodereview.appspot.com/11695006/

There are now NONE32 and NONE64 RelocInfo types, but only ARM uses them both
at the same time. They were added in:
https://chromiumcodereview.appspot.com/11191029/

R=ulan@chromium.org

Review URL: https://chromiumcodereview.appspot.com/11744020
Patch from JF Bastien <jfb@chromium.org>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13311 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Parent: f96267aa7b
Commit: 0606abbaab
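For orientation only, a minimal, self-contained sketch (simplified names, not the actual V8 headers) of the distinction this rename makes explicit: both never-recorded pseudo-modes now carry their width in the name, and predicates such as IsNone accept either.

// Hypothetical, simplified illustration of the renamed pseudo-modes; this is
// not the real V8 RelocInfo class, just a compilable sketch of the idea.
#include <cassert>

struct RelocInfo {
  enum Mode {
    CODE_TARGET,  // real modes elided
    NONE32,       // never recorded 32-bit value (formerly NONE)
    NONE64        // never recorded 64-bit value
  };
  static bool IsNone(Mode mode) { return mode == NONE32 || mode == NONE64; }
};

int main() {
  // Call sites that used to pass RelocInfo::NONE now name the width explicitly.
  assert(RelocInfo::IsNone(RelocInfo::NONE32));
  assert(RelocInfo::IsNone(RelocInfo::NONE64));
  assert(!RelocInfo::IsNone(RelocInfo::CODE_TARGET));
  return 0;
}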
@@ -344,7 +344,7 @@ Operand::Operand(const ExternalReference& f) {
 Operand::Operand(Smi* value) {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<intptr_t>(value);
-  rmode_ = RelocInfo::NONE;
+  rmode_ = RelocInfo::NONE32;
 }

@@ -234,7 +234,7 @@ Operand::Operand(Handle<Object> handle) {
   } else {
     // no relocation needed
     imm32_ = reinterpret_cast<intptr_t>(obj);
-    rmode_ = RelocInfo::NONE;
+    rmode_ = RelocInfo::NONE32;
   }
 }
@@ -386,7 +386,7 @@ class Operand BASE_EMBEDDED {
  public:
   // immediate
   INLINE(explicit Operand(int32_t immediate,
-                          RelocInfo::Mode rmode = RelocInfo::NONE));
+                          RelocInfo::Mode rmode = RelocInfo::NONE32));
   INLINE(static Operand Zero()) {
     return Operand(static_cast<int32_t>(0));
   }
@@ -140,7 +140,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
   __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
   __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
   // Field JSArray::kElementsOffset is initialized later.
-  __ mov(scratch3, Operand(0, RelocInfo::NONE));
+  __ mov(scratch3, Operand(0, RelocInfo::NONE32));
   __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

   if (initial_capacity == 0) {
@@ -319,7 +319,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
       has_non_smi_element, finish, cant_transition_map, not_double;

   // Check for array construction with zero arguments or one.
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   __ b(ne, &argc_one_or_more);

   // Handle construction of an empty array.
@@ -590,7 +590,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {

   // Load the first arguments in r0 and get rid of the rest.
   Label no_arguments;
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   __ b(eq, &no_arguments);
   // First args = sp[(argc - 1) * 4].
   __ sub(r0, r0, Operand(1));
@@ -634,7 +634,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
     __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
     __ Assert(eq, "Unexpected string wrapper instance size");
     __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
-    __ cmp(r4, Operand(0, RelocInfo::NONE));
+    __ cmp(r4, Operand(0, RelocInfo::NONE32));
     __ Assert(eq, "Unexpected unused properties of string wrapper");
   }
   __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
@@ -1097,7 +1097,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // r5-r7, cp may be clobbered

   // Clear the context before we push it when entering the internal frame.
-  __ mov(cp, Operand(0, RelocInfo::NONE));
+  __ mov(cp, Operand(0, RelocInfo::NONE32));

   // Enter an internal frame.
   {
@@ -1396,7 +1396,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // r0: actual number of arguments
   // r1: function
   Label shift_arguments;
-  __ mov(r4, Operand(0, RelocInfo::NONE));  // indicate regular JS_FUNCTION
+  __ mov(r4, Operand(0, RelocInfo::NONE32));  // indicate regular JS_FUNCTION
   { Label convert_to_object, use_global_receiver, patch_receiver;
     // Change context eagerly in case we need the global receiver.
     __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
@@ -1451,7 +1451,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {

     // Restore the function to r1, and the flag to r4.
     __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
-    __ mov(r4, Operand(0, RelocInfo::NONE));
+    __ mov(r4, Operand(0, RelocInfo::NONE32));
     __ jmp(&patch_receiver);

     // Use the global receiver object from the called function as the
@@ -1473,11 +1473,11 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {

   // 3b. Check for function proxy.
   __ bind(&slow);
-  __ mov(r4, Operand(1, RelocInfo::NONE));  // indicate function proxy
+  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
   __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
   __ b(eq, &shift_arguments);
   __ bind(&non_function);
-  __ mov(r4, Operand(2, RelocInfo::NONE));  // indicate non-function
+  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

   // 3c. Patch the first argument when calling a non-function. The
   // CALL_NON_FUNCTION builtin expects the non-function callee as
@@ -1521,7 +1521,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   __ tst(r4, r4);
   __ b(eq, &function);
   // Expected number of arguments is 0 for CALL_NON_FUNCTION.
-  __ mov(r2, Operand(0, RelocInfo::NONE));
+  __ mov(r2, Operand(0, RelocInfo::NONE32));
   __ SetCallKind(r5, CALL_AS_METHOD);
   __ cmp(r4, Operand(1));
   __ b(ne, &non_proxy);
@@ -1599,7 +1599,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   // Push current limit and index.
   __ bind(&okay);
   __ push(r0);  // limit
-  __ mov(r1, Operand(0, RelocInfo::NONE));  // initial index
+  __ mov(r1, Operand(0, RelocInfo::NONE32));  // initial index
   __ push(r1);

   // Get the receiver.
@@ -1711,7 +1711,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ bind(&call_proxy);
   __ push(r1);  // add function proxy as last argument
   __ add(r0, r0, Operand(1));
-  __ mov(r2, Operand(0, RelocInfo::NONE));
+  __ mov(r2, Operand(0, RelocInfo::NONE32));
   __ SetCallKind(r5, CALL_AS_METHOD);
   __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
   __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
@@ -560,7 +560,7 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
   __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
   // Subtract from 0 if source was negative.
-  __ rsb(source_, source_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+  __ rsb(source_, source_, Operand(0, RelocInfo::NONE32), LeaveCC, ne);

   // We have -1, 0 or 1, which we treat specially. Register source_ contains
   // absolute value: it is either equal to 1 (special case of -1 and 1),
@@ -573,7 +573,7 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
       HeapNumber::kExponentBias << HeapNumber::kExponentShift;
   __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
   // 1, 0 and -1 all have 0 for the second word.
-  __ mov(mantissa, Operand(0, RelocInfo::NONE));
+  __ mov(mantissa, Operand(0, RelocInfo::NONE32));
   __ Ret();

   __ bind(&not_special);
@@ -1141,7 +1141,7 @@ void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
   // Set the sign bit in scratch_ if the value was negative.
   __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
   // Subtract from 0 if the value was negative.
-  __ rsb(the_int_, the_int_, Operand(0, RelocInfo::NONE), LeaveCC, cs);
+  __ rsb(the_int_, the_int_, Operand(0, RelocInfo::NONE32), LeaveCC, cs);
   // We should be masking the implict first digit of the mantissa away here,
   // but it just ends up combining harmlessly with the last digit of the
   // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
@@ -1164,7 +1164,7 @@ void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
   non_smi_exponent += 1 << HeapNumber::kExponentShift;
   __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent));
   __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
-  __ mov(ip, Operand(0, RelocInfo::NONE));
+  __ mov(ip, Operand(0, RelocInfo::NONE32));
   __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
   __ Ret();
 }
@@ -1380,7 +1380,7 @@ void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) {
          Operand(lhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
          SetCC);
   __ b(ne, &one_is_nan);
-  __ cmp(lhs_mantissa, Operand(0, RelocInfo::NONE));
+  __ cmp(lhs_mantissa, Operand(0, RelocInfo::NONE32));
   __ b(ne, &one_is_nan);

   __ bind(lhs_not_nan);
@@ -1395,7 +1395,7 @@ void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cond) {
          Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
          SetCC);
   __ b(ne, &one_is_nan);
-  __ cmp(rhs_mantissa, Operand(0, RelocInfo::NONE));
+  __ cmp(rhs_mantissa, Operand(0, RelocInfo::NONE32));
   __ b(eq, &neither_is_nan);

   __ bind(&one_is_nan);
@@ -1922,7 +1922,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
     __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
     __ tst(ip, Operand(1 << Map::kIsUndetectable));
     // Undetectable -> false.
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, ne);
     __ Ret(ne);
   }
 }
@@ -1955,14 +1955,14 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
     // "tos_" is a register, and contains a non zero value by default.
     // Hence we only need to overwrite "tos_" with zero to return false for
     // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);  // for FP_ZERO
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs);  // for FP_NAN
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, eq);  // for FP_ZERO
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, vs);  // for FP_NAN
   } else {
     Label done, not_nan, not_zero;
     __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kExponentOffset));
     // -0 maps to false:
     __ bic(
-        temp, temp, Operand(HeapNumber::kSignMask, RelocInfo::NONE), SetCC);
+        temp, temp, Operand(HeapNumber::kSignMask, RelocInfo::NONE32), SetCC);
     __ b(ne, &not_zero);
     // If exponent word is zero then the answer depends on the mantissa word.
     __ ldr(tos_, FieldMemOperand(tos_, HeapNumber::kMantissaOffset));
@@ -1975,25 +1975,25 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
     __ mov(temp, Operand(temp, LSR, HeapNumber::kMantissaBitsInTopWord));
     unsigned int shifted_exponent_mask =
         HeapNumber::kExponentMask >> HeapNumber::kMantissaBitsInTopWord;
-    __ cmp(temp, Operand(shifted_exponent_mask, RelocInfo::NONE));
+    __ cmp(temp, Operand(shifted_exponent_mask, RelocInfo::NONE32));
     __ b(ne, &not_nan);  // If exponent is not 0x7ff then it can't be a NaN.

     // Reload exponent word.
     __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kExponentOffset));
-    __ tst(temp, Operand(HeapNumber::kMantissaMask, RelocInfo::NONE));
+    __ tst(temp, Operand(HeapNumber::kMantissaMask, RelocInfo::NONE32));
     // If mantissa is not zero then we have a NaN, so return 0.
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, ne);
     __ b(ne, &done);

     // Load mantissa word.
     __ ldr(temp, FieldMemOperand(tos_, HeapNumber::kMantissaOffset));
-    __ cmp(temp, Operand(0, RelocInfo::NONE));
+    __ cmp(temp, Operand(0, RelocInfo::NONE32));
     // If mantissa is not zero then we have a NaN, so return 0.
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, ne);
     __ b(ne, &done);

     __ bind(&not_nan);
-    __ mov(tos_, Operand(1, RelocInfo::NONE));
+    __ mov(tos_, Operand(1, RelocInfo::NONE32));
     __ bind(&done);
   }
   __ Ret();
@@ -2016,7 +2016,7 @@ void ToBooleanStub::CheckOddball(MacroAssembler* masm,
   // The value of a root is never NULL, so we can avoid loading a non-null
   // value into tos_ when we want to return 'true'.
   if (!result) {
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+    __ mov(tos_, Operand(0, RelocInfo::NONE32), LeaveCC, eq);
   }
   __ Ret(eq);
 }
@@ -2161,7 +2161,7 @@ void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
   __ b(eq, slow);

   // Return '0 - value'.
-  __ rsb(r0, r0, Operand(0, RelocInfo::NONE));
+  __ rsb(r0, r0, Operand(0, RelocInfo::NONE32));
   __ Ret();
 }

@@ -3478,7 +3478,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index));
   // r0 points to the cache for the type type_.
   // If NULL, the cache hasn't been initialized yet, so go through runtime.
-  __ cmp(cache_entry, Operand(0, RelocInfo::NONE));
+  __ cmp(cache_entry, Operand(0, RelocInfo::NONE32));
   __ b(eq, &invalid_cache);

 #ifdef DEBUG
@@ -4091,7 +4091,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   Isolate* isolate = masm->isolate();
   ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
                                     isolate);
-  __ mov(r0, Operand(false, RelocInfo::NONE));
+  __ mov(r0, Operand(false, RelocInfo::NONE32));
   __ mov(r2, Operand(external_caught));
   __ str(r0, MemOperand(r2));

@@ -4773,7 +4773,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   // of the arguments object and the elements array in words.
   Label add_arguments_object;
   __ bind(&try_allocate);
-  __ cmp(r1, Operand(0, RelocInfo::NONE));
+  __ cmp(r1, Operand(0, RelocInfo::NONE32));
   __ b(eq, &add_arguments_object);
   __ mov(r1, Operand(r1, LSR, kSmiTagSize));
   __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
@@ -4806,7 +4806,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {

   // If there are no actual arguments, we're done.
   Label done;
-  __ cmp(r1, Operand(0, RelocInfo::NONE));
+  __ cmp(r1, Operand(0, RelocInfo::NONE32));
   __ b(eq, &done);

   // Get the parameters pointer from the stack.
@@ -4833,7 +4833,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   // Post-increment r4 with kPointerSize on each iteration.
   __ str(r3, MemOperand(r4, kPointerSize, PostIndex));
   __ sub(r1, r1, Operand(1));
-  __ cmp(r1, Operand(0, RelocInfo::NONE));
+  __ cmp(r1, Operand(0, RelocInfo::NONE32));
   __ b(ne, &loop);

   // Return and remove the on-stack parameters.
@@ -5469,8 +5469,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE));
   __ b(ne, &non_function);
   __ push(r1);  // put proxy as additional argument
-  __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE));
-  __ mov(r2, Operand(0, RelocInfo::NONE));
+  __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
+  __ mov(r2, Operand(0, RelocInfo::NONE32));
   __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
   __ SetCallKind(r5, CALL_AS_METHOD);
   {
@@ -5484,7 +5484,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   __ bind(&non_function);
   __ str(r1, MemOperand(sp, argc_ * kPointerSize));
   __ mov(r0, Operand(argc_));  // Set up the number of arguments.
-  __ mov(r2, Operand(0, RelocInfo::NONE));
+  __ mov(r2, Operand(0, RelocInfo::NONE32));
   __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
   __ SetCallKind(r5, CALL_AS_METHOD);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
@@ -5527,7 +5527,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
-  __ mov(r2, Operand(0, RelocInfo::NONE));
+  __ mov(r2, Operand(0, RelocInfo::NONE32));
   __ SetCallKind(r5, CALL_AS_METHOD);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
@@ -5696,7 +5696,7 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
   if (!ascii) {
     __ add(count, count, Operand(count), SetCC);
   } else {
-    __ cmp(count, Operand(0, RelocInfo::NONE));
+    __ cmp(count, Operand(0, RelocInfo::NONE32));
   }
   __ b(eq, &done);

@@ -5751,7 +5751,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
   if (!ascii) {
     __ add(count, count, Operand(count), SetCC);
   } else {
-    __ cmp(count, Operand(0, RelocInfo::NONE));
+    __ cmp(count, Operand(0, RelocInfo::NONE32));
   }
   __ b(eq, &done);

@@ -161,7 +161,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 #ifdef DEBUG
   __ RecordComment("// Calling from debug break to runtime - come in - over");
 #endif
-  __ mov(r0, Operand(0, RelocInfo::NONE));  // no arguments
+  __ mov(r0, Operand(0, RelocInfo::NONE32));  // no arguments
   __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));

   CEntryStub ceb(1);
@@ -80,12 +80,12 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
     // this is optimized code, so we don't have to have a predictable size.
     int call_size_in_bytes =
         MacroAssembler::CallSizeNotPredictableCodeSize(deopt_entry,
-                                                       RelocInfo::NONE);
+                                                       RelocInfo::NONE32);
     int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
     ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
     ASSERT(call_size_in_bytes <= patch_size());
     CodePatcher patcher(call_address, call_size_in_words);
-    patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
+    patcher.masm()->Call(deopt_entry, RelocInfo::NONE32);
     ASSERT(prev_call_address == NULL ||
            call_address >= prev_call_address + patch_size());
     ASSERT(call_address + patch_size() <= code->instruction_end());
@@ -3018,7 +3018,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
   // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
   __ vmov(d7, r0, r1);
   // Move 0x4130000000000000 to VFP.
-  __ mov(r0, Operand(0, RelocInfo::NONE));
+  __ mov(r0, Operand(0, RelocInfo::NONE32));
   __ vmov(d8, r0, r1);
   // Subtract and store the result in the heap number.
   __ vsub(d7, d7, d8);
@@ -4007,7 +4007,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
   // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
   __ vmov(d7, r0, r1);
   // Move 0x4130000000000000 to VFP.
-  __ mov(r0, Operand(0, RelocInfo::NONE));
+  __ mov(r0, Operand(0, RelocInfo::NONE32));
   __ vmov(d8, r0, r1);
   // Subtract and store the result in the heap number.
   __ vsub(d7, d7, d8);
@@ -4763,7 +4763,7 @@ static void GenerateUInt2Double(MacroAssembler* masm,
     masm->orr(hiword, scratch,
               Operand(hiword, LSR, mantissa_shift_for_hi_word));
   } else {
-    masm->mov(loword, Operand(0, RelocInfo::NONE));
+    masm->mov(loword, Operand(0, RelocInfo::NONE32));
     masm->orr(hiword, scratch,
               Operand(hiword, LSL, -mantissa_shift_for_hi_word));
   }
@@ -304,7 +304,7 @@ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
   if (!src2.is_reg() &&
       !src2.must_output_reloc_info(this) &&
       src2.immediate() == 0) {
-    mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, cond);
+    mov(dst, Operand(0, RelocInfo::NONE32), LeaveCC, cond);
   } else if (!src2.is_single_instruction(this) &&
              !src2.must_output_reloc_info(this) &&
              CpuFeatures::IsSupported(ARMv7) &&
@@ -410,7 +410,7 @@ void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
     }
     tst(dst, Operand(~satval));
     b(eq, &done);
-    mov(dst, Operand(0, RelocInfo::NONE), LeaveCC, mi);  // 0 if negative.
+    mov(dst, Operand(0, RelocInfo::NONE32), LeaveCC, mi);  // 0 if negative.
     mov(dst, Operand(satval), LeaveCC, pl);  // satval if positive.
     bind(&done);
   } else {
@@ -948,7 +948,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles,
   }

   // Clear top frame.
-  mov(r3, Operand(0, RelocInfo::NONE));
+  mov(r3, Operand(0, RelocInfo::NONE32));
   mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   str(r3, MemOperand(ip));

@@ -1218,7 +1218,7 @@ void MacroAssembler::IsObjectJSStringType(Register object,

 #ifdef ENABLE_DEBUGGER_SUPPORT
 void MacroAssembler::DebugBreak() {
-  mov(r0, Operand(0, RelocInfo::NONE));
+  mov(r0, Operand(0, RelocInfo::NONE32));
   mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
   CEntryStub ces(1);
   ASSERT(AllowThisStubCall(&ces));
@@ -1249,7 +1249,7 @@ void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
   // Push the frame pointer, context, state, and code object.
   if (kind == StackHandler::JS_ENTRY) {
     mov(r7, Operand(Smi::FromInt(0)));  // Indicates no context.
-    mov(ip, Operand(0, RelocInfo::NONE));  // NULL frame pointer.
+    mov(ip, Operand(0, RelocInfo::NONE32));  // NULL frame pointer.
     stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
   } else {
     stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
@@ -1373,7 +1373,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
   // In debug mode, make sure the lexical context is set.
 #ifdef DEBUG
-  cmp(scratch, Operand(0, RelocInfo::NONE));
+  cmp(scratch, Operand(0, RelocInfo::NONE32));
   Check(ne, "we should not have an empty lexical context");
 #endif

@@ -2456,7 +2456,7 @@ void MacroAssembler::ConvertToInt32(Register source,
           HeapNumber::kExponentBits);
   // Load dest with zero. We use this either for the final shift or
   // for the answer.
-  mov(dest, Operand(0, RelocInfo::NONE));
+  mov(dest, Operand(0, RelocInfo::NONE32));
   // Check whether the exponent matches a 32 bit signed int that is not a Smi.
   // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is
   // the exponent that we are fastest at and also the highest exponent we can
@@ -2510,7 +2510,7 @@ void MacroAssembler::ConvertToInt32(Register source,
     // Move down according to the exponent.
     mov(dest, Operand(scratch, LSR, dest));
     // Fix sign if sign bit was set.
-    rsb(dest, dest, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+    rsb(dest, dest, Operand(0, RelocInfo::NONE32), LeaveCC, ne);
     bind(&done);
   }
 }
@@ -3368,7 +3368,7 @@ void MacroAssembler::CountLeadingZeros(Register zeros,  // Answer.
   // Order of the next two lines is important: zeros register
   // can be the same as source register.
   Move(scratch, source);
-  mov(zeros, Operand(0, RelocInfo::NONE));
+  mov(zeros, Operand(0, RelocInfo::NONE32));
   // Top 16.
   tst(scratch, Operand(0xffff0000));
   add(zeros, zeros, Operand(16), LeaveCC, eq);
@@ -204,7 +204,7 @@ void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
   Label not_at_start;
   // Did we start the match at the start of the string at all?
   __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   BranchOrBacktrack(ne, &not_at_start);

   // If we did, are we still at the start of the input?
@@ -219,7 +219,7 @@ void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
 void RegExpMacroAssemblerARM::CheckNotAtStart(Label* on_not_at_start) {
   // Did we start the match at the start of the string at all?
   __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   BranchOrBacktrack(ne, on_not_at_start);
   // If we did, are we still at the start of the input?
   __ ldr(r1, MemOperand(frame_pointer(), kInputStart));
@@ -385,7 +385,7 @@ void RegExpMacroAssemblerARM::CheckNotBackReferenceIgnoreCase(
   }

   // Check if function returned non-zero for success or zero for failure.
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   BranchOrBacktrack(eq, on_no_match);
   // On success, increment position by length of capture.
   __ add(current_input_offset(), current_input_offset(), Operand(r4));
@@ -675,7 +675,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
   // Set frame pointer in space for it if this is not a direct call
   // from generated code.
   __ add(frame_pointer(), sp, Operand(4 * kPointerSize));
-  __ mov(r0, Operand(0, RelocInfo::NONE));
+  __ mov(r0, Operand(0, RelocInfo::NONE32));
   __ push(r0);  // Make room for success counter and initialize it to 0.
   __ push(r0);  // Make room for "position - 1" constant (value is irrelevant).
   // Check if we have space on the stack for registers.
@@ -700,7 +700,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {

   __ bind(&stack_limit_hit);
   CallCheckStackGuardState(r0);
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   // If returned value is non-zero, we exit with the returned value as result.
   __ b(ne, &return_r0);

@@ -728,7 +728,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {

   Label load_char_start_regexp, start_regexp;
   // Load newline if index is at start, previous character otherwise.
-  __ cmp(r1, Operand(0, RelocInfo::NONE));
+  __ cmp(r1, Operand(0, RelocInfo::NONE32));
   __ b(ne, &load_char_start_regexp);
   __ mov(current_character(), Operand('\n'), LeaveCC, eq);
   __ jmp(&start_regexp);
@@ -873,7 +873,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
   SafeCallTarget(&check_preempt_label_);

   CallCheckStackGuardState(r0);
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   // If returning non-zero, we should end execution with the given
   // result as return value.
   __ b(ne, &return_r0);
@@ -900,7 +900,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
   __ CallCFunction(grow_stack, num_arguments);
   // If return NULL, we have failed to grow the stack, and
   // must exit with a stack-overflow exception.
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   __ b(eq, &exit_with_exception);
   // Otherwise use return value as new stack pointer.
   __ mov(backtrack_stackpointer(), r0);
@@ -1008,7 +1008,7 @@ static void StoreIntAsFloat(MacroAssembler* masm,

     __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
     // Negate value if it is negative.
-    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+    __ rsb(ival, ival, Operand(0, RelocInfo::NONE32), LeaveCC, ne);

     // We have -1, 0 or 1, which we treat specially. Register ival contains
     // absolute value: it is either equal to 1 (special case of -1 and 1),
@@ -2241,7 +2241,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
   __ mov(r0, Operand(r0, LSL, kSmiTagSize));

   // Check for -0.
-  __ cmp(r0, Operand(0, RelocInfo::NONE));
+  __ cmp(r0, Operand(0, RelocInfo::NONE32));
   __ b(&restore_fpscr_and_return, ne);
   // r5 already holds the HeapNumber exponent.
   __ tst(r5, Operand(HeapNumber::kSignMask));
@@ -3944,18 +3944,18 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   // and infinities. All these should be converted to 0.
   __ mov(r7, Operand(HeapNumber::kExponentMask));
   __ and_(r9, r5, Operand(r7), SetCC);
-  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+  __ mov(r5, Operand(0, RelocInfo::NONE32), LeaveCC, eq);
   __ b(eq, &done);

   __ teq(r9, Operand(r7));
-  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+  __ mov(r5, Operand(0, RelocInfo::NONE32), LeaveCC, eq);
   __ b(eq, &done);

   // Unbias exponent.
   __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
   __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
   // If exponent is negative then result is 0.
-  __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
+  __ mov(r5, Operand(0, RelocInfo::NONE32), LeaveCC, mi);
   __ b(mi, &done);

   // If exponent is too big then result is minimal value.
@@ -3971,14 +3971,14 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
   __ b(pl, &sign);

-  __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
+  __ rsb(r9, r9, Operand(0, RelocInfo::NONE32));
   __ mov(r5, Operand(r5, LSL, r9));
   __ rsb(r9, r9, Operand(meaningfull_bits));
   __ orr(r5, r5, Operand(r6, LSR, r9));

   __ bind(&sign);
-  __ teq(r7, Operand(0, RelocInfo::NONE));
-  __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+  __ teq(r7, Operand(0, RelocInfo::NONE32));
+  __ rsb(r5, r5, Operand(0, RelocInfo::NONE32), LeaveCC, ne);

   __ bind(&done);
   switch (elements_kind) {
@@ -692,7 +692,7 @@ RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
 #ifdef ENABLE_DISASSEMBLER
 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
   switch (rmode) {
-    case RelocInfo::NONE:
+    case RelocInfo::NONE32:
       return "no reloc 32";
     case RelocInfo::NONE64:
       return "no reloc 64";
@@ -818,7 +818,7 @@ void RelocInfo::Verify() {
     case INTERNAL_REFERENCE:
     case CONST_POOL:
     case DEBUG_BREAK_SLOT:
-    case NONE:
+    case NONE32:
    case NONE64:
       break;
     case NUMBER_OF_MODES:
@@ -248,7 +248,7 @@ class RelocInfo BASE_EMBEDDED {
     // add more as needed
     // Pseudo-types
     NUMBER_OF_MODES,  // There are at most 15 modes with noncompact encoding.
-    NONE,  // never recorded 32-bit value
+    NONE32,  // never recorded 32-bit value
     NONE64,  // never recorded 64-bit value
     CODE_AGE_SEQUENCE,  // Not stored in RelocInfo array, used explictly by
                         // code aging.
@@ -320,7 +320,7 @@ class RelocInfo BASE_EMBEDDED {
     return mode == DEBUG_BREAK_SLOT;
   }
   static inline bool IsNone(Mode mode) {
-    return mode == NONE || mode == NONE64;
+    return mode == NONE32 || mode == NONE64;
   }
   static inline bool IsCodeAgeSequence(Mode mode) {
     return mode == CODE_AGE_SEQUENCE;
@@ -300,7 +300,7 @@ void RelocInfo::Visit(Heap* heap) {

 Immediate::Immediate(int x) {
   x_ = x;
-  rmode_ = RelocInfo::NONE;
+  rmode_ = RelocInfo::NONE32;
 }


@@ -326,20 +326,20 @@ Immediate::Immediate(Handle<Object> handle) {
   } else {
     // no relocation needed
     x_ = reinterpret_cast<intptr_t>(obj);
-    rmode_ = RelocInfo::NONE;
+    rmode_ = RelocInfo::NONE32;
   }
 }


 Immediate::Immediate(Smi* value) {
   x_ = reinterpret_cast<intptr_t>(value);
-  rmode_ = RelocInfo::NONE;
+  rmode_ = RelocInfo::NONE32;
 }


 Immediate::Immediate(Address addr) {
   x_ = reinterpret_cast<int32_t>(addr);
-  rmode_ = RelocInfo::NONE;
+  rmode_ = RelocInfo::NONE32;
 }


@@ -396,7 +396,7 @@ void Assembler::emit_code_relative_offset(Label* label) {


 void Assembler::emit_w(const Immediate& x) {
-  ASSERT(x.rmode_ == RelocInfo::NONE);
+  ASSERT(x.rmode_ == RelocInfo::NONE32);
   uint16_t value = static_cast<uint16_t>(x.x_);
   reinterpret_cast<uint16_t*>(pc_)[0] = value;
   pc_ += sizeof(uint16_t);
@@ -236,7 +236,7 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
 #endif

   // Patch the code.
-  patcher.masm()->call(target, RelocInfo::NONE);
+  patcher.masm()->call(target, RelocInfo::NONE32);

   // Check that the size of the code generated is as expected.
   ASSERT_EQ(kCallCodeSize,
@@ -255,11 +255,11 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {

 Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
   // [base + disp/r]
-  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
+  if (disp == 0 && rmode == RelocInfo::NONE32 && !base.is(ebp)) {
     // [base]
     set_modrm(0, base);
     if (base.is(esp)) set_sib(times_1, esp, base);
-  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
+  } else if (is_int8(disp) && rmode == RelocInfo::NONE32) {
     // [base + disp8]
     set_modrm(1, base);
     if (base.is(esp)) set_sib(times_1, esp, base);
@@ -280,11 +280,11 @@ Operand::Operand(Register base,
                  RelocInfo::Mode rmode) {
   ASSERT(!index.is(esp));  // illegal addressing mode
   // [base + index*scale + disp/r]
-  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
+  if (disp == 0 && rmode == RelocInfo::NONE32 && !base.is(ebp)) {
     // [base + index*scale]
     set_modrm(0, esp);
     set_sib(scale, index, base);
-  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
+  } else if (is_int8(disp) && rmode == RelocInfo::NONE32) {
     // [base + index*scale + disp8]
     set_modrm(1, esp);
     set_sib(scale, index, base);
@@ -1180,7 +1180,7 @@ void Assembler::test(Register reg, const Immediate& imm) {
   EnsureSpace ensure_space(this);
   // Only use test against byte for registers that have a byte
   // variant: eax, ebx, ecx, and edx.
-  if (imm.rmode_ == RelocInfo::NONE &&
+  if (imm.rmode_ == RelocInfo::NONE32 &&
       is_uint8(imm.x_) &&
       reg.is_byte_register()) {
     uint8_t imm8 = imm.x_;
@@ -2614,7 +2614,7 @@ void Assembler::emit_operand(Register reg, const Operand& adr) {
   pc_ += length;

   // Emit relocation information if necessary.
-  if (length >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE) {
+  if (length >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE32) {
     pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
     RecordRelocInfo(adr.rmode_);
     pc_ += sizeof(int32_t);
@@ -337,12 +337,12 @@ class Immediate BASE_EMBEDDED {
     return Immediate(label);
   }

-  bool is_zero() const { return x_ == 0 && rmode_ == RelocInfo::NONE; }
+  bool is_zero() const { return x_ == 0 && rmode_ == RelocInfo::NONE32; }
   bool is_int8() const {
-    return -128 <= x_ && x_ < 128 && rmode_ == RelocInfo::NONE;
+    return -128 <= x_ && x_ < 128 && rmode_ == RelocInfo::NONE32;
   }
   bool is_int16() const {
-    return -32768 <= x_ && x_ < 32768 && rmode_ == RelocInfo::NONE;
+    return -32768 <= x_ && x_ < 32768 && rmode_ == RelocInfo::NONE32;
   }

  private:
@@ -382,20 +382,20 @@ class Operand BASE_EMBEDDED {

   // [base + disp/r]
   explicit Operand(Register base, int32_t disp,
-                   RelocInfo::Mode rmode = RelocInfo::NONE);
+                   RelocInfo::Mode rmode = RelocInfo::NONE32);

   // [base + index*scale + disp/r]
   explicit Operand(Register base,
                    Register index,
                    ScaleFactor scale,
                    int32_t disp,
-                   RelocInfo::Mode rmode = RelocInfo::NONE);
+                   RelocInfo::Mode rmode = RelocInfo::NONE32);

   // [index*scale + disp/r]
   explicit Operand(Register index,
                    ScaleFactor scale,
                    int32_t disp,
-                   RelocInfo::Mode rmode = RelocInfo::NONE);
+                   RelocInfo::Mode rmode = RelocInfo::NONE32);

   static Operand StaticVariable(const ExternalReference& ext) {
     return Operand(reinterpret_cast<int32_t>(ext.address()),
@@ -7470,7 +7470,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {

   // Call the entry hook.
   int32_t hook_location = reinterpret_cast<int32_t>(&entry_hook_);
-  __ call(Operand(hook_location, RelocInfo::NONE));
+  __ call(Operand(hook_location, RelocInfo::NONE32));
   __ add(esp, Immediate(2 * kPointerSize));

   // Restore ecx.
@@ -938,7 +938,7 @@ void Code::PatchPlatformCodeAge(byte* sequence,
   } else {
     Code* stub = GetCodeAgeStub(age, parity);
     CodePatcher patcher(sequence, young_length);
-    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE);
+    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
   }
 }

@@ -158,7 +158,7 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
     Address call_address = code_start_address + deopt_data->Pc(i)->value();
     CodePatcher patcher(call_address, patch_size());
     Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
-    patcher.masm()->call(deopt_entry, RelocInfo::NONE);
+    patcher.masm()->call(deopt_entry, RelocInfo::NONE32);
     // We use RUNTIME_ENTRY for deoptimization bailouts.
     RelocInfo rinfo(call_address + 1,  // 1 after the call opcode.
                     RelocInfo::RUNTIME_ENTRY,
@@ -170,7 +170,7 @@ void MacroAssembler::LoadUint32(XMMRegister dst,
   Label done;
   cmp(src, Immediate(0));
   movdbl(scratch,
-         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
+         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE32));
   cvtsi2sd(dst, src);
   j(not_sign, &done, Label::kNear);
   addsd(dst, scratch);
@@ -273,7 +273,7 @@ RelocInfo::Mode IC::ComputeMode() {
     if (info->pc() == addr) return info->rmode();
   }
   UNREACHABLE();
-  return RelocInfo::NONE;
+  return RelocInfo::NONE32;
 }


@@ -66,7 +66,7 @@ Operand::Operand(const ExternalReference& f) {
 Operand::Operand(Smi* value) {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<intptr_t>(value);
-  rmode_ = RelocInfo::NONE;
+  rmode_ = RelocInfo::NONE32;
 }


@@ -221,7 +221,7 @@ Operand::Operand(Handle<Object> handle) {
   } else {
     // No relocation needed.
     imm32_ = reinterpret_cast<intptr_t>(obj);
-    rmode_ = RelocInfo::NONE;
+    rmode_ = RelocInfo::NONE32;
   }
 }

@@ -361,7 +361,7 @@ class Operand BASE_EMBEDDED {
  public:
   // Immediate.
   INLINE(explicit Operand(int32_t immediate,
-                          RelocInfo::Mode rmode = RelocInfo::NONE));
+                          RelocInfo::Mode rmode = RelocInfo::NONE32));
   INLINE(explicit Operand(const ExternalReference& f));
   INLINE(explicit Operand(const char* s));
   INLINE(explicit Operand(Object** opp));
@@ -1413,7 +1413,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // a0: actual number of arguments
   // a1: function
   Label shift_arguments;
-  __ li(t0, Operand(0, RelocInfo::NONE));  // Indicate regular JS_FUNCTION.
+  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
   { Label convert_to_object, use_global_receiver, patch_receiver;
     // Change context eagerly in case we need the global receiver.
     __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
@@ -1467,7 +1467,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     __ sll(at, a0, kPointerSizeLog2);
     __ addu(at, sp, at);
     __ lw(a1, MemOperand(at));
-    __ li(t0, Operand(0, RelocInfo::NONE));
+    __ li(t0, Operand(0, RelocInfo::NONE32));
     __ Branch(&patch_receiver);

     // Use the global receiver object from the called function as the
@@ -1490,11 +1490,11 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {

   // 3b. Check for function proxy.
   __ bind(&slow);
-  __ li(t0, Operand(1, RelocInfo::NONE));  // Indicate function proxy.
+  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
   __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

   __ bind(&non_function);
-  __ li(t0, Operand(2, RelocInfo::NONE));  // Indicate non-function.
+  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

   // 3c. Patch the first argument when calling a non-function. The
   // CALL_NON_FUNCTION builtin expects the non-function callee as
@@ -1725,7 +1725,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ bind(&call_proxy);
   __ push(a1);  // Add function proxy as last argument.
   __ Addu(a0, a0, Operand(1));
-  __ li(a2, Operand(0, RelocInfo::NONE));
+  __ li(a2, Operand(0, RelocInfo::NONE32));
   __ SetCallKind(t1, CALL_AS_METHOD);
   __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
   __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
@@ -4109,7 +4109,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   Isolate* isolate = masm->isolate();
   ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
                                     isolate);
-  __ li(a0, Operand(false, RelocInfo::NONE));
+  __ li(a0, Operand(false, RelocInfo::NONE32));
   __ li(a2, Operand(external_caught));
   __ sw(a0, MemOperand(a2));

@@ -5538,8 +5538,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   // Check for function proxy.
   __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
   __ push(a1);  // Put proxy as additional argument.
-  __ li(a0, Operand(argc_ + 1, RelocInfo::NONE));
-  __ li(a2, Operand(0, RelocInfo::NONE));
+  __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
+  __ li(a2, Operand(0, RelocInfo::NONE32));
   __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
   __ SetCallKind(t1, CALL_AS_METHOD);
   {
@@ -5596,7 +5596,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
-  __ li(a2, Operand(0, RelocInfo::NONE));
+  __ li(a2, Operand(0, RelocInfo::NONE32));
   __ SetCallKind(t1, CALL_AS_METHOD);
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
@@ -75,12 +75,12 @@ void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
     Address call_address = code_start_address + deopt_data->Pc(i)->value();
     Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
     int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
-                                                      RelocInfo::NONE);
+                                                      RelocInfo::NONE32);
     int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
     ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
     ASSERT(call_size_in_bytes <= patch_size());
     CodePatcher patcher(call_address, call_size_in_words);
-    patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
+    patcher.masm()->Call(deopt_entry, RelocInfo::NONE32);
     ASSERT(prev_call_address == NULL ||
            call_address >= prev_call_address + patch_size());
     ASSERT(call_address + patch_size() <= code->instruction_end());
@@ -89,7 +89,7 @@ TEST(1) {
   Label L, C;

   __ mov(r1, Operand(r0));
-  __ mov(r0, Operand(0, RelocInfo::NONE));
+  __ mov(r0, Operand(0, RelocInfo::NONE32));
   __ b(&C);

   __ bind(&L);
@@ -97,7 +97,7 @@ TEST(1) {
   __ sub(r1, r1, Operand(1));

   __ bind(&C);
-  __ teq(r1, Operand(0, RelocInfo::NONE));
+  __ teq(r1, Operand(0, RelocInfo::NONE32));
   __ b(ne, &L);
   __ mov(pc, Operand(lr));

@@ -134,7 +134,7 @@ TEST(2) {
   __ sub(r1, r1, Operand(1));

   __ bind(&C);
-  __ teq(r1, Operand(0, RelocInfo::NONE));
+  __ teq(r1, Operand(0, RelocInfo::NONE32));
   __ b(ne, &L);
   __ mov(pc, Operand(lr));

@@ -241,7 +241,7 @@ TEST(AssemblerIa325) {
   v8::internal::byte buffer[256];
   Assembler assm(Isolate::Current(), buffer, sizeof buffer);

-  __ mov(eax, Operand(reinterpret_cast<intptr_t>(&baz), RelocInfo::NONE));
+  __ mov(eax, Operand(reinterpret_cast<intptr_t>(&baz), RelocInfo::NONE32));
  __ ret(0);

   CodeDesc desc;
@@ -76,7 +76,7 @@ TEST(DisasmIa320) {

   // ---- All instructions that I can think of
   __ add(edx, ebx);
-  __ add(edx, Operand(12, RelocInfo::NONE));
+  __ add(edx, Operand(12, RelocInfo::NONE32));
   __ add(edx, Operand(ebx, 0));
   __ add(edx, Operand(ebx, 16));
   __ add(edx, Operand(ebx, 1999));