A64: Remove Operand constructors where an implicit constructor can be used.
R=jochen@chromium.org

Review URL: https://codereview.chromium.org/204293004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20111 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0768479e74
commit bfcc117f4c
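The change in one line: "__ Mov(x1, Operand(Smi::FromInt(n)));" becomes "__ Mov(x1, Smi::FromInt(n));". Below is a minimal, hypothetical sketch (not V8's actual classes; names and types simplified) of the C++ mechanism the commit relies on: because Operand's single-argument constructors are implicit, any value of a convertible type passed where an Operand parameter is expected is wrapped automatically, so spelling out Operand(...) at each call site is redundant.

    // operand_sketch.cc -- illustrative only; Smi, Operand and Mov are
    // simplified stand-ins for the V8 A64 macro-assembler types.
    #include <cstdint>
    #include <iostream>

    struct Smi {  // stand-in: V8's real Smi is a tagged pointer type
      std::int64_t value;
      static Smi FromInt(int n) { return Smi{n}; }
    };

    class Operand {
     public:
      // Implicit one-argument constructors: each convertible type turns
      // into an Operand automatically at call sites expecting an Operand.
      Operand(std::int64_t immediate) : immediate_(immediate) {}
      Operand(Smi smi) : immediate_(smi.value) {}
      std::int64_t immediate() const { return immediate_; }
     private:
      std::int64_t immediate_;
    };

    // Stand-in for MacroAssembler::Mov(const Register&, const Operand&).
    void Mov(const char* rd, const Operand& operand) {
      std::cout << "mov " << rd << ", #" << operand.immediate() << "\n";
    }

    int main() {
      Mov("x1", Operand(Smi::FromInt(7)));  // old style: explicit wrapper
      Mov("x1", Smi::FromInt(7));           // new style: implicit conversion
    }

Both calls construct the same temporary Operand; the diff below mechanically drops the now-redundant wrappers (and relies on the analogous implicit constructor for the ExternalReference cases).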
@@ -861,7 +861,7 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::MANUAL);
     __ Push(x0, x1, fp, lr);
-    __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate())));
+    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
     __ CallCFunction(
         ExternalReference::get_make_code_young_function(masm->isolate()), 2);
     __ Pop(lr, fp, x1, x0);
@@ -901,7 +901,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::MANUAL);
     __ Push(x0, x1, fp, lr);
-    __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate())));
+    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
     __ CallCFunction(
         ExternalReference::get_mark_code_as_executed_function(
             masm->isolate()), 2);
@@ -963,7 +963,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     // Pass the deoptimization type to the runtime system.
-    __ Mov(x0, Operand(Smi::FromInt(static_cast<int>(type))));
+    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
     __ Push(x0);
     __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
   }
@@ -1019,7 +1019,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {

   // If the code object is null, just return to the unoptimized code.
   Label skip;
-  __ CompareAndBranch(x0, Operand(Smi::FromInt(0)), ne, &skip);
+  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
   __ Ret();

   __ Bind(&skip);
@@ -1358,7 +1358,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {

     // Use inline caching to access the arguments.
     __ Ldr(current, MemOperand(fp, kIndexOffset));
-    __ Add(current, current, Operand(Smi::FromInt(1)));
+    __ Add(current, current, Smi::FromInt(1));
     __ Str(current, MemOperand(fp, kIndexOffset));

     // Test if the copy loop has finished copying all the elements from the
@@ -1402,7 +1402,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {

 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ SmiTag(x10, x0);
-  __ Mov(x11, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ Push(lr, fp);
   __ Push(x11, x1, x10);
   __ Add(fp, jssp,
@@ -1080,7 +1080,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
       ASSERT((cond == gt) || (cond == ge));  // remaining cases
       ncr = LESS;
     }
-    __ Mov(x10, Operand(Smi::FromInt(ncr)));
+    __ Mov(x10, Smi::FromInt(ncr));
     __ Push(x10);
   }

@@ -1111,7 +1111,7 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   }

   AllowExternalCallThatCantCauseGC scope(masm);
-  __ Mov(x0, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ Mov(x0, ExternalReference::isolate_address(masm->isolate()));
   __ CallCFunction(
       ExternalReference::store_buffer_overflow_function(masm->isolate()),
       1, 0);
@@ -1490,7 +1490,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   if (do_gc) {
     // Call Runtime::PerformGC, passing x0 (the result parameter for
     // PerformGC) and x1 (the isolate).
-    __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate())));
+    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
     __ CallCFunction(
         ExternalReference::perform_gc_function(isolate), 2, 0);
   }
@@ -1507,7 +1507,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   // Prepare AAPCS64 arguments to pass to the builtin.
   __ Mov(x0, argc);
   __ Mov(x1, argv);
-  __ Mov(x2, Operand(ExternalReference::isolate_address(isolate)));
+  __ Mov(x2, ExternalReference::isolate_address(isolate));

   // Store the return address on the stack, in the space previously allocated
   // by EnterExitFrame. The return address is queried by
@@ -1820,8 +1820,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   int64_t bad_frame_pointer = -1L;  // Bad frame pointer to fail if it is used.
   __ Mov(x13, bad_frame_pointer);
-  __ Mov(x12, Operand(Smi::FromInt(marker)));
-  __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+  __ Mov(x12, Smi::FromInt(marker));
+  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
   __ Ldr(x10, MemOperand(x11));

   __ Push(x13, xzr, x12, x10);
@@ -1832,11 +1832,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // outermost JS call.
   Label non_outermost_js, done;
   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
-  __ Mov(x10, Operand(ExternalReference(js_entry_sp)));
+  __ Mov(x10, ExternalReference(js_entry_sp));
   __ Ldr(x11, MemOperand(x10));
   __ Cbnz(x11, &non_outermost_js);
   __ Str(fp, MemOperand(x10));
-  __ Mov(x12, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+  __ Mov(x12, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
   __ Push(x12);
   __ B(&done);
   __ Bind(&non_outermost_js);
@@ -1905,7 +1905,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
     ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
                                          : Builtins::kJSEntryTrampoline,
                             isolate);
-    __ Mov(x10, Operand(entry));
+    __ Mov(x10, entry);

     // Call the JSEntryTrampoline.
     __ Ldr(x11, MemOperand(x10));  // Dereference the address.
@@ -1929,15 +1929,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Check if the current stack frame is marked as the outermost JS frame.
   Label non_outermost_js_2;
   __ Pop(x10);
-  __ Cmp(x10, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+  __ Cmp(x10, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
   __ B(ne, &non_outermost_js_2);
-  __ Mov(x11, Operand(ExternalReference(js_entry_sp)));
+  __ Mov(x11, ExternalReference(js_entry_sp));
   __ Str(xzr, MemOperand(x11));
   __ Bind(&non_outermost_js_2);

   // Restore the top frame descriptors from the stack.
   __ Pop(x10);
-  __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
   __ Str(x10, MemOperand(x11));

   // Reset the stack to the callee saved registers.
@@ -2017,8 +2017,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ LoadTrueFalseRoots(res_true, res_false);
   } else {
     // This is counter-intuitive, but correct.
-    __ Mov(res_true, Operand(Smi::FromInt(0)));
-    __ Mov(res_false, Operand(Smi::FromInt(1)));
+    __ Mov(res_true, Smi::FromInt(0));
+    __ Mov(res_false, Smi::FromInt(1));
   }

   // Check that the left hand side is a JS object and load its map as a side
@@ -2188,7 +2188,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ Ldr(caller_ctx, MemOperand(caller_fp,
                                 StandardFrameConstants::kContextOffset));
-  __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ Csel(local_fp, fp, caller_fp, ne);
   __ B(ne, &skip_adaptor);

@@ -2238,7 +2238,7 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
   __ Poke(x11, 0 * kXRegSize);
   __ Add(x10, caller_fp, Operand::UntagSmiAndScale(x11, kPointerSizeLog2));
-  __ Add(x10, x10, Operand(StandardFrameConstants::kCallerSPOffset));
+  __ Add(x10, x10, StandardFrameConstants::kCallerSPOffset);
   __ Poke(x10, 1 * kXRegSize);

   __ Bind(&runtime);
@@ -2271,7 +2271,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ Ldr(caller_ctx, MemOperand(caller_fp,
                                 StandardFrameConstants::kContextOffset));
-  __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ B(eq, &adaptor_frame);

   // No adaptor, parameter count = argument count.
@@ -2473,7 +2473,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   __ Str(index, MemOperand(elements, x10));
   __ Sub(x10, x10, kParameterMapHeaderSize - FixedArray::kHeaderSize);
   __ Str(the_hole, MemOperand(backing_store, x10));
-  __ Add(index, index, Operand(Smi::FromInt(1)));
+  __ Add(index, index, Smi::FromInt(1));
   __ Bind(&parameters_test);
   __ Cbnz(loop_count, &parameters_loop);

@@ -2542,7 +2542,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ Ldr(caller_ctx, MemOperand(caller_fp,
                                 StandardFrameConstants::kContextOffset));
-  __ Cmp(caller_ctx, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ B(ne, &try_allocate);

   //   x1   param_count_smi   number of parameters passed to function (smi)
@@ -2728,7 +2728,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
       ExternalReference::address_of_regexp_stack_memory_address(isolate);
   ExternalReference address_of_regexp_stack_memory_size =
       ExternalReference::address_of_regexp_stack_memory_size(isolate);
-  __ Mov(x10, Operand(address_of_regexp_stack_memory_size));
+  __ Mov(x10, address_of_regexp_stack_memory_size);
   __ Ldr(x10, MemOperand(x10));
   __ Cbz(x10, &runtime);

@@ -2750,7 +2750,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {

   // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
   __ Ldr(x10, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
-  __ Cmp(x10, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
+  __ Cmp(x10, Smi::FromInt(JSRegExp::IRREGEXP));
   __ B(ne, &runtime);

   // Check that the number of captures fit in the static offsets vector buffer.
@@ -2905,7 +2905,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   //   csp[0]: Space for the return address placed by DirectCEntryStub.
   //   csp[8]: Argument 9, the current isolate address.

-  __ Mov(x10, Operand(ExternalReference::isolate_address(isolate)));
+  __ Mov(x10, ExternalReference::isolate_address(isolate));
   __ Poke(x10, kPointerSize);

   Register length = w11;
@@ -2954,8 +2954,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ Add(x3, x2, Operand(w10, UXTW));

   // Argument 5 (x4): static offsets vector buffer.
-  __ Mov(x4,
-         Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
+  __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate));

   // Argument 6 (x5): Set the number of capture registers to zero to force
   // global regexps to behave as non-global. This stub is not used for global
@@ -2963,9 +2962,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ Mov(x5, 0);

   // Argument 7 (x6): Start (high end) of backtracking stack memory area.
-  __ Mov(x10, Operand(address_of_regexp_stack_memory_address));
+  __ Mov(x10, address_of_regexp_stack_memory_address);
   __ Ldr(x10, MemOperand(x10));
-  __ Mov(x11, Operand(address_of_regexp_stack_memory_size));
+  __ Mov(x11, address_of_regexp_stack_memory_size);
   __ Ldr(x11, MemOperand(x11));
   __ Add(x6, x10, x11);

@@ -3061,7 +3060,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // and fill the last match info.
   ExternalReference address_of_static_offsets_vector =
       ExternalReference::address_of_static_offsets_vector(isolate);
-  __ Mov(offsets_vector_index, Operand(address_of_static_offsets_vector));
+  __ Mov(offsets_vector_index, address_of_static_offsets_vector);

   Label next_capture, done;
   // Capture register counter starts from number of capture registers and
@@ -3556,7 +3555,7 @@ void StringCharCodeAtGenerator::GenerateSlow(

 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   __ JumpIfNotSmi(code_, &slow_case_);
-  __ Cmp(code_, Operand(Smi::FromInt(String::kMaxOneByteCharCode)));
+  __ Cmp(code_, Smi::FromInt(String::kMaxOneByteCharCode));
   __ B(hi, &slow_case_);

   __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
@@ -3906,7 +3905,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
     // Preserve some caller-saved registers.
     __ Push(x1, x0, lr);
     // Push the arguments.
-    __ Mov(op, Operand(Smi::FromInt(op_)));
+    __ Mov(op, Smi::FromInt(op_));
     __ Push(left, right, op);

     // Call the miss handler. This also pops the arguments.
@@ -4255,7 +4254,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
   __ B(eq, &check_zero_length);

   __ Bind(&strings_not_equal);
-  __ Mov(result, Operand(Smi::FromInt(NOT_EQUAL)));
+  __ Mov(result, Smi::FromInt(NOT_EQUAL));
   __ Ret();

   // Check if the length is zero. If so, the strings must be equal (and empty.)
@@ -4263,7 +4262,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
   __ Bind(&check_zero_length);
   STATIC_ASSERT(kSmiTag == 0);
   __ Cbnz(left_length, &compare_chars);
-  __ Mov(result, Operand(Smi::FromInt(EQUAL)));
+  __ Mov(result, Smi::FromInt(EQUAL));
   __ Ret();

   // Compare characters. Falls through if all characters are equal.
@@ -4272,7 +4271,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                 scratch3, &strings_not_equal);

   // Characters in strings are equal.
-  __ Mov(result, Operand(Smi::FromInt(EQUAL)));
+  __ Mov(result, Smi::FromInt(EQUAL));
   __ Ret();
 }

@@ -4314,8 +4313,8 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
   __ Bind(&result_not_equal);
   Register greater = x10;
   Register less = x11;
-  __ Mov(greater, Operand(Smi::FromInt(GREATER)));
-  __ Mov(less, Operand(Smi::FromInt(LESS)));
+  __ Mov(greater, Smi::FromInt(GREATER));
+  __ Mov(less, Smi::FromInt(LESS));
   __ CmovX(result, greater, gt);
   __ CmovX(result, less, lt);
   __ Ret();
@@ -4441,7 +4440,7 @@ void ArrayPushStub::Generate(MacroAssembler* masm) {
   // Get the array's length and calculate new length.
   __ Ldr(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
   STATIC_ASSERT(kSmiTag == 0);
-  __ Add(length, length, Operand(Smi::FromInt(argc)));
+  __ Add(length, length, Smi::FromInt(argc));

   // Check if we could survive without allocation.
   __ Ldr(elements_length,
@@ -4568,12 +4567,12 @@ void ArrayPushStub::Generate(MacroAssembler* masm) {
   __ Add(end_elements, elements,
          Operand::UntagSmiAndScale(length, kPointerSizeLog2));
   __ Add(end_elements, end_elements, kEndElementsOffset);
-  __ Mov(allocation_top_addr, Operand(new_space_allocation_top));
+  __ Mov(allocation_top_addr, new_space_allocation_top);
   __ Ldr(allocation_top, MemOperand(allocation_top_addr));
   __ Cmp(end_elements, allocation_top);
   __ B(ne, &call_builtin);

-  __ Mov(x10, Operand(new_space_allocation_limit));
+  __ Mov(x10, new_space_allocation_limit);
   __ Ldr(x10, MemOperand(x10));
   __ Add(allocation_top, allocation_top, kAllocationDelta * kPointerSize);
   __ Cmp(allocation_top, x10);
@@ -4592,9 +4591,7 @@ void ArrayPushStub::Generate(MacroAssembler* masm) {

   // Update elements' and array's sizes.
   __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
-  __ Add(elements_length,
-         elements_length,
-         Operand(Smi::FromInt(kAllocationDelta)));
+  __ Add(elements_length, elements_length, Smi::FromInt(kAllocationDelta));
   __ Str(elements_length,
          FieldMemOperand(elements, FixedArray::kLengthOffset));

@@ -4693,7 +4690,7 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   __ Mov(address, regs_.address());
   __ Mov(x0, regs_.object());
   __ Mov(x1, address);
-  __ Mov(x2, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));

   AllowExternalCallThatCantCauseGC scope(masm);
   ExternalReference function =
@@ -4948,7 +4945,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
                                       ExternalReference::BUILTIN_CALL,
                                       masm->isolate())));
   // It additionally takes an isolate as a third parameter
-  __ Mov(x2, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
 #endif

   // The caller's return address is above the saved temporaries.
@@ -5335,7 +5332,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   __ Ldr(x11, FieldMemOperand(allocation_site,
                               AllocationSite::kTransitionInfoOffset));
-  __ Add(x11, x11, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
+  __ Add(x11, x11, Smi::FromInt(kFastElementsKindPackedToHoley));
   __ Str(x11, FieldMemOperand(allocation_site,
                               AllocationSite::kTransitionInfoOffset));

@@ -5617,7 +5614,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
     __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
   }
   Register isolate_reg = x5;
-  __ Mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate)));
+  __ Mov(isolate_reg, ExternalReference::isolate_address(isolate));

   // FunctionCallbackArguments:
   //    return value, return value default, isolate, holder.
@@ -519,7 +519,7 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
   //   ExternalReference::InitializeMathExpData().

   // Load the address of the start of the array.
-  __ Mov(constants, Operand(ExternalReference::math_exp_constants(0)));
+  __ Mov(constants, ExternalReference::math_exp_constants(0));

   // We have to do a four-way split here:
   //  - If input <= about -708.4, the output always rounds to zero.
@@ -595,7 +595,7 @@ void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
   __ Add(temp1, temp1, 0x3ff);

   // Do the final table lookup.
-  __ Mov(temp3, Operand(ExternalReference::math_exp_log_table()));
+  __ Mov(temp3, ExternalReference::math_exp_log_table());

   __ Add(temp3, temp3, Operand(temp2, LSL, kDRegSizeLog2));
   __ Ldp(temp2.W(), temp3.W(), MemOperand(temp3));
@@ -202,7 +202,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
     __ RecordComment("// Calling from debug break to runtime - come in - over");
 #endif
     __ Mov(x0, 0);  // No arguments.
-    __ Mov(x1, Operand(ExternalReference::debug_break(masm->isolate())));
+    __ Mov(x1, ExternalReference::debug_break(masm->isolate()));

     CEntryStub stub(1);
     __ CallStub(&stub);
@@ -234,7 +234,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
   // overwritten by the address of DebugBreakXXX.
   ExternalReference after_break_target(Debug_Address::AfterBreakTarget(),
                                        masm->isolate());
-  __ Mov(scratch, Operand(after_break_target));
+  __ Mov(scratch, after_break_target);
   __ Ldr(scratch, MemOperand(scratch));
   __ Br(scratch);
 }
@@ -185,7 +185,7 @@ void Deoptimizer::EntryGenerator::Generate() {
   //  - x2: bailout id
   //  - x3: code object address
   //  - x4: fp-to-sp delta
-  __ Mov(x5, Operand(ExternalReference::isolate_address(isolate())));
+  __ Mov(x5, ExternalReference::isolate_address(isolate()));

   {
     // Call Deoptimizer::New().
@@ -243,7 +243,7 @@ void FullCodeGenerator::Generate() {
       int num_parameters = info->scope()->num_parameters();
       int offset = num_parameters * kPointerSize;
       __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
-      __ Mov(x1, Operand(Smi::FromInt(num_parameters)));
+      __ Mov(x1, Smi::FromInt(num_parameters));
       __ Push(x3, x2, x1);

       // Arguments to ArgumentsAccessStub:
@@ -322,14 +322,14 @@ void FullCodeGenerator::Generate() {


 void FullCodeGenerator::ClearAccumulator() {
-  __ Mov(x0, Operand(Smi::FromInt(0)));
+  __ Mov(x0, Smi::FromInt(0));
 }


 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
   __ Mov(x2, Operand(profiling_counter_));
   __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
-  __ Subs(x3, x3, Operand(Smi::FromInt(delta)));
+  __ Subs(x3, x3, Smi::FromInt(delta));
   __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
 }

@@ -341,7 +341,7 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
     reset_value = FLAG_interrupt_budget >> 4;
   }
   __ Mov(x2, Operand(profiling_counter_));
-  __ Mov(x3, Operand(Smi::FromInt(reset_value)));
+  __ Mov(x3, Smi::FromInt(reset_value));
   __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
 }

@@ -831,7 +831,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
       ASSERT(IsDeclaredVariableMode(mode));
       PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
                                                               : NONE;
-      __ Mov(x1, Operand(Smi::FromInt(attr)));
+      __ Mov(x1, Smi::FromInt(attr));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
@@ -895,7 +895,7 @@ void FullCodeGenerator::VisitFunctionDeclaration(
     case Variable::LOOKUP: {
       Comment cmnt(masm_, "[ Function Declaration");
       __ Mov(x2, Operand(variable->name()));
-      __ Mov(x1, Operand(Smi::FromInt(NONE)));
+      __ Mov(x1, Smi::FromInt(NONE));
       __ Push(cp, x2, x1);
       // Push initial value for function declaration.
       VisitForStackValue(declaration->fun());
@@ -971,7 +971,7 @@ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   Register flags = xzr;
   if (Smi::FromInt(DeclareGlobalsFlags())) {
     flags = x10;
-    __ Mov(flags, Operand(Smi::FromInt(DeclareGlobalsFlags())));
+    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
   }
   __ Push(cp, x11, flags);
   __ CallRuntime(Runtime::kDeclareGlobals, 3);
@@ -1150,7 +1150,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {

   // Set up the four remaining stack slots.
   __ Push(x0);  // Map.
-  __ Mov(x0, Operand(Smi::FromInt(0)));
+  __ Mov(x0, Smi::FromInt(0));
   // Push enumeration cache, enumeration cache length (as smi) and zero.
   __ SmiTag(x1);
   __ Push(x2, x1, x0);
@@ -1168,10 +1168,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
                                        isolate());
   StoreFeedbackVectorSlot(slot, feedback);
   __ LoadObject(x1, FeedbackVector());
-  __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
+  __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
   __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));

-  __ Mov(x1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
+  __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
   __ Peek(x10, 0);  // Get enumerated object.
   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
   // TODO(all): similar check was done already. Can we avoid it here?
@@ -1237,7 +1237,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ Bind(loop_statement.continue_label());
   // TODO(all): We could use a callee saved register to avoid popping.
   __ Pop(x0);
-  __ Add(x0, x0, Operand(Smi::FromInt(1)));
+  __ Add(x0, x0, Smi::FromInt(1));
   __ Push(x0);

   EmitBackEdgeBookkeeping(stmt, &loop);
@@ -1582,7 +1582,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {

   // Create regexp literal using runtime function.
   // Result will be in x0.
-  __ Mov(x3, Operand(Smi::FromInt(expr->literal_index())));
+  __ Mov(x3, Smi::FromInt(expr->literal_index()));
   __ Mov(x2, Operand(expr->pattern()));
   __ Mov(x1, Operand(expr->flags()));
   __ Push(x4, x3, x2, x1);
@@ -1596,7 +1596,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
   __ B(&allocated);

   __ Bind(&runtime_allocate);
-  __ Mov(x10, Operand(Smi::FromInt(size)));
+  __ Mov(x10, Smi::FromInt(size));
   __ Push(x5, x10);
   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   __ Pop(x5);
@@ -1628,7 +1628,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   Handle<FixedArray> constant_properties = expr->constant_properties();
   __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
-  __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
+  __ Mov(x2, Smi::FromInt(expr->literal_index()));
   __ Mov(x1, Operand(constant_properties));
   int flags = expr->fast_elements()
       ? ObjectLiteral::kFastElements
@@ -1636,7 +1636,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   flags |= expr->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
-  __ Mov(x0, Operand(Smi::FromInt(flags)));
+  __ Mov(x0, Smi::FromInt(flags));
   int properties_count = constant_properties->length() / 2;
   const int max_cloned_properties =
       FastCloneShallowObjectStub::kMaximumClonedProperties;
@@ -1695,7 +1695,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
           __ Push(x0);
           VisitForStackValue(key);
           VisitForStackValue(value);
-          __ Mov(x0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
+          __ Mov(x0, Smi::FromInt(NONE));  // PropertyAttributes
           __ Push(x0);
           __ CallRuntime(Runtime::kSetProperty, 4);
         } else {
@@ -1733,7 +1733,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
     VisitForStackValue(it->first);
     EmitAccessor(it->second->getter);
     EmitAccessor(it->second->setter);
-    __ Mov(x10, Operand(Smi::FromInt(NONE)));
+    __ Mov(x10, Smi::FromInt(NONE));
     __ Push(x10);
     __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
   }
@@ -1779,8 +1779,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {

   __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
-  // TODO(jbramley): Can these Operand constructors be implicit?
-  __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
+  __ Mov(x2, Smi::FromInt(expr->literal_index()));
   __ Mov(x1, Operand(constant_elements));
   if (has_fast_elements && constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
@@ -1793,7 +1792,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
         isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
   } else if ((expr->depth() > 1) || Serializer::enabled() ||
              length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ Mov(x0, Operand(Smi::FromInt(flags)));
+    __ Mov(x0, Smi::FromInt(flags));
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
   } else {
@@ -1837,7 +1836,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
                           kLRHasBeenSaved, kDontSaveFPRegs,
                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
     } else {
-      __ Mov(x3, Operand(Smi::FromInt(i)));
+      __ Mov(x3, Smi::FromInt(i));
       StoreArrayLiteralElementStub stub;
       __ CallStub(&stub);
     }
@@ -2160,7 +2159,7 @@ void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
 void FullCodeGenerator::EmitCallStoreContextSlot(
     Handle<String> name, StrictMode strict_mode) {
   __ Mov(x11, Operand(name));
-  __ Mov(x10, Operand(Smi::FromInt(strict_mode)));
+  __ Mov(x10, Smi::FromInt(strict_mode));
   // jssp[0]  : mode.
   // jssp[8]  : name.
   // jssp[16] : context.
@@ -2410,7 +2409,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       TypeFeedbackInfo::UninitializedSentinel(isolate());
   StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
   __ LoadObject(x2, FeedbackVector());
-  __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
+  __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));

   // Record call targets in unoptimized code.
   CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
@@ -2441,9 +2440,9 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
   __ Push(x10, x11);

   // Prepare to push the language mode.
-  __ Mov(x10, Operand(Smi::FromInt(strict_mode())));
+  __ Mov(x10, Smi::FromInt(strict_mode()));
   // Prepare to push the start position of the scope the calls resides in.
-  __ Mov(x11, Operand(Smi::FromInt(scope()->start_position())));
+  __ Mov(x11, Smi::FromInt(scope()->start_position()));

   // Push.
   __ Push(x10, x11);
@@ -2616,7 +2615,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   }

   __ LoadObject(x2, FeedbackVector());
-  __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
+  __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));

   CallConstructStub stub(RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
@@ -2955,14 +2954,14 @@ void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
   // Skip the arguments adaptor frame if it exists.
   Label check_frame_marker;
   __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
-  __ Cmp(x1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ B(ne, &check_frame_marker);
   __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));

   // Check the marker in the calling frame.
   __ Bind(&check_frame_marker);
   __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
-  __ Cmp(x1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
+  __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
   Split(eq, if_true, if_false, fall_through);

@@ -3001,7 +3000,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   // ArgumentsAccessStub expects the key in x1.
   VisitForAccumulatorValue(args->at(0));
   __ Mov(x1, x0);
-  __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
+  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
   __ CallStub(&stub);
   context()->Plug(x0);
@@ -3012,12 +3011,12 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   ASSERT(expr->arguments()->length() == 0);
   Label exit;
   // Get the number of formal parameters.
-  __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
+  __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));

   // Check if the calling frame is an arguments adaptor frame.
   __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
   __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
-  __ Cmp(x13, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ B(ne, &exit);

   // Arguments adaptor case: Read the arguments length from the
@@ -3183,7 +3182,7 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
   } else {
     if (index->value() < JSDate::kFirstUncachedField) {
       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
-      __ Mov(x10, Operand(stamp));
+      __ Mov(x10, stamp);
       __ Ldr(stamp_addr, MemOperand(x10));
       __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
       __ Cmp(stamp_addr, stamp_cache);
@@ -3194,7 +3193,7 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
     }

     __ Bind(&runtime);
-    __ Mov(x1, Operand(index));
+    __ Mov(x1, index);
     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
     __ B(&done);
   }
@@ -3422,7 +3421,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {

   __ Bind(&need_conversion);
   // Move smi zero into the result register, which will trigger conversion.
-  __ Mov(result, Operand(Smi::FromInt(0)));
+  __ Mov(result, Smi::FromInt(0));
   __ B(&done);

   NopRuntimeCallHelper call_helper;
@@ -3675,7 +3674,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   //   element: Current array element.
   //   elements_end: Array end.
   if (FLAG_debug_code) {
-    __ Cmp(array_length, Operand(0));
+    __ Cmp(array_length, 0);
     __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   }
   __ Bind(&loop);
@@ -3888,7 +3887,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
       if (property != NULL) {
         VisitForStackValue(property->obj());
         VisitForStackValue(property->key());
-        __ Mov(x10, Operand(Smi::FromInt(strict_mode())));
+        __ Mov(x10, Smi::FromInt(strict_mode()));
         __ Push(x10);
         __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
         context()->Plug(x0);
@@ -3900,7 +3899,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
         if (var->IsUnallocated()) {
           __ Ldr(x12, GlobalObjectMemOperand());
           __ Mov(x11, Operand(var->name()));
-          __ Mov(x10, Operand(Smi::FromInt(SLOPPY)));
+          __ Mov(x10, Smi::FromInt(SLOPPY));
           __ Push(x12, x11, x10);
           __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
           context()->Plug(x0);
@@ -4068,10 +4067,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
       }
     }

-    __ Adds(x0, x0, Operand(Smi::FromInt(count_value)));
+    __ Adds(x0, x0, Smi::FromInt(count_value));
     __ B(vc, &done);
     // Call stub. Undo operation first.
-    __ Sub(x0, x0, Operand(Smi::FromInt(count_value)));
+    __ Sub(x0, x0, Smi::FromInt(count_value));
     __ B(&stub_call);
     __ Bind(&slow);
   }
@@ -4100,7 +4099,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {

   __ Bind(&stub_call);
   __ Mov(x1, x0);
-  __ Mov(x0, Operand(Smi::FromInt(count_value)));
+  __ Mov(x0, Smi::FromInt(count_value));

   // Record position before stub call.
   SetSourcePosition(expr->position());
@@ -4434,7 +4433,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ Bind(&suspend);
       VisitForAccumulatorValue(expr->generator_object());
       ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
-      __ Mov(x1, Operand(Smi::FromInt(continuation.pos())));
+      __ Mov(x1, Smi::FromInt(continuation.pos()));
       __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
       __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
       __ Mov(x1, cp);
@@ -4457,7 +4456,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {

     case Yield::FINAL: {
       VisitForAccumulatorValue(expr->generator_object());
-      __ Mov(x1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
+      __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
       __ Str(x1, FieldMemOperand(result_register(),
                                  JSGeneratorObject::kContinuationOffset));
       // Pop value from top-of-stack slot, box result into result register.
@@ -4509,7 +4508,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ Peek(x0, generator_object_depth);
       __ Push(x0);  // g
       ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
-      __ Mov(x1, Operand(Smi::FromInt(l_continuation.pos())));
+      __ Mov(x1, Smi::FromInt(l_continuation.pos()));
       __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
       __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
       __ Mov(x1, cp);
@@ -4587,8 +4586,8 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
                              JSGeneratorObject::kContinuationOffset));
   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
-  __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), eq, &closed_state);
-  __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), lt, &wrong_state);
+  __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state);
+  __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state);

   // Load suspended function and context.
   __ Ldr(cp, FieldMemOperand(generator_object,
@@ -4642,7 +4641,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
          UntagSmiFieldMemOperand(generator_object,
                                  JSGeneratorObject::kContinuationOffset));
   __ Add(x10, x10, x11);
-  __ Mov(x12, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
+  __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
   __ Str(x12, FieldMemOperand(generator_object,
                               JSGeneratorObject::kContinuationOffset));
   __ Br(x10);
@@ -4654,7 +4653,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
   // up the stack and the handlers.
   __ PushMultipleTimes(the_hole, operand_stack_size);

-  __ Mov(x10, Operand(Smi::FromInt(resume_mode)));
+  __ Mov(x10, Smi::FromInt(resume_mode));
   __ Push(generator_object, result_register(), x10);
   __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
   // Not reached: the runtime call returns elsewhere.
@@ -4798,12 +4797,12 @@ void FullCodeGenerator::EnterFinallyBlock() {
   // Store pending message while executing finally block.
   ExternalReference pending_message_obj =
       ExternalReference::address_of_pending_message_obj(isolate());
-  __ Mov(x10, Operand(pending_message_obj));
+  __ Mov(x10, pending_message_obj);
   __ Ldr(x10, MemOperand(x10));

   ExternalReference has_pending_message =
       ExternalReference::address_of_has_pending_message(isolate());
-  __ Mov(x11, Operand(has_pending_message));
+  __ Mov(x11, has_pending_message);
   __ Ldr(x11, MemOperand(x11));
   __ SmiTag(x11);

@@ -4811,7 +4810,7 @@ void FullCodeGenerator::EnterFinallyBlock() {

   ExternalReference pending_message_script =
       ExternalReference::address_of_pending_message_script(isolate());
-  __ Mov(x10, Operand(pending_message_script));
+  __ Mov(x10, pending_message_script);
   __ Ldr(x10, MemOperand(x10));
   __ Push(x10);
 }
@@ -4825,18 +4824,18 @@ void FullCodeGenerator::ExitFinallyBlock() {
   __ Pop(x10, x11, x12);
   ExternalReference pending_message_script =
       ExternalReference::address_of_pending_message_script(isolate());
-  __ Mov(x13, Operand(pending_message_script));
+  __ Mov(x13, pending_message_script);
   __ Str(x10, MemOperand(x13));

   __ SmiUntag(x11);
   ExternalReference has_pending_message =
       ExternalReference::address_of_has_pending_message(isolate());
-  __ Mov(x13, Operand(has_pending_message));
+  __ Mov(x13, has_pending_message);
   __ Str(x11, MemOperand(x13));

   ExternalReference pending_message_obj =
       ExternalReference::address_of_pending_message_obj(isolate());
-  __ Mov(x13, Operand(pending_message_obj));
+  __ Mov(x13, pending_message_obj);
   __ Str(x12, MemOperand(x13));

   // Restore result register and cooked return address from the stack.
@@ -141,7 +141,7 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
       NameDictionary::kElementsStartIndex * kPointerSize;
   static const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
   __ Ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
-  __ Tst(scratch1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
+  __ Tst(scratch1, Smi::FromInt(PropertyDetails::TypeField::kMask));
   __ B(ne, miss);

   // Get the value at the masked, scaled index and return.
@@ -376,7 +376,7 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
   // Check if element is in the range of mapped arguments. If not, jump
   // to the unmapped lookup.
   __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset));
-  __ Sub(scratch1, scratch1, Operand(Smi::FromInt(2)));
+  __ Sub(scratch1, scratch1, Smi::FromInt(2));
   __ Cmp(key, scratch1);
   __ B(hs, unmapped_case);

@@ -702,7 +702,7 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm,
   ExternalReference cache_keys =
       ExternalReference::keyed_lookup_cache_keys(isolate);

-  __ Mov(scratch3, Operand(cache_keys));
+  __ Mov(scratch3, cache_keys);
   __ Add(scratch3, scratch3, Operand(scratch2, LSL, kPointerSizeLog2 + 1));

   for (int i = 0; i < kEntriesPerBucket - 1; i++) {
@@ -732,7 +732,7 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm,
   // Hit on nth entry.
   for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
     __ Bind(&hit_on_nth_entry[i]);
-    __ Mov(scratch3, Operand(cache_field_offsets));
+    __ Mov(scratch3, cache_field_offsets);
     if (i != 0) {
       __ Add(scratch2, scratch2, i);
     }
@@ -939,7 +939,7 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,

   // Push PropertyAttributes(NONE) and strict_mode for runtime call.
   STATIC_ASSERT(NONE == 0);
-  __ Mov(x10, Operand(Smi::FromInt(strict_mode)));
+  __ Mov(x10, Smi::FromInt(strict_mode));
   __ Push(xzr, x10);

   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
@@ -996,7 +996,7 @@ static void KeyedStoreGenerateGenericHelper(
   __ Bind(&finish_store);
   if (increment_length == kIncrementLength) {
     // Add 1 to receiver->length.
-    __ Add(x10, key, Operand(Smi::FromInt(1)));
+    __ Add(x10, key, Smi::FromInt(1));
     __ Str(x10, FieldMemOperand(receiver, JSArray::kLengthOffset));
   }

@@ -1048,7 +1048,7 @@ static void KeyedStoreGenerateGenericHelper(
                                  &transition_double_elements);
   if (increment_length == kIncrementLength) {
     // Add 1 to receiver->length.
-    __ Add(x10, key, Operand(Smi::FromInt(1)));
+    __ Add(x10, key, Smi::FromInt(1));
     __ Str(x10, FieldMemOperand(receiver, JSArray::kLengthOffset));
   }
   __ Ret();
@@ -1285,8 +1285,8 @@ void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,

   __ Push(x1, x2, x0);

-  __ Mov(x11, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
-  __ Mov(x10, Operand(Smi::FromInt(strict_mode)));
+  __ Mov(x11, Smi::FromInt(NONE));  // PropertyAttributes
+  __ Mov(x10, Smi::FromInt(strict_mode));
   __ Push(x11, x10);

   // Do tail-call to runtime routine.
@@ -801,7 +801,7 @@ bool LCodeGen::GenerateDeferredCode() {
         ASSERT(info()->IsStub());
         frame_is_built_ = true;
         __ Push(lr, fp, cp);
-        __ Mov(fp, Operand(Smi::FromInt(StackFrame::STUB)));
+        __ Mov(fp, Smi::FromInt(StackFrame::STUB));
         __ Push(fp);
         __ Add(fp, __ StackPointer(),
                StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -855,8 +855,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
       Register stub_deopt_entry = temps.AcquireX();
       Register stub_marker = temps.AcquireX();

-      __ Mov(stub_deopt_entry,
-             Operand(ExternalReference::ForDeoptEntry(entry)));
+      __ Mov(stub_deopt_entry, ExternalReference::ForDeoptEntry(entry));
       if (needs_frame.is_bound()) {
         __ B(&needs_frame);
       } else {
@@ -865,7 +864,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
         // have a function pointer to install in the stack frame that we're
         // building, install a special marker there instead.
         ASSERT(info()->IsStub());
-        __ Mov(stub_marker, Operand(Smi::FromInt(StackFrame::STUB)));
+        __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB));
         __ Push(lr, fp, cp, stub_marker);
         __ Add(fp, __ StackPointer(), 2 * kPointerSize);
         __ Call(stub_deopt_entry);
@@ -1005,7 +1004,7 @@ void LCodeGen::DeoptimizeBranch(

     __ Push(x0, x1, x2);
     __ Mrs(x2, NZCV);
-    __ Mov(x0, Operand(count));
+    __ Mov(x0, count);
     __ Ldr(w1, MemOperand(x0));
     __ Subs(x1, x1, 1);
     __ B(gt, &not_zero);
@@ -1552,13 +1551,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   // TODO(3095996): Get rid of this. For now, we need to make the
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
-  __ Mov(ToRegister(instr->result()), Operand(Smi::FromInt(0)));
+  __ Mov(ToRegister(instr->result()), Smi::FromInt(0));

   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
   // We're in a SafepointRegistersScope so we can use any scratch registers.
   Register size = x0;
   if (instr->size()->IsConstantOperand()) {
-    __ Mov(size, Operand(ToSmi(LConstantOperand::cast(instr->size()))));
+    __ Mov(size, ToSmi(LConstantOperand::cast(instr->size())));
   } else {
     __ SmiTag(size, ToRegister32(instr->size()).X());
   }
@@ -1574,7 +1573,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
-  __ Mov(x10, Operand(Smi::FromInt(flags)));
+  __ Mov(x10, Smi::FromInt(flags));
   __ Push(size, x10);

   CallRuntimeFromDeferred(
@@ -1654,7 +1653,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
            MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
     __ Ldr(result,
            MemOperand(previous_fp, StandardFrameConstants::kContextOffset));
-    __ Cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+    __ Cmp(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
     __ Csel(result, fp, previous_fp, ne);
   }
 }
@@ -1779,9 +1778,9 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck *instr) {
         ToInteger32(LConstantOperand::cast(instr->index()));

     if (instr->hydrogen()->length()->representation().IsSmi()) {
-      __ Cmp(length, Operand(Smi::FromInt(constant_index)));
+      __ Cmp(length, Smi::FromInt(constant_index));
     } else {
-      __ Cmp(length, Operand(constant_index));
+      __ Cmp(length, constant_index);
     }
   } else {
     ASSERT(instr->hydrogen()->index()->representation().IsInteger32());
@@ -1819,7 +1818,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
     EmitBranch(instr, eq);
   } else if (type.IsSmi()) {
     ASSERT(!info()->IsStub());
-    EmitCompareAndBranch(instr, ne, value, Operand(Smi::FromInt(0)));
+    EmitCompareAndBranch(instr, ne, value, Smi::FromInt(0));
   } else if (type.IsJSArray()) {
     ASSERT(!info()->IsStub());
     EmitGoto(instr->TrueDestination(chunk()));
@@ -3029,7 +3028,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
     Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
     __ LoadRelocated(scratch, Operand(Handle<Object>(cell)));
     __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
-    __ cmp(map, Operand(scratch));
+    __ cmp(map, scratch);
     __ b(&cache_miss, ne);
     // The address of this instruction is computed relative to the map check
     // above, so check the size of the code generated.
@@ -3141,7 +3140,7 @@ void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
   // Skip the arguments adaptor frame if it exists.
   Label check_frame_marker;
   __ Ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
-  __ Cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ Cmp(temp2, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ B(ne, &check_frame_marker);
   __ Ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

@@ -4135,7 +4134,7 @@ void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
   __ B(pl, &dividend_is_not_negative);
   // Note that this is correct even for kMinInt operands.
   __ Neg(dividend, dividend);
-  __ And(dividend, dividend, Operand(mask));
+  __ And(dividend, dividend, mask);
   __ Negs(dividend, dividend);
   if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
     DeoptimizeIf(eq, instr->environment());
@@ -4144,7 +4143,7 @@ void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
   }

   __ bind(&dividend_is_not_negative);
-  __ And(dividend, dividend, Operand(mask));
+  __ And(dividend, dividend, mask);
   __ bind(&done);
 }

@@ -4874,7 +4873,7 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
   // TODO(all): if Mov could handle object in new space then it could be used
   // here.
   __ LoadHeapObject(scratch1, instr->hydrogen()->pairs());
-  __ Mov(scratch2, Operand(Smi::FromInt(instr->hydrogen()->flags())));
+  __ Mov(scratch2, Smi::FromInt(instr->hydrogen()->flags()));
   __ Push(cp, scratch1, scratch2);  // The context is the first argument.
   CallRuntime(Runtime::kDeclareGlobals, 3, instr);
 }
@@ -5587,7 +5586,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ B(&allocated);

   __ Bind(&runtime_allocate);
-  __ Mov(x0, Operand(Smi::FromInt(size)));
+  __ Mov(x0, Smi::FromInt(size));
   __ Push(x1, x0);
   CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
   __ Pop(x1);
@@ -5821,7 +5820,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
   __ AssertSmi(index);

   Label out_of_object, done;
-  __ Cmp(index, Operand(Smi::FromInt(0)));
+  __ Cmp(index, Smi::FromInt(0));
   __ B(lt, &out_of_object);

   STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
@@ -257,7 +257,7 @@ void LGapResolver::EmitMove(int index) {
       if (destination->IsRegister()) {
         Register dst = cgen_->ToRegister(destination);
         if (cgen_->IsSmi(constant_source)) {
-          __ Mov(dst, Operand(cgen_->ToSmi(constant_source)));
+          __ Mov(dst, cgen_->ToSmi(constant_source));
         } else if (cgen_->IsInteger32Constant(constant_source)) {
           __ Mov(dst, cgen_->ToInteger32(constant_source));
         } else {
@@ -271,7 +271,7 @@ void LGapResolver::EmitMove(int index) {
         ASSERT(!in_cycle_);  // Constant moves happen after all cycles are gone.
         need_to_restore_root_ = true;
         if (cgen_->IsSmi(constant_source)) {
-          __ Mov(kSavedValue, Operand(cgen_->ToSmi(constant_source)));
+          __ Mov(kSavedValue, cgen_->ToSmi(constant_source));
         } else if (cgen_->IsInteger32Constant(constant_source)) {
           __ Mov(kSavedValue, cgen_->ToInteger32(constant_source));
         } else {
@@ -1253,7 +1253,7 @@ void MacroAssembler::EnumLengthUntagged(Register dst, Register map) {
 void MacroAssembler::EnumLengthSmi(Register dst, Register map) {
   STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
   Ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
-  And(dst, dst, Operand(Smi::FromInt(Map::EnumLengthBits::kMask)));
+  And(dst, dst, Smi::FromInt(Map::EnumLengthBits::kMask));
 }


@@ -1326,10 +1326,10 @@ void MacroAssembler::TestJSArrayForAllocationMemento(Register receiver,

   Add(scratch1, receiver,
       JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag);
-  Cmp(scratch1, Operand(new_space_start));
+  Cmp(scratch1, new_space_start);
   B(lt, no_memento_found);

-  Mov(scratch2, Operand(new_space_allocation_top));
+  Mov(scratch2, new_space_allocation_top);
   Ldr(scratch2, MemOperand(scratch2));
   Cmp(scratch1, scratch2);
   B(gt, no_memento_found);
@@ -1367,8 +1367,8 @@ void MacroAssembler::InNewSpace(Register object,
   ASSERT(cond == eq || cond == ne);
   UseScratchRegisterScope temps(this);
   Register temp = temps.AcquireX();
-  And(temp, object, Operand(ExternalReference::new_space_mask(isolate())));
-  Cmp(temp, Operand(ExternalReference::new_space_start(isolate())));
+  And(temp, object, ExternalReference::new_space_mask(isolate()));
+  Cmp(temp, ExternalReference::new_space_start(isolate()));
   B(cond, branch);
 }

@@ -1471,7 +1471,7 @@ void MacroAssembler::Throw(BailoutReason reason) {
   RecordComment((msg != NULL) ? msg : "UNKNOWN");
 #endif

-  Mov(x0, Operand(Smi::FromInt(reason)));
+  Mov(x0, Smi::FromInt(reason));
   Push(x0);

   // Disable stub call restrictions to always allow calls to throw.
@@ -1600,7 +1600,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,

   // Place the necessary arguments.
   Mov(x0, num_arguments);
-  Mov(x1, Operand(ExternalReference(f, isolate())));
+  Mov(x1, ExternalReference(f, isolate()));

   CEntryStub stub(1, save_doubles);
   CallStub(&stub);
@@ -1639,7 +1639,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   Mov(x10, reinterpret_cast<uintptr_t>(is_profiling_flag));
   Ldrb(w10, MemOperand(x10));
   Cbz(w10, &profiler_disabled);
-  Mov(x3, Operand(thunk_ref));
+  Mov(x3, thunk_ref);
   B(&end_profiler_check);

   Bind(&profiler_disabled);
@@ -1662,7 +1662,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   Register limit_reg = x20;
   Register level_reg = w21;

-  Mov(handle_scope_base, Operand(next_address));
+  Mov(handle_scope_base, next_address);
   Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
   Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset));
   Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset));
@@ -1672,7 +1672,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   if (FLAG_log_timer_events) {
     FrameScope frame(this, StackFrame::MANUAL);
     PushSafepointRegisters();
-    Mov(x0, Operand(ExternalReference::isolate_address(isolate())));
+    Mov(x0, ExternalReference::isolate_address(isolate()));
     CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
     PopSafepointRegisters();
   }
@@ -1686,7 +1686,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   if (FLAG_log_timer_events) {
     FrameScope frame(this, StackFrame::MANUAL);
     PushSafepointRegisters();
-    Mov(x0, Operand(ExternalReference::isolate_address(isolate())));
+    Mov(x0, ExternalReference::isolate_address(isolate()));
     CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
     PopSafepointRegisters();
   }
@@ -1722,7 +1722,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   Peek(x22, (spill_offset + 3) * kXRegSize);

   // Check if the function scheduled an exception.
-  Mov(x5, Operand(ExternalReference::scheduled_exception_address(isolate())));
+  Mov(x5, ExternalReference::scheduled_exception_address(isolate()));
   Ldr(x5, MemOperand(x5));
   JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, &promote_scheduled_exception);
   Bind(&exception_handled);
@@ -1750,7 +1750,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   // Save the return value in a callee-save register.
   Register saved_result = x19;
   Mov(saved_result, x0);
-  Mov(x0, Operand(ExternalReference::isolate_address(isolate())));
+  Mov(x0, ExternalReference::isolate_address(isolate()));
   CallCFunction(
       ExternalReference::delete_handle_scope_extensions(isolate()), 1);
   Mov(x0, saved_result);
@@ -1761,7 +1761,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                            int num_arguments) {
   Mov(x0, num_arguments);
-  Mov(x1, Operand(ext));
+  Mov(x1, ext);

   CEntryStub stub(1);
   CallStub(&stub);
@@ -1769,7 +1769,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext,


 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
-  Mov(x1, Operand(builtin));
+  Mov(x1, builtin);
   CEntryStub stub(1);
   Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
@@ -1881,7 +1881,7 @@ void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_of_double_args) {
   UseScratchRegisterScope temps(this);
   Register temp = temps.AcquireX();
-  Mov(temp, Operand(function));
+  Mov(temp, function);
   CallCFunction(temp, num_of_reg_args, num_of_double_args);
 }

@@ -2912,7 +2912,7 @@ void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
     ASSERT(StackPointer().Is(jssp));
     UseScratchRegisterScope temps(this);
     Register temp = temps.AcquireX();
-    __ Mov(temp, Operand(Smi::FromInt(StackFrame::STUB)));
+    __ Mov(temp, Smi::FromInt(StackFrame::STUB));
     // Compiled stubs don't age, and so they don't need the predictable code
     // ageing sequence.
     __ Push(lr, fp, cp, temp);
@@ -2935,7 +2935,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
   Register code_reg = temps.AcquireX();
|
||||
|
||||
Push(lr, fp, cp);
|
||||
Mov(type_reg, Operand(Smi::FromInt(type)));
|
||||
Mov(type_reg, Smi::FromInt(type));
|
||||
Mov(code_reg, Operand(CodeObject()));
|
||||
Push(type_reg, code_reg);
|
||||
// jssp[4] : lr
|
||||
@ -3092,7 +3092,7 @@ void MacroAssembler::SetCounter(StatsCounter* counter, int value,
|
||||
Register scratch1, Register scratch2) {
|
||||
if (FLAG_native_code_counters && counter->Enabled()) {
|
||||
Mov(scratch1, value);
|
||||
Mov(scratch2, Operand(ExternalReference(counter)));
|
||||
Mov(scratch2, ExternalReference(counter));
|
||||
Str(scratch1, MemOperand(scratch2));
|
||||
}
|
||||
}
|
||||
@ -3102,7 +3102,7 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
|
||||
Register scratch1, Register scratch2) {
|
||||
ASSERT(value != 0);
|
||||
if (FLAG_native_code_counters && counter->Enabled()) {
|
||||
Mov(scratch2, Operand(ExternalReference(counter)));
|
||||
Mov(scratch2, ExternalReference(counter));
|
||||
Ldr(scratch1, MemOperand(scratch2));
|
||||
Add(scratch1, scratch1, value);
|
||||
Str(scratch1, MemOperand(scratch2));
|
||||
@ -3135,7 +3135,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
void MacroAssembler::DebugBreak() {
|
||||
Mov(x0, 0);
|
||||
Mov(x1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
|
||||
Mov(x1, ExternalReference(Runtime::kDebugBreak, isolate()));
|
||||
CEntryStub ces(1);
|
||||
ASSERT(AllowThisStubCall(&ces));
|
||||
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
|
||||
@ -3174,7 +3174,7 @@ void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
|
||||
}
|
||||
|
||||
// Link the current handler as the next handler.
|
||||
Mov(x11, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
|
||||
Mov(x11, ExternalReference(Isolate::kHandlerAddress, isolate()));
|
||||
Ldr(x10, MemOperand(x11));
|
||||
Push(x10);
|
||||
// Set this new handler as the current one.
|
||||
@ -3185,7 +3185,7 @@ void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
|
||||
void MacroAssembler::PopTryHandler() {
|
||||
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
|
||||
Pop(x10);
|
||||
Mov(x11, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
|
||||
Mov(x11, ExternalReference(Isolate::kHandlerAddress, isolate()));
|
||||
Drop(StackHandlerConstants::kSize - kXRegSize, kByteSizeInBytes);
|
||||
Str(x10, MemOperand(x11));
|
||||
}
|
||||
@ -3307,7 +3307,7 @@ void MacroAssembler::Allocate(Register object_size,
|
||||
// Set up allocation top address and object size registers.
|
||||
Register top_address = scratch1;
|
||||
Register allocation_limit = scratch2;
|
||||
Mov(top_address, Operand(heap_allocation_top));
|
||||
Mov(top_address, heap_allocation_top);
|
||||
|
||||
if ((flags & RESULT_CONTAINS_TOP) == 0) {
|
||||
// Load allocation top into result and the allocation limit.
|
||||
@ -3360,13 +3360,13 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
|
||||
Bic(object, object, kHeapObjectTagMask);
|
||||
#ifdef DEBUG
|
||||
// Check that the object un-allocated is below the current top.
|
||||
Mov(scratch, Operand(new_space_allocation_top));
|
||||
Mov(scratch, new_space_allocation_top);
|
||||
Ldr(scratch, MemOperand(scratch));
|
||||
Cmp(object, scratch);
|
||||
Check(lt, kUndoAllocationOfNonAllocatedMemory);
|
||||
#endif
|
||||
// Write the address of the object to un-allocate as the current top.
|
||||
Mov(scratch, Operand(new_space_allocation_top));
|
||||
Mov(scratch, new_space_allocation_top);
|
||||
Str(object, MemOperand(scratch));
|
||||
}
|
||||
|
||||
@ -3459,7 +3459,7 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
|
||||
|
||||
ExternalReference high_promotion_mode = ExternalReference::
|
||||
new_space_high_promotion_mode_active_address(isolate());
|
||||
Mov(scratch1, Operand(high_promotion_mode));
|
||||
Mov(scratch1, high_promotion_mode);
|
||||
Ldr(scratch1, MemOperand(scratch1));
|
||||
Cbz(scratch1, &allocate_new_space);
|
||||
|
||||
@ -4112,7 +4112,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
|
||||
Register scratch2 = temps.AcquireX();
|
||||
|
||||
// Load store buffer top.
|
||||
Mov(scratch2, Operand(ExternalReference::store_buffer_top(isolate())));
|
||||
Mov(scratch2, ExternalReference::store_buffer_top(isolate()));
|
||||
Ldr(scratch1, MemOperand(scratch2));
|
||||
// Store pointer to buffer and increment buffer top.
|
||||
Str(address, MemOperand(scratch1, kPointerSize, PostIndex));
|
||||
@ -4621,7 +4621,7 @@ void MacroAssembler::AssertIsString(const Register& object) {
|
||||
UseScratchRegisterScope temps(this);
|
||||
Register temp = temps.AcquireX();
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
Tst(object, Operand(kSmiTagMask));
|
||||
Tst(object, kSmiTagMask);
|
||||
Check(ne, kOperandIsNotAString);
|
||||
Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
|
||||
CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE);
|
||||
@ -4676,7 +4676,7 @@ void MacroAssembler::Abort(BailoutReason reason) {
|
||||
// Avoid infinite recursion; Push contains some assertions that use Abort.
|
||||
NoUseRealAbortsScope no_real_aborts(this);
|
||||
|
||||
Mov(x0, Operand(Smi::FromInt(reason)));
|
||||
Mov(x0, Smi::FromInt(reason));
|
||||
Push(x0);
|
||||
|
||||
if (!has_frame_) {
|
||||
@ -5078,7 +5078,7 @@ void MacroAssembler::TruncatingDiv(Register result,
|
||||
ASSERT(!AreAliased(result, dividend));
|
||||
ASSERT(result.Is32Bits() && dividend.Is32Bits());
|
||||
MultiplierAndShift ms(divisor);
|
||||
Mov(result, Operand(ms.multiplier()));
|
||||
Mov(result, ms.multiplier());
|
||||
Smull(result.X(), dividend, result);
|
||||
Asr(result.X(), result.X(), 32);
|
||||
if (divisor > 0 && ms.multiplier() < 0) Add(result, result, dividend);
|
||||
|
@ -408,7 +408,7 @@ void RegExpMacroAssemblerA64::CheckNotBackReferenceIgnoreCase(
|
||||
// Address of current input position.
|
||||
__ Add(x1, input_end(), Operand(current_input_offset(), SXTW));
|
||||
// Isolate.
|
||||
__ Mov(x3, Operand(ExternalReference::isolate_address(isolate())));
|
||||
__ Mov(x3, ExternalReference::isolate_address(isolate()));
|
||||
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm_);
|
||||
@ -634,7 +634,7 @@ bool RegExpMacroAssemblerA64::CheckSpecialCharacterClass(uc16 type,
|
||||
CompareAndBranchOrBacktrack(current_character(), 'z', hi, on_no_match);
|
||||
}
|
||||
ExternalReference map = ExternalReference::re_word_character_map();
|
||||
__ Mov(x10, Operand(map));
|
||||
__ Mov(x10, map);
|
||||
__ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
|
||||
CompareAndBranchOrBacktrack(w10, 0, eq, on_no_match);
|
||||
return true;
|
||||
@ -647,7 +647,7 @@ bool RegExpMacroAssemblerA64::CheckSpecialCharacterClass(uc16 type,
|
||||
__ B(hi, &done);
|
||||
}
|
||||
ExternalReference map = ExternalReference::re_word_character_map();
|
||||
__ Mov(x10, Operand(map));
|
||||
__ Mov(x10, map);
|
||||
__ Ldrb(w10, MemOperand(x10, current_character(), UXTW));
|
||||
CompareAndBranchOrBacktrack(w10, 0, ne, on_no_match);
|
||||
__ Bind(&done);
|
||||
@ -736,7 +736,7 @@ Handle<HeapObject> RegExpMacroAssemblerA64::GetCode(Handle<String> source) {
|
||||
|
||||
ExternalReference stack_limit =
|
||||
ExternalReference::address_of_stack_limit(isolate());
|
||||
__ Mov(x10, Operand(stack_limit));
|
||||
__ Mov(x10, stack_limit);
|
||||
__ Ldr(x10, MemOperand(x10));
|
||||
__ Subs(x10, csp, x10);
|
||||
|
||||
@ -1031,7 +1031,7 @@ Handle<HeapObject> RegExpMacroAssemblerA64::GetCode(Handle<String> source) {
|
||||
// The cached registers need to be retained.
|
||||
__ PushCPURegList(cached_registers);
|
||||
// Call GrowStack(backtrack_stackpointer(), &stack_base)
|
||||
__ Mov(x2, Operand(ExternalReference::isolate_address(isolate())));
|
||||
__ Mov(x2, ExternalReference::isolate_address(isolate()));
|
||||
__ Add(x1, frame_pointer(), kStackBase);
|
||||
__ Mov(x0, backtrack_stackpointer());
|
||||
ExternalReference grow_stack =
|
||||
@ -1455,7 +1455,7 @@ void RegExpMacroAssemblerA64::CallCheckStackGuardState(Register scratch) {
|
||||
|
||||
ExternalReference check_stack_guard_state =
|
||||
ExternalReference::re_check_stack_guard_state(isolate());
|
||||
__ Mov(scratch, Operand(check_stack_guard_state));
|
||||
__ Mov(scratch, check_stack_guard_state);
|
||||
DirectCEntryStub stub;
|
||||
stub.GenerateCall(masm_, scratch);
|
||||
|
||||
@ -1519,7 +1519,7 @@ void RegExpMacroAssemblerA64::CheckPreemption() {
|
||||
// Check for preemption.
|
||||
ExternalReference stack_limit =
|
||||
ExternalReference::address_of_stack_limit(isolate());
|
||||
__ Mov(x10, Operand(stack_limit));
|
||||
__ Mov(x10, stack_limit);
|
||||
__ Ldr(x10, MemOperand(x10));
|
||||
ASSERT(csp.Is(__ StackPointer()));
|
||||
__ Cmp(csp, x10);
|
||||
@ -1530,7 +1530,7 @@ void RegExpMacroAssemblerA64::CheckPreemption() {
|
||||
void RegExpMacroAssemblerA64::CheckStackLimit() {
|
||||
ExternalReference stack_limit =
|
||||
ExternalReference::address_of_regexp_stack_limit(isolate());
|
||||
__ Mov(x10, Operand(stack_limit));
|
||||
__ Mov(x10, stack_limit);
|
||||
__ Ldr(x10, MemOperand(x10));
|
||||
__ Cmp(backtrack_stackpointer(), x10);
|
||||
CallIf(&stack_overflow_label_, ls);
|
||||
|
@ -125,7 +125,7 @@ static void ProbeTable(Isolate* isolate,
|
||||
__ Add(scratch3, offset, Operand(offset, LSL, 1));
|
||||
|
||||
// Calculate the base address of the entry.
|
||||
__ Mov(scratch, Operand(key_offset));
|
||||
__ Mov(scratch, key_offset);
|
||||
__ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2));
|
||||
|
||||
// Check that the key in the entry matches the name.
|
||||
@ -761,7 +761,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
|
||||
ExternalReference ref = ExternalReference(&fun,
|
||||
ExternalReference::DIRECT_API_CALL,
|
||||
masm->isolate());
|
||||
__ Mov(api_function_address, Operand(ref));
|
||||
__ Mov(api_function_address, ref);
|
||||
|
||||
// Jump to stub.
|
||||
CallApiFunctionStub stub(is_store, call_data_undefined, argc);
|
||||
@ -1022,7 +1022,7 @@ void LoadStubCompiler::GenerateLoadCallback(
|
||||
// together. Can we use scratch1() here?
|
||||
__ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
|
||||
__ Push(scratch3(), scratch4());
|
||||
__ Mov(scratch3(), Operand(ExternalReference::isolate_address(isolate())));
|
||||
__ Mov(scratch3(), ExternalReference::isolate_address(isolate()));
|
||||
__ Push(scratch4(), scratch3(), reg, name());
|
||||
|
||||
Register args_addr = scratch2();
|
||||
@ -1044,7 +1044,7 @@ void LoadStubCompiler::GenerateLoadCallback(
|
||||
ApiFunction fun(getter_address);
|
||||
ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
|
||||
ExternalReference ref = ExternalReference(&fun, type, isolate());
|
||||
__ Mov(getter_address_reg, Operand(ref));
|
||||
__ Mov(getter_address_reg, ref);
|
||||
|
||||
CallApiGetterStub stub;
|
||||
__ TailCallStub(&stub);
|
||||
|
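Note on the pattern above: every hunk deletes an explicit Operand(...) wrapper at a Mov/Cmp/And/Tst call site and passes the raw value instead. This compiles because, as the commit title says, the A64 Operand class provides implicit (non-explicit) single-argument constructors, so the C++ compiler inserts the conversion automatically. Below is a minimal, self-contained sketch of that mechanism; the Operand, ExternalReference, and Mov here are simplified stand-ins for illustration, not V8's real definitions.

#include <cstdint>
#include <iostream>
#include <string>

// Stand-in for V8's ExternalReference: just a raw address.
struct ExternalReference {
  uintptr_t address;
};

class Operand {
 public:
  // Single-argument constructors intentionally left non-explicit, so
  // call sites may pass the raw value and let the compiler wrap it.
  Operand(int64_t immediate) : value_(immediate) {}
  Operand(ExternalReference ref)
      : value_(static_cast<int64_t>(ref.address)) {}

  int64_t value() const { return value_; }

 private:
  int64_t value_;
};

// Stand-in for MacroAssembler::Mov(const Register&, const Operand&).
void Mov(const std::string& reg, const Operand& operand) {
  std::cout << "mov " << reg << ", #" << operand.value() << "\n";
}

int main() {
  ExternalReference ref{0x1234};
  Mov("x1", Operand(ref));  // explicit wrapping, as before this commit
  Mov("x1", ref);           // implicit conversion, as after this commit
  Mov("x0", 42);            // integer immediates convert the same way
  return 0;
}

Both call forms construct the same temporary Operand, so the assembler output is unchanged; the edit is purely cosmetic at the call sites. Had the constructors been marked explicit, the shorter form would not compile, which is the trade-off accepted here for terser assembler code.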