[builtins] Introduce further constant & external reference indirections

This introduces further indirections for embedded constants and
external references for builtins generated by the macro-assembler.
The mechanisms used (LookupConstant and LookupExternalReference) are
identical to what we already use in CSA.

Almost all builtins are now isolate-independent in both release and
debug modes. snapshot_blob.bin is roughly 670K smaller in embedded
builds vs. non-embedded builds, while libv8.so is roughly 280K larger.

Bug: v8:6666
Change-Id: I7a6c2193ef5a763e6cf7543dd51597d6fff6c110
Reviewed-on: https://chromium-review.googlesource.com/1006581
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52810}
parent 27ed807156
commit f5d308510a
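For orientation before the diff: both lookups replace an absolute address embedded in the instruction stream with a root-register-relative load, so the generated builtin code no longer depends on a particular isolate. The stand-alone sketch below models only the constants-table half of that; the layout constants and names are simplified assumptions for illustration, not V8's actual definitions.

#include <cstdint>
#include <cstdio>
#include <vector>

// Minimal model: instead of baking a per-isolate object address into
// builtin code, code generation bakes in a small table index; at runtime
// the address is loaded from the constants table, which is itself
// reachable from the root register.
using Address = uintptr_t;
constexpr uint32_t kPointerSize = sizeof(Address);
constexpr uint32_t kHeapObjectTag = 1;                        // V8 tags heap pointers.
constexpr uint32_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumed).

// The offset LookupConstant computes for slot `index` of a tagged FixedArray.
uint32_t ConstantSlotOffset(uint32_t index) {
  return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
}

int main() {
  // "Heap": header (map, length) followed by three constant slots.
  std::vector<Address> backing = {0 /* map */, 3 /* length */,
                                  0x1000, 0x2000, 0x3000};

  // A tagged pointer to the table, standing in for the value produced by
  // LoadRoot(kBuiltinsConstantsTableRootIndex).
  Address table = reinterpret_cast<Address>(backing.data()) + kHeapObjectTag;

  uint32_t index = 2;  // Baked into the builtin at code-generation time.
  Address value =
      *reinterpret_cast<Address*>(table + ConstantSlotOffset(index));
  printf("slot %u -> %#zx\n", index, static_cast<size_t>(value));  // 0x3000
  return 0;
}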
@@ -227,19 +227,23 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label invoke, handler_entry, exit;
ProfileEntryHookStub::MaybeCallEntryHook(masm);
{
NoRootArrayScope no_root_array(masm);
// Called from C, so do not pop argc and args on exit (preserve sp)
// No need to save register-passed args
// Save callee-saved registers (incl. cp and fp), sp, and lr
__ stm(db_w, sp, kCalleeSaved | lr.bit());
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Save callee-saved vfp registers.
__ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
// Set up the reserved register for 0.0.
__ vmov(kDoubleRegZero, Double(0.0));
// Called from C, so do not pop argc and args on exit (preserve sp)
// No need to save register-passed args
// Save callee-saved registers (incl. cp and fp), sp, and lr
__ stm(db_w, sp, kCalleeSaved | lr.bit());
__ InitializeRootRegister();
// Save callee-saved vfp registers.
__ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
// Set up the reserved register for 0.0.
__ vmov(kDoubleRegZero, Double(0.0));
__ InitializeRootRegister();
}
// Get address of argv, see stm above.
// r0: code entry
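The hunk above moves the entry-stub prologue inside a NoRootArrayScope: until InitializeRootRegister runs, nothing may emit root-register-relative loads (including the new lookup paths). A hypothetical, simplified RAII guard in the same spirit, built on the root_array_available accessors this commit adds to TurboAssembler (not V8's actual implementation):

// Sketch of a NoRootArrayScope-like guard: while alive, it marks the root
// array as unavailable, so code paths that would otherwise emit
// root-relative loads (e.g. LookupConstant) stay disabled.
class Assembler {
 public:
  bool root_array_available() const { return root_array_available_; }
  void set_root_array_available(bool v) { root_array_available_ = v; }

 private:
  bool root_array_available_ = true;
};

class NoRootArrayScope {
 public:
  explicit NoRootArrayScope(Assembler* masm)
      : masm_(masm), old_available_(masm->root_array_available()) {
    // Entry code: the root register has not been set up yet.
    masm_->set_root_array_available(false);
  }
  ~NoRootArrayScope() { masm_->set_root_array_available(old_available_); }

 private:
  Assembler* masm_;
  bool old_available_;
};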
@@ -11,6 +11,7 @@
#include "src/base/division-by-constant.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/counters.h"

@@ -22,6 +23,7 @@
#include "src/objects-inl.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/serializer-common.h"
#include "src/arm/macro-assembler-arm.h"
@@ -122,6 +124,79 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
return bytes;
}

#ifdef V8_EMBEDDED_BUILTINS
void TurboAssembler::LookupConstant(Register destination,
Handle<Object> object) {
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);
// TODO(jgruber, v8:6666): Support self-references. Currently, we'd end up
// adding the temporary code object to the constants list, before creating the
// final object in Factory::CopyCode.
CHECK(code_object_.is_null() || !object.equals(code_object_));
// Ensure the given object is in the builtins constants table and fetch its
// index.
BuiltinsConstantsTableBuilder* builder =
isolate()->builtins_constants_table_builder();
uint32_t index = builder->AddObject(object);
// TODO(jgruber): Load builtins from the builtins table.
// TODO(jgruber): Ensure that code generation can recognize constant targets
// in kArchCallCodeObject.
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
// The ldr call below could end up clobbering the destination register when
// the offset does not fit into 12 bits (and thus needs to be loaded from the
// constant pool). In that case, we need to be extra-careful and temporarily
// use another register as the target.
const uint32_t offset =
FixedArray::kHeaderSize + index * kPointerSize - kHeapObjectTag;
const bool could_clobber_ip = !is_uint12(offset) && destination == ip;
Register reg = destination;
if (could_clobber_ip) {
Push(r7);
reg = r7;
}
LoadRoot(reg, Heap::kBuiltinsConstantsTableRootIndex);
ldr(destination, MemOperand(reg, offset));
if (could_clobber_ip) {
DCHECK_EQ(reg, r7);
Pop(r7);
}
}

void TurboAssembler::LookupExternalReference(Register destination,
ExternalReference reference) {
CHECK(reference.address() !=
ExternalReference::roots_array_start(isolate()).address());
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);
// Encode as an index into the external reference table stored on the isolate.
ExternalReferenceEncoder encoder(isolate());
ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
CHECK(!v.is_from_api());
uint32_t index = v.index();
// Generate code to load from the external reference table.
int32_t roots_to_external_reference_offset =
Heap::roots_to_external_reference_table_offset() +
ExternalReferenceTable::OffsetOfEntry(index);
ldr(destination,
MemOperand(kRootRegister, roots_to_external_reference_offset));
}
#endif  // V8_EMBEDDED_BUILTINS

void TurboAssembler::Jump(Register target, Condition cond) { bx(target, cond); }

void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
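One detail worth calling out in the ARM LookupConstant above: an ldr immediate offset only encodes 12 bits (0-4095), so a large enough table index forces the offset through the constant pool, which is what can clobber ip and motivates the r7 fallback. A quick back-of-the-envelope check; the header size, pointer size, and tag below are assumed 32-bit ARM values for illustration:

#include <cstdint>
#include <cstdio>

// Assumed values: 8-byte FixedArray header (map + length), 4-byte
// pointers, heap-object tag of 1.
constexpr uint32_t kHeaderSize = 8, kPointerSize = 4, kTag = 1;

bool is_uint12(uint32_t x) { return x < (1u << 12); }

int main() {
  for (uint32_t index = 0;; index++) {
    uint32_t offset = kHeaderSize + index * kPointerSize - kTag;
    if (!is_uint12(offset)) {
      // First index whose slot offset no longer fits an ldr immediate.
      printf("offset %u for index %u needs the constant pool\n", offset,
             index);
      return 0;
    }
  }
}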
@@ -139,6 +214,16 @@ void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode,
void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
LookupConstant(scratch, code);
add(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
Jump(scratch, cond);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
// 'code' is always generated ARM code, never THUMB code
Jump(static_cast<intptr_t>(code.address()), rmode, cond);
}
@@ -221,6 +306,16 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, TargetAddressStorageMode mode,
bool check_constant_pool) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
// Use ip directly instead of using UseScratchRegisterScope, as we do not
// preserve scratch registers across calls.
LookupConstant(ip, code);
add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
Call(ip, cond);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
// 'code' is always generated ARM code, never THUMB code
Call(code.address(), rmode, cond, mode);
}
@@ -261,9 +356,33 @@ void TurboAssembler::Push(Smi* smi) {
void TurboAssembler::Move(Register dst, Smi* smi) { mov(dst, Operand(smi)); }

void TurboAssembler::Move(Register dst, Handle<HeapObject> value) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
!value.equals(CodeObject())) {
Heap::RootListIndex root_index;
if (!isolate()->heap()->IsRootHandle(value, &root_index)) {
LookupConstant(dst, value);
} else {
LoadRoot(dst, root_index);
}
return;
}
#endif  // V8_EMBEDDED_BUILTINS
mov(dst, Operand(value));
}

void TurboAssembler::Move(Register dst, ExternalReference reference) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
reference.address() !=
ExternalReference::roots_array_start(isolate()).address()) {
LookupExternalReference(dst, reference);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
mov(dst, Operand(reference));
}

void TurboAssembler::Move(Register dst, Register src, Condition cond) {
if (dst != src) {
mov(dst, src, LeaveCC, cond);
@@ -539,8 +658,7 @@ void TurboAssembler::CallRecordWriteStub(
Pop(slot_parameter);
Pop(object_parameter);
Move(isolate_parameter,
Operand(ExternalReference::isolate_address(isolate())));
Move(isolate_parameter, ExternalReference::isolate_address(isolate()));
Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
Call(callable.code(), RelocInfo::CODE_TARGET);
@@ -558,10 +676,12 @@ void MacroAssembler::RecordWrite(Register object, Register address,
SmiCheck smi_check) {
DCHECK(object != value);
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
ldr(scratch, MemOperand(address));
cmp(scratch, value);
{
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
ldr(scratch, MemOperand(address));
cmp(scratch, value);
}
Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
}
@@ -1141,11 +1261,11 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
str(scratch, MemOperand(fp, ExitFrameConstants::kCodeOffset));
// Save the frame pointer and the context in top.
mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate())));
Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate()));
str(fp, MemOperand(scratch));
mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, isolate())));
Move(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, isolate())));
str(cp, MemOperand(scratch));
// Optionally save all double registers.
@@ -1204,8 +1324,8 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
// Clear top frame.
mov(r3, Operand::Zero());
mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate())));
Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate()));
str(r3, MemOperand(scratch));
// Restore current context from top and clear it in debug mode.
@@ -1372,9 +1492,9 @@ void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
const ParameterCount& actual) {
Label skip_hook;
ExternalReference debug_hook_avtive =
ExternalReference debug_hook_active =
ExternalReference::debug_hook_on_function_call_address(isolate());
mov(r4, Operand(debug_hook_avtive));
Move(r4, debug_hook_active);
ldrsb(r4, MemOperand(r4));
cmp(r4, Operand(0));
b(eq, &skip_hook);
@@ -1676,7 +1796,7 @@ void TurboAssembler::CallRuntimeDelayed(Zone* zone, Runtime::FunctionId fid,
// should remove this need and make the runtime routine entry code
// smarter.
mov(r0, Operand(f->nargs));
mov(r1, Operand(ExternalReference::Create(f)));
Move(r1, ExternalReference::Create(f));
CallStubDelayed(new (zone) CEntryStub(nullptr, 1, save_doubles));
}
@@ -1695,7 +1815,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
// should remove this need and make the runtime routine entry code
// smarter.
mov(r0, Operand(num_arguments));
mov(r1, Operand(ExternalReference::Create(f)));
Move(r1, ExternalReference::Create(f));
CEntryStub stub(isolate(), 1, save_doubles);
CallStub(&stub);
}
@@ -1719,7 +1839,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
// Thumb mode builtin.
DCHECK_EQ(builtin.address() & 1, 1);
#endif
mov(r1, Operand(builtin));
Move(r1, builtin);
CEntryStub stub(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
builtin_exit_frame);
Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
@@ -1742,7 +1862,7 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
mov(scratch2, Operand(ExternalReference::Create(counter)));
Move(scratch2, ExternalReference::Create(counter));
ldr(scratch1, MemOperand(scratch2));
add(scratch1, scratch1, Operand(value));
str(scratch1, MemOperand(scratch2));
@@ -1754,7 +1874,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
mov(scratch2, Operand(ExternalReference::Create(counter)));
Move(scratch2, ExternalReference::Create(counter));
ldr(scratch1, MemOperand(scratch2));
sub(scratch1, scratch1, Operand(value));
str(scratch1, MemOperand(scratch2));
@@ -1974,7 +2094,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,

void TurboAssembler::CheckFor32DRegs(Register scratch) {
mov(scratch, Operand(ExternalReference::cpu_features()));
Move(scratch, ExternalReference::cpu_features());
ldr(scratch, MemOperand(scratch));
tst(scratch, Operand(1u << VFP32DREGS));
}
@@ -2206,7 +2326,7 @@ void TurboAssembler::CallCFunction(ExternalReference function,
int num_double_arguments) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
mov(scratch, Operand(function));
Move(scratch, function);
CallCFunctionHelper(scratch, num_reg_arguments, num_double_arguments);
}
@@ -28,7 +28,7 @@ constexpr Register kInterpreterDispatchTableRegister = r8;
constexpr Register kJavaScriptCallArgCountRegister = r0;
constexpr Register kJavaScriptCallCodeStartRegister = r2;
constexpr Register kJavaScriptCallNewTargetRegister = r3;
constexpr Register kOffHeapTrampolineRegister = r6;
constexpr Register kOffHeapTrampolineRegister = ip;
constexpr Register kRuntimeCallFunctionRegister = r1;
constexpr Register kRuntimeCallArgCountRegister = r0;
constexpr Register kWasmInstanceRegister = r3;

@@ -321,6 +321,12 @@ class TurboAssembler : public Assembler {
void AsrPair(Register dst_low, Register dst_high, Register src_low,
Register src_high, uint32_t shift);

#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif  // V8_EMBEDDED_BUILTINS

// Returns the size of a call in instructions. Note, the value returned is
// only valid as long as no entries are added to the constant pool between
// checking the call size and emitting the actual call.

@@ -466,6 +472,7 @@ class TurboAssembler : public Assembler {
// Register move. May do nothing if the registers are identical.
void Move(Register dst, Smi* smi);
void Move(Register dst, Handle<HeapObject> value);
void Move(Register dst, ExternalReference reference);
void Move(Register dst, Register src, Condition cond = al);
void Move(Register dst, const Operand& src, SBit sbit = LeaveCC,
Condition cond = al) {

@@ -541,8 +548,12 @@ class TurboAssembler : public Assembler {
void ResetSpeculationPoisonRegister();

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

private:
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* const isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;
@@ -445,7 +445,7 @@ ALIAS_REGISTER(Register, ip1, x17);
ALIAS_REGISTER(Register, wip0, w16);
ALIAS_REGISTER(Register, wip1, w17);
// Root register.
ALIAS_REGISTER(Register, root, x26);
ALIAS_REGISTER(Register, kRootRegister, x26);
ALIAS_REGISTER(Register, rr, x26);
// Context pointer register.
ALIAS_REGISTER(Register, cp, x27);
@@ -303,23 +303,27 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Output:
// x0: result.
void JSEntryStub::Generate(MacroAssembler* masm) {
Register code_entry = x0;
// Enable instruction instrumentation. This only works on the simulator, and
// will have no effect on the model or real hardware.
__ EnableInstrumentation();
Label invoke, handler_entry, exit;
__ PushCalleeSavedRegisters();
Register code_entry = x0;
ProfileEntryHookStub::MaybeCallEntryHook(masm);
{
NoRootArrayScope no_root_array(masm);
// Set up the reserved register for 0.0.
__ Fmov(fp_zero, 0.0);
// Enable instruction instrumentation. This only works on the simulator, and
// will have no effect on the model or real hardware.
__ EnableInstrumentation();
// Initialize the root array register
__ InitializeRootRegister();
__ PushCalleeSavedRegisters();
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Set up the reserved register for 0.0.
__ Fmov(fp_zero, 0.0);
// Initialize the root array register
__ InitializeRootRegister();
}
// Build an entry frame (see layout below).
StackFrame::Type marker = type();
@@ -1039,7 +1039,7 @@ void TurboAssembler::Uxtw(const Register& rd, const Register& rn) {
void TurboAssembler::InitializeRootRegister() {
ExternalReference roots_array_start =
ExternalReference::roots_array_start(isolate());
Mov(root, Operand(roots_array_start));
Mov(kRootRegister, Operand(roots_array_start));
}
@@ -8,6 +8,7 @@
#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/debug/debug.h"

@@ -18,6 +19,7 @@
#include "src/instruction-stream.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/serializer-common.h"
#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/arm64/macro-assembler-arm64.h"  // Cannot be the first include
@@ -352,6 +354,16 @@ void TurboAssembler::Mov(const Register& rd, const Operand& operand,
}
}

void TurboAssembler::Mov(const Register& rd, ExternalReference reference) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupExternalReference(rd, reference);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Mov(rd, Operand(reference));
}

void TurboAssembler::Movi16bitHelper(const VRegister& vd, uint64_t imm) {
DCHECK(is_uint16(imm));
int byte1 = (imm & 0xFF);
@@ -1552,7 +1564,7 @@ void TurboAssembler::LoadRoot(CPURegister destination,
Heap::RootListIndex index) {
// TODO(jbramley): Most root values are constants, and can be synthesized
// without a load. Refer to the ARM back end for details.
Ldr(destination, MemOperand(root, index << kPointerSizeLog2));
Ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2));
}
@@ -1566,7 +1578,18 @@ void MacroAssembler::LoadObject(Register result, Handle<Object> object) {
}

void TurboAssembler::Move(Register dst, Register src) { Mov(dst, src); }
void TurboAssembler::Move(Register dst, Handle<HeapObject> x) { Mov(dst, x); }

void TurboAssembler::Move(Register dst, Handle<HeapObject> x) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
!x.equals(CodeObject())) {
LookupConstant(dst, x);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Mov(dst, x);
}

void TurboAssembler::Move(Register dst, Smi* src) { Mov(dst, src); }

void TurboAssembler::Swap(Register lhs, Register rhs) {
@@ -1851,7 +1874,67 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args,
}
}

void TurboAssembler::Jump(Register target) { Br(target); }

#ifdef V8_EMBEDDED_BUILTINS
void TurboAssembler::LookupConstant(Register destination,
Handle<Object> object) {
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);
// TODO(jgruber, v8:6666): Support self-references. Currently, we'd end up
// adding the temporary code object to the constants list, before creating the
// final object in Factory::CopyCode.
CHECK(code_object_.is_null() || !object.equals(code_object_));
// Ensure the given object is in the builtins constants table and fetch its
// index.
BuiltinsConstantsTableBuilder* builder =
isolate()->builtins_constants_table_builder();
uint32_t index = builder->AddObject(object);
// TODO(jgruber): Load builtins from the builtins table.
// TODO(jgruber): Ensure that code generation can recognize constant targets
// in kArchCallCodeObject.
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
Ldr(destination, FieldMemOperand(destination, FixedArray::kHeaderSize +
index * kPointerSize));
}

void TurboAssembler::LookupExternalReference(Register destination,
ExternalReference reference) {
CHECK(reference.address() !=
ExternalReference::roots_array_start(isolate()).address());
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);
// Encode as an index into the external reference table stored on the isolate.
ExternalReferenceEncoder encoder(isolate());
ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
CHECK(!v.is_from_api());
uint32_t index = v.index();
// Generate code to load from the external reference table.
int32_t roots_to_external_reference_offset =
Heap::roots_to_external_reference_table_offset() +
ExternalReferenceTable::OffsetOfEntry(index);
Ldr(destination,
MemOperand(kRootRegister, roots_to_external_reference_offset));
}
#endif  // V8_EMBEDDED_BUILTINS

void TurboAssembler::Jump(Register target, Condition cond) {
if (cond == nv) return;
Label done;
if (cond != al) B(NegateCondition(cond), &done);
Br(target);
Bind(&done);
}

void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
Condition cond) {
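The arm64 LookupExternalReference above (like its ARM twin) relies on the external reference table living at a fixed distance from the roots array, so a single kRootRegister-relative load reaches any entry. A minimal model of that address computation, with made-up offsets standing in for Heap::roots_to_external_reference_table_offset() and ExternalReferenceTable::OffsetOfEntry():

#include <cstdint>
#include <cstdio>

// Made-up numbers: the isolate stores the roots array and, at a fixed
// distance from it, the external reference table. A builtin can therefore
// reach any external reference as kRootRegister + constant.
constexpr int32_t kRootsToExternalRefTableOffset = 0x400;  // assumed
constexpr uint32_t kEntrySize = sizeof(uintptr_t);

int32_t OffsetOfEntry(uint32_t index) {
  return kRootsToExternalRefTableOffset +
         static_cast<int32_t>(index * kEntrySize);
}

int main() {
  uintptr_t root_register = 0x10000;  // pretend roots_array_start
  uint32_t index = 5;                 // encoder.Encode(reference).index()
  // The single load emitted by LookupExternalReference:
  //   Ldr(destination, MemOperand(kRootRegister, offset))
  printf("load from %#zx\n",
         static_cast<size_t>(root_register + OffsetOfEntry(index)));
  return 0;
}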
@@ -1874,6 +1957,16 @@ void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode,
void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.AcquireX();
LookupConstant(scratch, code);
Add(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
Jump(scratch, cond);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Jump(static_cast<intptr_t>(code.address()), rmode, cond);
}
@@ -1940,6 +2033,16 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode) {
Bind(&start_call);
#endif
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.AcquireX();
LookupConstant(scratch, code);
Add(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
Call(scratch);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Call(code.address(), rmode);
#ifdef DEBUG

@@ -1951,9 +2054,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode) {
void TurboAssembler::Call(ExternalReference target) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
// Immediate is in charge of setting the relocation mode to
// EXTERNAL_REFERENCE.
Ldr(temp, Immediate(target));
Mov(temp, target);
Call(temp);
}
@@ -2160,9 +2261,7 @@ void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
const ParameterCount& actual) {
Label skip_hook;
ExternalReference debug_hook_active =
ExternalReference::debug_hook_on_function_call_address(isolate());
Mov(x4, Operand(debug_hook_active));
Mov(x4, ExternalReference::debug_hook_on_function_call_address(isolate()));
Ldrsb(x4, MemOperand(x4));
Cbz(x4, &skip_hook);
@@ -2438,11 +2537,11 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
STATIC_ASSERT((-4 * kPointerSize) == ExitFrameConstants::kPaddingOffset);
// Save the frame pointer and context pointer in the top frame.
Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate())));
Mov(scratch,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
Str(fp, MemOperand(scratch));
Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, isolate())));
Mov(scratch,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
Str(cp, MemOperand(scratch));
STATIC_ASSERT((-4 * kPointerSize) == ExitFrameConstants::kLastExitFrameField);
@@ -2484,20 +2583,20 @@ void MacroAssembler::LeaveExitFrame(bool restore_doubles,
}
// Restore the context pointer from the top frame.
Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, isolate())));
Mov(scratch,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
Ldr(cp, MemOperand(scratch));
if (emit_debug_code()) {
// Also emit debug code to clear the cp in the top frame.
Mov(scratch2, Operand(Context::kInvalidContext));
Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, isolate())));
Mov(scratch, ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate()));
Str(scratch2, MemOperand(scratch));
}
// Clear the frame pointer from the top frame.
Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate())));
Mov(scratch,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
Str(xzr, MemOperand(scratch));
// Pop the exit frame.
@@ -2534,9 +2633,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,

void MacroAssembler::MaybeDropFrames() {
// Check whether we need to drop frames to restart a function on the stack.
ExternalReference restart_fp =
ExternalReference::debug_restart_fp_address(isolate());
Mov(x1, Operand(restart_fp));
Mov(x1, ExternalReference::debug_restart_fp_address(isolate()));
Ldr(x1, MemOperand(x1));
Tst(x1, x1);
Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
@@ -232,6 +232,7 @@ class TurboAssembler : public Assembler {
void Mov(const Register& rd, const Operand& operand,
DiscardMoveMode discard_mode = kDontDiscardForSameWReg);
void Mov(const Register& rd, ExternalReference reference);
void Mov(const Register& rd, uint64_t imm);
inline void Mov(const Register& rd, const Register& rm);
void Mov(const VRegister& vd, int vd_index, const VRegister& vn,

@@ -868,7 +869,13 @@ class TurboAssembler : public Assembler {
int shift_amount = 0);
void Movi(const VRegister& vd, uint64_t hi, uint64_t lo);

void Jump(Register target);
#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif  // V8_EMBEDDED_BUILTINS

void Jump(Register target, Condition cond = al);
void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);

@@ -1207,6 +1214,9 @@ class TurboAssembler : public Assembler {
void ResetSpeculationPoisonRegister();

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

protected:
// The actual Push and Pop implementations. These don't generate any code
// other than that required for the push or pop. This allows

@@ -1241,6 +1251,7 @@ class TurboAssembler : public Assembler {
private:
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* const isolate_;
#if DEBUG
// Tell whether any of the macro instruction can be used. When false the
@@ -27,7 +27,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExternalReference::Create(address).address()) &
1);
#endif
__ mov(r5, Operand(ExternalReference::Create(address)));
__ Move(r5, ExternalReference::Create(address));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);

@@ -850,8 +850,8 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ mov(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ Move(bytecode_size_table,
ExternalReference::bytecode_size_table_address());
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;

@@ -1228,9 +1228,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
Code::kHeaderSize - kHeapObjectTag));
// Initialize the dispatch table register.
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Move(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
// Get the bytecode array pointer from the frame.
__ ldr(kInterpreterBytecodeArrayRegister,
@@ -666,7 +666,6 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
// x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody().
Register new_target = x0;
Register function = x1;
Register receiver = x2;

@@ -675,19 +674,21 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
Register scratch = x10;
Register slots_to_claim = x11;
ProfileEntryHookStub::MaybeCallEntryHook(masm);
{
NoRootArrayScope no_root_array(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
__ InitializeRootRegister();
}
{
// Enter an internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
// Setup the context (we need to use the caller context from the isolate).
__ Mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kContextAddress, masm->isolate())));
__ Mov(scratch, ExternalReference::Create(IsolateAddressId::kContextAddress,
masm->isolate()));
__ Ldr(cp, MemOperand(scratch));
__ InitializeRootRegister();
// Claim enough space for the arguments, the receiver and the function,
// including an optional slot of padding.
__ Add(slots_to_claim, argc, 3);

@@ -755,7 +756,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ Mov(x25, x19);
__ Mov(x28, x19);
// Don't initialize the reserved registers.
// x26 : root register (root).
// x26 : root register (kRootRegister).
// x27 : context pointer (cp).
// x29 : frame pointer (fp).

@@ -933,8 +934,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ Mov(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ Mov(bytecode_size_table, ExternalReference::bytecode_size_table_address());
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;

@@ -1102,9 +1102,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// handler at the current bytecode offset.
Label do_dispatch;
__ bind(&do_dispatch);
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Mov(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
__ Ldrb(x18, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ Mov(x1, Operand(x18, LSL, kPointerSizeLog2));

@@ -1341,9 +1341,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
Code::kHeaderSize - kHeapObjectTag));
// Initialize the dispatch table register.
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Mov(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
// Get the bytecode array pointer from the frame.
__ Ldr(kInterpreterBytecodeArrayRegister,
@@ -291,34 +291,28 @@ bool Builtins::IsLazy(int index) {
bool Builtins::IsIsolateIndependent(int index) {
DCHECK(IsBuiltinId(index));
switch (index) {
#ifdef DEBUG
case kAbortJS:
case kContinueToCodeStubBuiltin:
case kContinueToCodeStubBuiltinWithResult:
case kContinueToJavaScriptBuiltin:
case kContinueToJavaScriptBuiltinWithResult:
case kKeyedLoadIC_Slow:
case kKeyedStoreIC_Slow:
case kLoadGlobalIC_Slow:
case kLoadIC_Slow:
case kStoreGlobalIC_Slow:
case kWasmStackGuard:
case kThrowWasmTrapUnreachable:
case kThrowWasmTrapMemOutOfBounds:
case kThrowWasmTrapDivByZero:
case kThrowWasmTrapDivUnrepresentable:
case kThrowWasmTrapRemByZero:
case kThrowWasmTrapFloatUnrepresentable:
case kThrowWasmTrapFuncInvalid:
case kThrowWasmTrapFuncSigMismatch:
#else
case kAbort:
case kAbortJS:
case kAdaptorWithBuiltinExitFrame:
case kAdaptorWithExitFrame:
case kAdd:
case kAllocateHeapNumber:
case kAllocateInNewSpace:
case kAllocateInOldSpace:
case kArgumentsAdaptorTrampoline:
case kArrayBufferConstructor:
case kArrayBufferConstructor_DoNotInitialize:
case kArrayBufferIsView:
case kArrayBufferPrototypeGetByteLength:
case kArrayBufferPrototypeSlice:
case kArrayConcat:
case kArrayConstructor:
case kArrayEvery:
case kArrayEveryLoopContinuation:
case kArrayEveryLoopEagerDeoptContinuation:
case kArrayEveryLoopLazyDeoptContinuation:
case kArrayFilter:
case kArrayFilterLoopContinuation:
case kArrayFilterLoopEagerDeoptContinuation:
case kArrayFilterLoopLazyDeoptContinuation:
case kArrayFindIndexLoopAfterCallbackLazyDeoptContinuation:

@@ -335,18 +329,33 @@ bool Builtins::IsIsolateIndependent(int index) {
case kArrayForEachLoopLazyDeoptContinuation:
case kArrayFrom:
case kArrayIncludes:
case kArrayIncludesHoleyDoubles:
case kArrayIncludesPackedDoubles:
case kArrayIncludesSmiOrObject:
case kArrayIndexOf:
case kArrayIndexOfHoleyDoubles:
case kArrayIndexOfPackedDoubles:
case kArrayIndexOfSmiOrObject:
case kArrayIsArray:
case kArrayIteratorPrototypeNext:
case kArrayMap:
case kArrayMapLoopContinuation:
case kArrayMapLoopEagerDeoptContinuation:
case kArrayMapLoopLazyDeoptContinuation:
case kArrayOf:
case kArrayPop:
case kArrayPrototypeEntries:
case kArrayPrototypeFind:
case kArrayPrototypeFindIndex:
case kArrayPrototypeFlatMap:
case kArrayPrototypeFlatten:
case kArrayPrototypeKeys:
case kArrayPrototypePop:
case kArrayPrototypePush:
case kArrayPrototypeShift:
case kArrayPrototypeSlice:
case kArrayPrototypeValues:
case kArrayPush:
case kArrayReduce:
case kArrayReduceLoopContinuation:
case kArrayReduceLoopEagerDeoptContinuation:

@@ -357,35 +366,159 @@ bool Builtins::IsIsolateIndependent(int index) {
case kArrayReduceRightLoopEagerDeoptContinuation:
case kArrayReduceRightLoopLazyDeoptContinuation:
case kArrayReduceRightPreLoopEagerDeoptContinuation:
case kArrayShift:
case kArraySome:
case kArraySomeLoopContinuation:
case kArraySomeLoopEagerDeoptContinuation:
case kArraySomeLoopLazyDeoptContinuation:
case kArraySplice:
case kArraySpliceTorque:
case kArrayUnshift:
case kAsyncFromSyncIteratorPrototypeNext:
case kAsyncFromSyncIteratorPrototypeReturn:
case kAsyncFromSyncIteratorPrototypeThrow:
case kAsyncFunctionAwaitCaught:
case kAsyncFunctionAwaitFulfill:
case kAsyncFunctionAwaitReject:
case kAsyncFunctionAwaitUncaught:
case kAsyncFunctionConstructor:
case kAsyncFunctionPromiseCreate:
case kAsyncFunctionPromiseRelease:
case kAsyncGeneratorAwaitCaught:
case kAsyncGeneratorAwaitFulfill:
case kAsyncGeneratorAwaitReject:
case kAsyncGeneratorAwaitUncaught:
case kAsyncGeneratorFunctionConstructor:
case kAsyncGeneratorPrototypeNext:
case kAsyncGeneratorPrototypeReturn:
case kAsyncGeneratorPrototypeThrow:
case kAsyncGeneratorReject:
case kAsyncGeneratorResolve:
case kAsyncGeneratorResumeNext:
case kAsyncGeneratorReturn:
case kAsyncGeneratorReturnClosedFulfill:
case kAsyncGeneratorReturnClosedReject:
case kAsyncGeneratorReturnFulfill:
case kAsyncGeneratorYield:
case kAsyncGeneratorYieldFulfill:
case kAsyncIteratorValueUnwrap:
case kAtomicsAdd:
case kAtomicsAnd:
case kAtomicsCompareExchange:
case kAtomicsExchange:
case kAtomicsIsLockFree:
case kAtomicsLoad:
case kAtomicsOr:
case kAtomicsStore:
case kAtomicsSub:
case kAtomicsWait:
case kAtomicsWake:
case kAtomicsXor:
case kBigIntAsIntN:
case kBigIntAsUintN:
case kBigIntConstructor:
case kBigIntPrototypeToLocaleString:
case kBigIntPrototypeToString:
case kBigIntPrototypeValueOf:
case kBitwiseAnd:
case kBitwiseNot:
case kBitwiseOr:
case kBitwiseXor:
case kBooleanConstructor:
case kBooleanPrototypeToString:
case kBooleanPrototypeValueOf:
case kCallForwardVarargs:
case kCallFunctionForwardVarargs:
case kCallProxy:
case kCallSitePrototypeGetColumnNumber:
case kCallSitePrototypeGetEvalOrigin:
case kCallSitePrototypeGetFileName:
case kCallSitePrototypeGetFunction:
case kCallSitePrototypeGetFunctionName:
case kCallSitePrototypeGetLineNumber:
case kCallSitePrototypeGetMethodName:
case kCallSitePrototypeGetPosition:
case kCallSitePrototypeGetScriptNameOrSourceURL:
case kCallSitePrototypeGetThis:
case kCallSitePrototypeGetTypeName:
case kCallSitePrototypeIsConstructor:
case kCallSitePrototypeIsEval:
case kCallSitePrototypeIsNative:
case kCallSitePrototypeIsToplevel:
case kCallSitePrototypeToString:
case kCallVarargs:
case kCallWithArrayLike:
case kCallWithSpread:
case kCloneFastJSArray:
case kConsoleAssert:
case kConsoleClear:
case kConsoleContext:
case kConsoleCount:
case kConsoleDebug:
case kConsoleDir:
case kConsoleDirXml:
case kConsoleError:
case kConsoleGroup:
case kConsoleGroupCollapsed:
case kConsoleGroupEnd:
case kConsoleInfo:
case kConsoleLog:
case kConsoleMarkTimeline:
case kConsoleProfile:
case kConsoleProfileEnd:
case kConsoleTable:
case kConsoleTime:
case kConsoleTimeEnd:
case kConsoleTimeline:
case kConsoleTimelineEnd:
case kConsoleTimeStamp:
case kConsoleTrace:
case kConsoleWarn:
case kConstruct:
case kConstructFunction:
case kConstructProxy:
case kConstructVarargs:
case kConstructWithArrayLike:
case kConstructWithSpread:
case kContinueToCodeStubBuiltin:
case kContinueToCodeStubBuiltinWithResult:
case kContinueToJavaScriptBuiltin:
case kContinueToJavaScriptBuiltinWithResult:
case kCopyFastSmiOrObjectElements:
case kCreateEmptyArrayLiteral:
case kCreateGeneratorObject:
case kCreateIterResultObject:
case kCreateRegExpLiteral:
case kCreateShallowArrayLiteral:
case kCreateShallowObjectLiteral:
case kCreateTypedArray:
case kDataViewConstructor:
case kDataViewPrototypeGetBigInt64:
case kDataViewPrototypeGetBigUint64:
case kDataViewPrototypeGetBuffer:
case kDataViewPrototypeGetByteLength:
case kDataViewPrototypeGetByteOffset:
case kDataViewPrototypeGetFloat32:
case kDataViewPrototypeGetFloat64:
case kDataViewPrototypeGetInt16:
case kDataViewPrototypeGetInt32:
case kDataViewPrototypeGetInt8:
case kDataViewPrototypeGetUint16:
case kDataViewPrototypeGetUint32:
case kDataViewPrototypeGetUint8:
case kDataViewPrototypeSetBigInt64:
case kDataViewPrototypeSetBigUint64:
case kDataViewPrototypeSetFloat32:
case kDataViewPrototypeSetFloat64:
case kDataViewPrototypeSetInt16:
case kDataViewPrototypeSetInt32:
case kDataViewPrototypeSetInt8:
case kDataViewPrototypeSetUint16:
case kDataViewPrototypeSetUint32:
case kDataViewPrototypeSetUint8:
case kDateConstructor:
case kDateNow:
case kDateParse:
case kDatePrototypeGetDate:
case kDatePrototypeGetDay:
case kDatePrototypeGetFullYear:

@@ -404,36 +537,112 @@ bool Builtins::IsIsolateIndependent(int index) {
case kDatePrototypeGetUTCMinutes:
case kDatePrototypeGetUTCMonth:
case kDatePrototypeGetUTCSeconds:
case kDatePrototypeGetYear:
case kDatePrototypeSetDate:
case kDatePrototypeSetFullYear:
case kDatePrototypeSetHours:
case kDatePrototypeSetMilliseconds:
case kDatePrototypeSetMinutes:
case kDatePrototypeSetMonth:
case kDatePrototypeSetSeconds:
case kDatePrototypeSetTime:
case kDatePrototypeSetUTCDate:
case kDatePrototypeSetUTCFullYear:
case kDatePrototypeSetUTCHours:
case kDatePrototypeSetUTCMilliseconds:
case kDatePrototypeSetUTCMinutes:
case kDatePrototypeSetUTCMonth:
case kDatePrototypeSetUTCSeconds:
case kDatePrototypeSetYear:
case kDatePrototypeToDateString:
case kDatePrototypeToISOString:
case kDatePrototypeToJson:
case kDatePrototypeToPrimitive:
case kDatePrototypeToString:
case kDatePrototypeToTimeString:
case kDatePrototypeToUTCString:
case kDatePrototypeValueOf:
case kDateUTC:
case kDebugBreakTrampoline:
case kDecrement:
case kDeleteProperty:
case kDivide:
case kDoubleToI:
case kEmptyFunction:
case kEnqueueMicrotask:
case kEqual:
case kErrorCaptureStackTrace:
case kErrorConstructor:
case kErrorPrototypeToString:
case kExponentiate:
case kExtractFastJSArray:
case kFastConsoleAssert:
case kFastFunctionPrototypeBind:
case kFastNewClosure:
case kFastNewFunctionContextEval:
case kFastNewFunctionContextFunction:
case kFastNewObject:
case kFindOrderedHashMapEntry:
case kFlatMapIntoArray:
case kFlattenIntoArray:
case kForInEnumerate:
case kForInFilter:
case kFulfillPromise:
case kFunctionConstructor:
case kFunctionPrototypeApply:
case kFunctionPrototypeBind:
case kFunctionPrototypeCall:
case kFunctionPrototypeHasInstance:
case kFunctionPrototypeToString:
case kGeneratorFunctionConstructor:
case kGeneratorPrototypeNext:
case kGeneratorPrototypeReturn:
case kGeneratorPrototypeThrow:
case kGetSuperConstructor:
case kGlobalDecodeURI:
case kGlobalDecodeURIComponent:
case kGlobalEncodeURI:
case kGlobalEncodeURIComponent:
case kGlobalEscape:
case kGlobalEval:
case kGlobalIsFinite:
case kGlobalIsNaN:
case kGlobalUnescape:
case kGreaterThan:
case kGreaterThanOrEqual:
case kGrowFastDoubleElements:
case kGrowFastSmiOrObjectElements:
case kHandleApiCall:
case kHandleApiCallAsConstructor:
case kHandleApiCallAsFunction:
case kHasProperty:
case kIllegal:
case kIncrement:
case kInstanceOf:
case kInternalArrayConstructor:
case kInterpreterEnterBytecodeAdvance:
case kInterpreterEnterBytecodeDispatch:
case kInterpreterPushArgsThenCall:
case kInterpreterPushArgsThenCallWithFinalSpread:
case kInterpreterPushArgsThenConstruct:
case kInterpreterPushArgsThenConstructArrayFunction:
case kInterpreterPushArgsThenConstructWithFinalSpread:
case kInterpreterPushUndefinedAndArgsThenCall:
case kInterruptCheck:
case kIsPromise:
case kIterableToList:
case kJSBuiltinsConstructStub:
case kJSConstructStubGenericRestrictedReturn:
case kJSConstructStubGenericUnrestrictedReturn:
case kJsonParse:
case kJsonStringify:
case kKeyedLoadIC:
case kKeyedLoadIC_Megamorphic:
case kKeyedLoadIC_PolymorphicName:
case kKeyedLoadIC_Slow:
case kKeyedLoadICTrampoline:
case kKeyedStoreIC:
case kKeyedStoreIC_Megamorphic:
case kKeyedStoreIC_Slow:
case kKeyedStoreICTrampoline:
case kLessThan:

@@ -451,61 +660,137 @@ bool Builtins::IsIsolateIndependent(int index) {
case kLoadIC_StringWrapperLength:
case kLoadICTrampoline:
case kLoadIC_Uninitialized:
case kMakeError:
case kMakeRangeError:
case kMakeSyntaxError:
case kMakeTypeError:
case kMakeURIError:
case kMapConstructor:
case kMapIteratorPrototypeNext:
case kMapPrototypeClear:
case kMapPrototypeDelete:
case kMapPrototypeEntries:
case kMapPrototypeForEach:
case kMapPrototypeGet:
case kMapPrototypeGetSize:
case kMapPrototypeHas:
case kMapPrototypeKeys:
case kMapPrototypeSet:
case kMapPrototypeValues:
case kMathAbs:
case kMathAcos:
case kMathAcosh:
case kMathAsin:
case kMathAsinh:
case kMathAtan:
case kMathAtan2:
case kMathAtanh:
case kMathCbrt:
case kMathCeil:
case kMathClz32:
case kMathCos:
case kMathCosh:
case kMathExp:
case kMathExpm1:
case kMathFloor:
case kMathFround:
case kMathHypot:
case kMathImul:
case kMathLog:
case kMathLog10:
case kMathLog1p:
case kMathLog2:
case kMathMax:
case kMathMin:
case kMathPow:
case kMathPowInternal:
case kMathRandom:
case kMathRound:
case kMathSign:
case kMathSin:
case kMathSinh:
case kMathSqrt:
case kMathTan:
case kMathTanh:
case kMathTrunc:
case kModulus:
case kMultiply:
case kNegate:
case kNewArgumentsElements:
case kNewPromiseCapability:
case kNonNumberToNumber:
case kNonNumberToNumeric:
case kNonPrimitiveToPrimitive_Default:
case kNonPrimitiveToPrimitive_Number:
case kNonPrimitiveToPrimitive_String:
case kNumberConstructor:
case kNumberFormatPrototypeFormatToParts:
case kNumberIsFinite:
case kNumberIsInteger:
case kNumberIsNaN:
case kNumberIsSafeInteger:
case kNumberParseFloat:
case kNumberParseInt:
case kNumberPrototypeToExponential:
case kNumberPrototypeToFixed:
case kNumberPrototypeToLocaleString:
case kNumberPrototypeToPrecision:
case kNumberPrototypeToString:
case kNumberPrototypeValueOf:
case kNumberToString:
case kObjectAssign:
case kObjectConstructor:
case kObjectCreate:
case kObjectDefineGetter:
case kObjectDefineProperties:
case kObjectDefineProperty:
case kObjectDefineSetter:
case kObjectEntries:
case kObjectFreeze:
case kObjectGetOwnPropertyDescriptor:
case kObjectGetOwnPropertyDescriptors:
case kObjectGetOwnPropertyNames:
case kObjectGetOwnPropertySymbols:
case kObjectGetPrototypeOf:
case kObjectIs:
case kObjectIsExtensible:
case kObjectIsFrozen:
case kObjectIsSealed:
case kObjectKeys:
case kObjectLookupGetter:
case kObjectLookupSetter:
case kObjectPreventExtensions:
case kObjectPrototypeGetProto:
case kObjectPrototypeHasOwnProperty:
case kObjectPrototypeIsPrototypeOf:
case kObjectPrototypePropertyIsEnumerable:
case kObjectPrototypeSetProto:
case kObjectPrototypeToLocaleString:
case kObjectPrototypeToString:
case kObjectPrototypeValueOf:
case kObjectSeal:
case kObjectSetPrototypeOf:
case kObjectValues:
case kOrderedHashTableHealIndex:
case kOrdinaryHasInstance:
case kOrdinaryToPrimitive_Number:
case kOrdinaryToPrimitive_String:
case kPerformPromiseThen:
case kPromiseAll:
case kPromiseAllResolveElementClosure:
case kPromiseCapabilityDefaultReject:
case kPromiseCapabilityDefaultResolve:
case kPromiseCatchFinally:
case kPromiseConstructor:
case kPromiseConstructorLazyDeoptContinuation:
case kPromiseFulfillReactionJob:
case kPromiseGetCapabilitiesExecutor:
case kPromiseInternalConstructor:
case kPromiseInternalReject:
case kPromiseInternalResolve:
case kPromisePrototypeCatch:
case kPromisePrototypeFinally:
case kPromisePrototypeThen:
case kPromiseRace:
case kPromiseReject:
case kPromiseRejectReactionJob:

@@ -515,51 +800,120 @@ bool Builtins::IsIsolateIndependent(int index) {
case kPromiseThenFinally:
case kPromiseThrowerFinally:
case kPromiseValueThunkFinally:
case kProxyConstructor:
case kProxyGetProperty:
case kProxyHasProperty:
case kProxyRevocable:
case kProxyRevoke:
case kProxySetProperty:
case kRecordWrite:
case kReflectApply:
case kReflectConstruct:
case kReflectDefineProperty:
case kReflectDeleteProperty:
case kReflectGet:
case kReflectGetOwnPropertyDescriptor:
case kReflectGetPrototypeOf:
case kReflectHas:
case kReflectIsExtensible:
case kReflectOwnKeys:
case kReflectPreventExtensions:
case kReflectSet:
case kReflectSetPrototypeOf:
case kRegExpCapture1Getter:
case kRegExpCapture2Getter:
case kRegExpCapture3Getter:
case kRegExpCapture4Getter:
case kRegExpCapture5Getter:
case kRegExpCapture6Getter:
case kRegExpCapture7Getter:
case kRegExpCapture8Getter:
case kRegExpCapture9Getter:
case kRegExpConstructor:
case kRegExpExecAtom:
case kRegExpInputGetter:
case kRegExpInputSetter:
case kRegExpInternalMatch:
case kRegExpLastMatchGetter:
case kRegExpLastParenGetter:
case kRegExpLeftContextGetter:
case kRegExpMatchFast:
case kRegExpPrototypeCompile:
case kRegExpPrototypeDotAllGetter:
case kRegExpPrototypeExec:
case kRegExpPrototypeExecSlow:
case kRegExpPrototypeFlagsGetter:
case kRegExpPrototypeGlobalGetter:
case kRegExpPrototypeIgnoreCaseGetter:
case kRegExpPrototypeMatch:
case kRegExpPrototypeMatchAll:
case kRegExpPrototypeMultilineGetter:
case kRegExpPrototypeReplace:
case kRegExpPrototypeSearch:
case kRegExpPrototypeSourceGetter:
case kRegExpPrototypeSplit:
case kRegExpPrototypeStickyGetter:
case kRegExpPrototypeTest:
case kRegExpPrototypeToString:
case kRegExpPrototypeUnicodeGetter:
case kRegExpReplace:
case kRegExpRightContextGetter:
case kRegExpSearchFast:
case kRegExpSplit:
case kRegExpStringIteratorPrototypeNext:
case kRejectPromise:
case kResolvePromise:
case kReturnReceiver:
case kRunMicrotasks:
case kSameValue:
case kSetConstructor:
case kSetIteratorPrototypeNext:
case kSetPrototypeAdd:
case kSetPrototypeClear:
case kSetPrototypeDelete:
case kSetPrototypeEntries:
case kSetPrototypeForEach:
case kSetPrototypeGetSize:
case kSetPrototypeHas:
case kSetPrototypeValues:
case kSharedArrayBufferPrototypeGetByteLength:
case kSharedArrayBufferPrototypeSlice:
case kShiftLeft:
case kShiftRight:
case kShiftRightLogical:
case kStackCheck:
case kStoreGlobalIC:
case kStoreGlobalIC_Slow:
case kStoreGlobalICTrampoline:
case kStoreIC:
case kStoreICTrampoline:
case kStoreIC_Uninitialized:
case kStoreInArrayLiteralIC:
case kStoreInArrayLiteralIC_Slow:
case kStrictEqual:
case kStrictPoisonPillThrower:
case kStringCharAt:
case kStringCodePointAtUTF16:
case kStringCodePointAtUTF32:
case kStringConstructor:
case kStringEqual:
case kStringFromCharCode:
case kStringFromCodePoint:
case kStringGreaterThan:
case kStringGreaterThanOrEqual:
case kStringIndexOf:
case kStringIteratorPrototypeNext:
case kStringLessThan:
case kStringLessThanOrEqual:
case kStringPrototypeAnchor:
case kStringPrototypeBig:
case kStringPrototypeBlink:
case kStringPrototypeBold:
case kStringPrototypeCharAt:
case kStringPrototypeCharCodeAt:
case kStringPrototypeCodePointAt:
case kStringPrototypeConcat:
case kStringPrototypeEndsWith:
case kStringPrototypeFixed:
case kStringPrototypeFontcolor:
case kStringPrototypeFontsize:

@@ -567,26 +921,40 @@ bool Builtins::IsIsolateIndependent(int index) {
case kStringPrototypeIndexOf:
case kStringPrototypeItalics:
case kStringPrototypeIterator:
case kStringPrototypeLastIndexOf:
case kStringPrototypeLink:
case kStringPrototypeLocaleCompare:
case kStringPrototypeMatch:
case kStringPrototypeMatchAll:
case kStringPrototypeNormalizeIntl:
case kStringPrototypePadEnd:
case kStringPrototypePadStart:
case kStringPrototypeRepeat:
case kStringPrototypeReplace:
case kStringPrototypeSearch:
case kStringPrototypeSlice:
case kStringPrototypeSmall:
case kStringPrototypeSplit:
case kStringPrototypeStartsWith:
case kStringPrototypeStrike:
case kStringPrototypeSub:
case kStringPrototypeSubstr:
case kStringPrototypeSubstring:
case kStringPrototypeSup:
#ifdef V8_INTL_SUPPORT
case kStringPrototypeToLowerCaseIntl:
case kStringToLowerCaseIntl:
#endif
case kStringPrototypeToString:
case kStringPrototypeToUpperCaseIntl:
case kStringPrototypeTrim:
case kStringPrototypeTrimEnd:
case kStringPrototypeTrimStart:
case kStringPrototypeValueOf:
case kStringRaw:
case kStringRepeat:
case kStringSubstring:
case kStringToNumber:
case kSubtract:
case kSymbolConstructor:
case kSymbolFor:
case kSymbolKeyFor:
case kSymbolPrototypeToPrimitive:
case kSymbolPrototypeToString:
case kSymbolPrototypeValueOf:

@@ -602,22 +970,39 @@ bool Builtins::IsIsolateIndependent(int index) {
case kToBooleanLazyDeoptContinuation:
case kToInteger:
case kToInteger_TruncateMinusZero:
case kToLength:
case kToName:
case kToNumber:
case kToNumeric:
case kToObject:
case kToString:
case kTypedArrayBaseConstructor:
case kTypedArrayConstructor:
case kTypedArrayConstructorLazyDeoptContinuation:
case kTypedArrayFrom:
case kTypedArrayInitialize:
case kTypedArrayInitializeWithBuffer:
case kTypedArrayOf:
|
||||
case kTypedArrayPrototypeBuffer:
|
||||
case kTypedArrayPrototypeByteLength:
|
||||
case kTypedArrayPrototypeByteOffset:
|
||||
case kTypedArrayPrototypeCopyWithin:
|
||||
case kTypedArrayPrototypeEntries:
|
||||
case kTypedArrayPrototypeEvery:
|
||||
case kTypedArrayPrototypeFill:
|
||||
case kTypedArrayPrototypeFilter:
|
||||
case kTypedArrayPrototypeFind:
|
||||
case kTypedArrayPrototypeFindIndex:
|
||||
case kTypedArrayPrototypeForEach:
|
||||
case kTypedArrayPrototypeIncludes:
|
||||
case kTypedArrayPrototypeIndexOf:
|
||||
case kTypedArrayPrototypeKeys:
|
||||
case kTypedArrayPrototypeLastIndexOf:
|
||||
case kTypedArrayPrototypeLength:
|
||||
case kTypedArrayPrototypeMap:
|
||||
case kTypedArrayPrototypeReduce:
|
||||
case kTypedArrayPrototypeReduceRight:
|
||||
case kTypedArrayPrototypeReverse:
|
||||
case kTypedArrayPrototypeSet:
|
||||
case kTypedArrayPrototypeSlice:
|
||||
case kTypedArrayPrototypeSome:
|
||||
@ -625,16 +1010,24 @@ bool Builtins::IsIsolateIndependent(int index) {
|
||||
case kTypedArrayPrototypeToStringTag:
|
||||
case kTypedArrayPrototypeValues:
|
||||
case kTypeof:
|
||||
case kUnsupportedThrower:
|
||||
case kWasmStackGuard:
|
||||
case kWeakCollectionDelete:
|
||||
case kWeakCollectionSet:
|
||||
case kWeakMapConstructor:
|
||||
case kWeakMapGet:
|
||||
case kWeakMapHas:
|
||||
case kWeakMapLookupHashIndex:
|
||||
case kWeakMapPrototypeDelete:
|
||||
case kWeakMapPrototypeSet:
|
||||
case kWeakSetConstructor:
|
||||
case kWeakSetHas:
|
||||
case kWeakSetPrototypeAdd:
|
||||
case kWeakSetPrototypeDelete:
|
||||
#endif // !DEBUG
|
||||
#ifdef V8_INTL_SUPPORT
|
||||
case kStringPrototypeToLowerCaseIntl:
|
||||
case kStringToLowerCaseIntl:
|
||||
#endif
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
|
@ -20,7 +20,7 @@ namespace internal {

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
__ li(s2, Operand(ExternalReference::Create(address)));
__ li(s2, ExternalReference::Create(address));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
@ -466,8 +466,6 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody

// ----------- S t a t e -------------
//  -- a0: new.target
//  -- a1: function
@ -832,8 +830,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));

__ li(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;
@ -999,8 +996,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Label do_dispatch;
__ bind(&do_dispatch);
__ li(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
__ Addu(a0, kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister);
__ lbu(t3, MemOperand(a0));
@ -1223,7 +1219,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ Branch(&trampoline_loaded);

__ bind(&builtin_trampoline);
__ li(t0, Operand(BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline)));
__ li(t0, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));

__ bind(&trampoline_loaded);
__ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
@ -1231,8 +1227,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {

// Initialize the dispatch table register.
__ li(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

// Get the bytecode array pointer from the frame.
__ lw(kInterpreterBytecodeArrayRegister,
@ -1325,8 +1320,7 @@ static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,

// IsSmi: Is builtin
__ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
__ li(scratch1,
Operand(ExternalReference::builtins_address(masm->isolate())));
__ li(scratch1, ExternalReference::builtins_address(masm->isolate()));
// Avoid untagging the Smi.
STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
STATIC_ASSERT(kSmiShiftSize == 0);
@ -1462,8 +1456,7 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
// Load the code object at builtins_table[builtin_id] into scratch1.

__ SmiUntag(scratch1);
__ li(scratch0,
Operand(ExternalReference::builtins_address(masm->isolate())));
__ li(scratch0, ExternalReference::builtins_address(masm->isolate()));
__ Lsa(scratch1, scratch0, scratch1, kPointerSizeLog2);
__ lw(scratch1, MemOperand(scratch1));

@ -20,7 +20,7 @@ namespace internal {

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
__ li(s2, Operand(ExternalReference::Create(address)));
__ li(s2, ExternalReference::Create(address));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
@ -605,8 +605,6 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody

// ----------- S t a t e -------------
//  -- a0: new.target
//  -- a1: function
@ -829,8 +827,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
Register bytecode_size_table = scratch1;
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ li(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;
@ -996,8 +993,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Label do_dispatch;
__ bind(&do_dispatch);
__ li(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
__ Daddu(a0, kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister);
__ Lbu(a7, MemOperand(a0));
@ -1220,7 +1216,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ Branch(&trampoline_loaded);

__ bind(&builtin_trampoline);
__ li(t0, Operand(BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline)));
__ li(t0, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));

__ bind(&trampoline_loaded);
__ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
@ -1228,8 +1224,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {

// Initialize the dispatch table register.
__ li(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

// Get the bytecode array pointer from the frame.
__ Ld(kInterpreterBytecodeArrayRegister,
@ -1322,8 +1317,7 @@ static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,

// IsSmi: Is builtin
__ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
__ li(scratch1,
Operand(ExternalReference::builtins_address(masm->isolate())));
__ li(scratch1, ExternalReference::builtins_address(masm->isolate()));
// Avoid untagging the Smi by merging the shift
STATIC_ASSERT(kPointerSizeLog2 < kSmiShift);
__ dsrl(sfi_data, sfi_data, kSmiShift - kPointerSizeLog2);
@ -1459,8 +1453,7 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
// Load the code object at builtins_table[builtin_id] into scratch1.

__ SmiUntag(scratch1);
__ li(scratch0,
Operand(ExternalReference::builtins_address(masm->isolate())));
__ li(scratch0, ExternalReference::builtins_address(masm->isolate()));
__ Dlsa(scratch1, scratch0, scratch1, kPointerSizeLog2);
__ Ld(scratch1, MemOperand(scratch1));

@ -75,7 +75,7 @@ void AdaptorWithExitFrameType(MacroAssembler* masm,
// JumpToExternalReference because rbx is loaded by Generate_adaptor.
CEntryStub ces(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
exit_frame_type == Builtins::BUILTIN_EXIT);
__ jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
__ Jump(ces.GetCode(), RelocInfo::CODE_TARGET);
}
}  // namespace

@ -740,7 +740,7 @@ static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
Runtime::FunctionId function_id) {
Label no_match;
__ SmiCompare(smi_entry, Smi::FromEnum(marker));
__ j(not_equal, &no_match, Label::kNear);
__ j(not_equal, &no_match);
GenerateTailCallToReturnedCode(masm, function_id);
__ bind(&no_match);
}
@ -2335,7 +2335,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
// Patch receiver to global proxy.
__ LoadGlobalProxy(rcx);
}
__ jmp(&convert_receiver, Label::kNear);
__ jmp(&convert_receiver);
}
__ bind(&convert_to_object);
{
@ -2500,17 +2500,25 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// -----------------------------------
StackArgumentsAccessor args(rsp, rax);

Label non_callable, non_function, non_smi;
// TODO(jgruber): Support conditional jumps (Assembler::j) with Code targets.

Label non_js_function, non_js_bound_function, non_callable, non_function,
non_smi;
__ JumpIfSmi(rdi, &non_callable);
__ bind(&non_smi);
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
__ j(equal, masm->isolate()->builtins()->CallFunction(mode),
RelocInfo::CODE_TARGET);
__ j(not_equal, &non_js_function, Label::kNear);
__ Jump(masm->isolate()->builtins()->CallFunction(mode),
RelocInfo::CODE_TARGET);

__ bind(&non_js_function);
__ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
__ j(equal, BUILTIN_CODE(masm->isolate(), CallBoundFunction),
RelocInfo::CODE_TARGET);
__ j(not_equal, &non_js_bound_function, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
RelocInfo::CODE_TARGET);

// Check if target has a [[Call]] internal method.
__ bind(&non_js_bound_function);
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(Map::IsCallableBit::kMask));
__ j(zero, &non_callable, Label::kNear);
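(A note on the x64 hunks above and below: per the TODO, the embedded-builtins indirection is only wired into the unconditional Jump/Call helpers, so each conditional j(cond, code_target) is rewritten as an inverted short branch over an unconditional Jump. A minimal sketch of that pattern, with `target` standing in for any builtin Code handle:)

  // Before: conditional jump straight to a Code target.
  //   __ j(equal, target, RelocInfo::CODE_TARGET);
  // After: invert the condition and branch over an unconditional Jump,
  // which can route through the constants-table lookup when needed.
  Label skip;
  __ j(not_equal, &skip, Label::kNear);
  __ Jump(target, RelocInfo::CODE_TARGET);
  __ bind(&skip);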
@ -2610,26 +2618,33 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {

// Check if target is a Smi.
Label non_constructor, non_proxy;
__ JumpIfSmi(rdi, &non_constructor, Label::kNear);
__ JumpIfSmi(rdi, &non_constructor);

// Check if target has a [[Construct]] internal method.
__ movq(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
__ j(zero, &non_constructor, Label::kNear);
__ j(zero, &non_constructor);

// TODO(jgruber): Support conditional jumps (Assembler::j) with Code targets.
Label non_js_function, non_js_bound_function;

// Dispatch based on instance type.
__ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
__ j(equal, BUILTIN_CODE(masm->isolate(), ConstructFunction),
RelocInfo::CODE_TARGET);
__ j(not_equal, &non_js_function);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
RelocInfo::CODE_TARGET);

// Only dispatch to bound functions after checking whether they are
// constructors.
__ bind(&non_js_function);
__ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
__ j(equal, BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
RelocInfo::CODE_TARGET);
__ j(not_equal, &non_js_bound_function);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
RelocInfo::CODE_TARGET);

// Only dispatch to proxies after checking whether they are constructors.
__ bind(&non_js_bound_function);
__ CmpInstanceType(rcx, JS_PROXY_TYPE);
__ j(not_equal, &non_proxy, Label::kNear);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),

@ -632,6 +632,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ ldr(scratch,
FieldMemOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset));
__ tst(scratch, Operand(1 << Code::kMarkedForDeoptimizationBit));
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET, ne);

@ -561,6 +561,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
FieldMemOperand(scratch, CodeDataContainer::kKindSpecificFlagsOffset));
Label not_deoptimized;
__ Tbz(scratch, Code::kMarkedForDeoptimizationBit, &not_deoptimized);
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET);
@ -835,7 +838,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
break;
case kArchRootsPointer:
__ mov(i.OutputRegister(), root);
__ mov(i.OutputRegister(), kRootRegister);
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),

@ -245,7 +245,7 @@ bool CodeAssembler::IsIntPtrAbsWithOverflowSupported() const {

#ifdef V8_EMBEDDED_BUILTINS
TNode<HeapObject> CodeAssembler::LookupConstant(Handle<HeapObject> object) {
DCHECK(isolate()->serializer_enabled());
DCHECK(isolate()->ShouldLoadConstantsFromRootList());

// Ensure the given object is in the builtins constants table and fetch its
// index.
@ -272,7 +272,7 @@ TNode<HeapObject> CodeAssembler::LookupConstant(Handle<HeapObject> object) {
// External references are stored in the external reference table.
TNode<ExternalReference> CodeAssembler::LookupExternalReference(
ExternalReference reference) {
DCHECK(isolate()->serializer_enabled());
DCHECK(isolate()->ShouldLoadConstantsFromRootList());

// Encode as an index into the external reference table stored on the isolate.

@ -313,7 +313,12 @@ TNode<Number> CodeAssembler::NumberConstant(double value) {
if (DoubleToSmiInteger(value, &smi_value)) {
return UncheckedCast<Number>(SmiConstant(smi_value));
} else {
return UncheckedCast<Number>(raw_assembler()->NumberConstant(value));
// We allocate the heap number constant eagerly at this point instead of
// deferring allocation to code generation
// (see AllocateAndInstallRequestedHeapObjects) since that makes it easier
// to generate constant lookups for embedded builtins.
return UncheckedCast<Number>(HeapConstant(
isolate()->factory()->NewHeapNumber(value, IMMUTABLE, TENURED)));
}
}

@ -331,7 +336,7 @@ TNode<HeapObject> CodeAssembler::UntypedHeapConstant(
#ifdef V8_EMBEDDED_BUILTINS
// Root constants are simply loaded from the root list, while non-root
// constants must be looked up from the builtins constants table.
if (ShouldLoadConstantsFromRootList()) {
if (isolate()->ShouldLoadConstantsFromRootList()) {
Heap::RootListIndex root_index;
if (!isolate()->heap()->IsRootHandle(object, &root_index)) {
return LookupConstant(object);
@ -354,7 +359,7 @@ TNode<Oddball> CodeAssembler::BooleanConstant(bool value) {
TNode<ExternalReference> CodeAssembler::ExternalConstant(
ExternalReference address) {
#ifdef V8_EMBEDDED_BUILTINS
if (ShouldLoadConstantsFromRootList()) {
if (isolate()->ShouldLoadConstantsFromRootList()) {
return LookupExternalReference(address);
}
#endif  // V8_EMBEDDED_BUILTINS

@ -654,13 +654,6 @@ class V8_EXPORT_PRIVATE CodeAssembler {
#endif

#ifdef V8_EMBEDDED_BUILTINS
// Off-heap builtins cannot embed constants within the code object itself,
// and thus need to load them from the root list.
bool ShouldLoadConstantsFromRootList() const {
return (isolate()->serializer_enabled() &&
isolate()->builtins_constants_table_builder() != nullptr);
}

TNode<HeapObject> LookupConstant(Handle<HeapObject> object);
TNode<ExternalReference> LookupExternalReference(ExternalReference reference);
#endif

@ -545,6 +545,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ mov(ebx, Operand(kJavaScriptCallCodeStartRegister, offset));
__ test(FieldOperand(ebx, CodeDataContainer::kKindSpecificFlagsOffset),
Immediate(1 << Code::kMarkedForDeoptimizationBit));
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ j(not_zero, code, RelocInfo::CODE_TARGET);

@ -558,6 +558,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ lw(at, MemOperand(kJavaScriptCallCodeStartRegister, offset));
__ lw(at, FieldMemOperand(at, CodeDataContainer::kKindSpecificFlagsOffset));
__ And(at, at, Operand(1 << Code::kMarkedForDeoptimizationBit));
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET, ne, at, Operand(zero_reg));

@ -570,6 +570,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ Ld(at, MemOperand(kJavaScriptCallCodeStartRegister, offset));
__ Lw(at, FieldMemOperand(at, CodeDataContainer::kKindSpecificFlagsOffset));
__ And(at, at, Operand(1 << Code::kMarkedForDeoptimizationBit));
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET, ne, at, Operand(zero_reg));

@ -816,6 +816,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ LoadWordArith(
r11, FieldMemOperand(r11, CodeDataContainer::kKindSpecificFlagsOffset));
__ TestBit(r11, Code::kMarkedForDeoptimizationBit);
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET, ne, cr0);

@ -1063,6 +1063,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ LoadW(ip,
FieldMemOperand(ip, CodeDataContainer::kKindSpecificFlagsOffset));
__ TestBit(ip, Code::kMarkedForDeoptimizationBit);
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ Jump(code, RelocInfo::CODE_TARGET, ne);

@ -654,6 +654,9 @@ void CodeGenerator::BailoutIfDeoptimized() {
__ movp(rbx, Operand(kJavaScriptCallCodeStartRegister, offset));
__ testl(FieldOperand(rbx, CodeDataContainer::kKindSpecificFlagsOffset),
Immediate(1 << Code::kMarkedForDeoptimizationBit));
// Ensure we're not serializing (otherwise we'd need to use an indirection to
// access the builtin below).
DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
Handle<Code> code = isolate()->builtins()->builtin_handle(
Builtins::kCompileLazyDeoptimizedCode);
__ j(not_zero, code, RelocInfo::CODE_TARGET);

@ -229,6 +229,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
int count,
BailoutType type) {
NoRootArrayScope no_root_array(masm);
TableEntryGenerator generator(masm, type, count);
generator.Generate();
}

@ -357,8 +357,12 @@ class TurboAssembler : public Assembler {

void ResetSpeculationPoisonRegister();

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

private:
bool has_frame_ = false;
bool root_array_available_ = false;
Isolate* const isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;

@ -1267,6 +1267,17 @@ class Isolate : private HiddenFactory {
return &partial_snapshot_cache_;
}

// Off-heap builtins cannot embed constants within the code object itself,
// and thus need to load them from the root list.
bool ShouldLoadConstantsFromRootList() const {
#ifdef V8_EMBEDDED_BUILTINS
return (serializer_enabled() &&
builtins_constants_table_builder() != nullptr);
#else
return false;
#endif  // V8_EMBEDDED_BUILTINS
}

#ifdef V8_EMBEDDED_BUILTINS
// Called only prior to serialization.
// This function copies off-heap-safe builtins off the heap, creates off-heap
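(For orientation: Isolate::ShouldLoadConstantsFromRootList() is the single predicate every port consults before deciding between an embedded constant and a root-list indirection. A minimal sketch of that dispatch; EmitLoadConstant is a hypothetical helper for illustration only, the real entry points are the per-port li/Jump/Call overloads elsewhere in this patch:)

  // Hypothetical helper, illustrative only: embed the constant directly
  // unless we are generating isolate-independent (embedded) builtins, in
  // which case route through the builtins constants table instead.
  void EmitLoadConstant(TurboAssembler* tasm, Register dst,
                        Handle<HeapObject> value) {
    if (tasm->isolate()->ShouldLoadConstantsFromRootList()) {
      tasm->LookupConstant(dst, value);   // root-list indirection
    } else {
      tasm->li(dst, Operand(value));      // classic embedded constant
    }
  }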
@ -189,6 +189,22 @@ class NoCurrentFrameScope {
bool saved_;
};

// Prevent the use of the RootArray during the lifetime of this
// scope object.
class NoRootArrayScope {
public:
explicit NoRootArrayScope(MacroAssembler* masm)
: masm_(masm), old_value_(masm->root_array_available()) {
masm->set_root_array_available(false);
}

~NoRootArrayScope() { masm_->set_root_array_available(old_value_); }

private:
MacroAssembler* masm_;
bool old_value_;
};
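(A minimal usage sketch of NoRootArrayScope, mirroring the JSEntryStub prologues rewritten in this patch: the scope clears root_array_available_ for code that runs before the root register holds a valid pointer, and the destructor restores the previous value:)

  {
    NoRootArrayScope no_root_array(masm);  // root-array accesses forbidden here
    // ... save callee-saved registers; kRootRegister is not yet valid ...
    __ InitializeRootRegister();           // root array usable from here on
  }  // destructor restores the old availability flag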
// Wrapper class for passing expected and actual parameter counts as
// either registers or immediate values. Used to make sure that the
// caller provides exactly the expected number of parameters to the

@ -102,7 +102,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ AssertStackIsAligned();

// a0 = argc, a1 = argv, a2 = isolate
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));
__ mov(a1, s1);

// To let the GC traverse the return address of the exit frames, we need to
@ -195,7 +195,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(3, 0, a0);
__ mov(a0, zero_reg);
__ mov(a1, zero_reg);
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));
__ CallCFunction(find_handler, 3);
}

@ -231,32 +231,36 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label invoke, handler_entry, exit;
Isolate* isolate = masm->isolate();

// Registers:
// a0: entry address
// a1: function
// a2: receiver
// a3: argc
//
// Stack:
// 4 args slots
// args
{
NoRootArrayScope no_root_array(masm);

ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Registers:
// a0: entry address
// a1: function
// a2: receiver
// a3: argc
//
// Stack:
// 4 args slots
// args

// Save callee saved registers on the stack.
__ MultiPush(kCalleeSaved | ra.bit());
ProfileEntryHookStub::MaybeCallEntryHook(masm);

// Save callee-saved FPU registers.
__ MultiPushFPU(kCalleeSavedFPU);
// Set up the reserved register for 0.0.
__ Move(kDoubleRegZero, 0.0);
// Save callee saved registers on the stack.
__ MultiPush(kCalleeSaved | ra.bit());

// Save callee-saved FPU registers.
__ MultiPushFPU(kCalleeSavedFPU);
// Set up the reserved register for 0.0.
__ Move(kDoubleRegZero, 0.0);

__ InitializeRootRegister();
}

// Load argv in s0 register.
int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;

__ InitializeRootRegister();
__ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));

// We build an EntryFrame.
@ -264,8 +268,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
StackFrame::Type marker = type();
__ li(t2, Operand(StackFrame::TypeToMarker(marker)));
__ li(t1, Operand(StackFrame::TypeToMarker(marker)));
__ li(t0, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate)));
__ li(t0,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate));
__ lw(t0, MemOperand(t0));
__ Push(t3, t2, t1, t0);
// Set up frame pointer for the frame to be pushed.
@ -291,7 +295,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label non_outermost_js;
ExternalReference js_entry_sp =
ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress, isolate);
__ li(t1, Operand(js_entry_sp));
__ li(t1, js_entry_sp);
__ lw(t2, MemOperand(t1));
__ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
__ sw(fp, MemOperand(t1));
@ -313,8 +317,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// field in the JSEnv and return a failure sentinel. Coming in here the
// fp will be invalid because the PushStackHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ li(t0, Operand(ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate)));
__ li(t0, ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate));
__ sw(v0, MemOperand(t0));  // We come back from 'invoke'. result is in v0.
__ LoadRoot(v0, Heap::kExceptionRootIndex);
__ b(&exit);  // b exposes branch delay slot.
@ -356,14 +360,14 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ pop(t1);
__ Branch(&non_outermost_js_2, ne, t1,
Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ li(t1, Operand(js_entry_sp));
__ li(t1, ExternalReference(js_entry_sp));
__ sw(zero_reg, MemOperand(t1));
__ bind(&non_outermost_js_2);

// Restore the top frame descriptors from the stack.
__ pop(t1);
__ li(t0, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate)));
__ li(t0,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate));
__ sw(t1, MemOperand(t0));

// Reset the stack to the callee saved registers.
@ -472,11 +476,11 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter.
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));

ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ li(t9, Operand(ExternalReference::Create(
&dispatcher, ExternalReference::BUILTIN_CALL)));
__ li(t9, ExternalReference::Create(&dispatcher,
ExternalReference::BUILTIN_CALL));
#endif
// Call C function through t9 to conform ABI for PIC.
__ Call(t9);
@ -794,7 +798,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,

Label profiler_disabled;
Label end_profiler_check;
__ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
__ li(t9, ExternalReference::is_profiling_address(isolate));
__ lb(t9, MemOperand(t9, 0));
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));

@ -818,7 +822,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_enter_external_function(), 1);
__ PopSafepointRegisters();
}
@ -833,7 +837,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_leave_external_function(), 1);
__ PopSafepointRegisters();
}
@ -875,7 +879,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,

// Check if the function scheduled an exception.
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ li(at, ExternalReference::scheduled_exception_address(isolate));
__ lw(t1, MemOperand(at));
__ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));

@ -891,7 +895,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ mov(s0, v0);
__ mov(a0, v0);
__ PrepareCallCFunction(1, s1);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
__ mov(v0, s0);
__ jmp(&leave_exit_frame);
@ -934,7 +938,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
// Push return value and default return value.
__ Push(scratch, scratch);
__ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
__ li(scratch, ExternalReference::isolate_address(masm->isolate()));
// Push isolate and holder.
__ Push(scratch, holder);

@ -1005,7 +1009,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ sw(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
__ sw(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
kPointerSize));
__ li(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ li(scratch, ExternalReference::isolate_address(isolate()));
__ sw(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
__ sw(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
// should_throw_on_error -> false

@ -9,6 +9,7 @@
#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/debug/debug.h"
@ -19,6 +20,7 @@
#include "src/mips/macro-assembler-mips.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/serializer-common.h"

namespace v8 {
namespace internal {
@ -125,14 +127,14 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
lw(destination, MemOperand(s6, index << kPointerSizeLog2));
lw(destination, MemOperand(kRootRegister, index << kPointerSizeLog2));
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
Condition cond, Register src1,
const Operand& src2) {
Branch(2, NegateCondition(cond), src1, src2);
lw(destination, MemOperand(s6, index << kPointerSizeLog2));
lw(destination, MemOperand(kRootRegister, index << kPointerSizeLog2));
}

@ -282,7 +284,7 @@ void TurboAssembler::CallRecordWriteStub(
Pop(slot_parameter);
Pop(object_parameter);

li(isolate_parameter, Operand(ExternalReference::isolate_address(isolate())));
li(isolate_parameter, ExternalReference::isolate_address(isolate()));
Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
Call(callable.code(), RelocInfo::CODE_TARGET);
@ -1320,6 +1322,23 @@ void TurboAssembler::Sc(Register rd, const MemOperand& rs) {
}

void TurboAssembler::li(Register dst, Handle<HeapObject> value, LiFlags mode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
!value.equals(CodeObject())) {
LookupConstant(dst, value);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
li(dst, Operand(value), mode);
}

void TurboAssembler::li(Register dst, ExternalReference value, LiFlags mode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupExternalReference(dst, value);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
li(dst, Operand(value), mode);
}

@ -3580,6 +3599,60 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
return false;
}

#ifdef V8_EMBEDDED_BUILTINS
void TurboAssembler::LookupConstant(Register destination,
Handle<Object> object) {
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// TODO(jgruber, v8:6666): Support self-references. Currently, we'd end up
// adding the temporary code object to the constants list, before creating the
// final object in Factory::CopyCode.
CHECK(code_object_.is_null() || !object.equals(code_object_));

// Ensure the given object is in the builtins constants table and fetch its
// index.
BuiltinsConstantsTableBuilder* builder =
isolate()->builtins_constants_table_builder();
uint32_t index = builder->AddObject(object);

// TODO(jgruber): Load builtins from the builtins table.
// TODO(jgruber): Ensure that code generation can recognize constant targets
// in kArchCallCodeObject.

DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));

LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
lw(destination, FieldMemOperand(destination, FixedArray::kHeaderSize +
index * kPointerSize));
}

void TurboAssembler::LookupExternalReference(Register destination,
ExternalReference reference) {
CHECK(reference.address() !=
ExternalReference::roots_array_start(isolate()).address());
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// Encode as an index into the external reference table stored on the isolate.

ExternalReferenceEncoder encoder(isolate());
ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
CHECK(!v.is_from_api());
uint32_t index = v.index();

// Generate code to load from the external reference table.

int32_t roots_to_external_reference_offset =
Heap::roots_to_external_reference_table_offset() +
ExternalReferenceTable::OffsetOfEntry(index);

lw(destination,
MemOperand(kRootRegister, roots_to_external_reference_offset));
}
#endif  // V8_EMBEDDED_BUILTINS
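(Both helpers above reduce to a single kRootRegister-relative load. A small sketch of the displacement computed by LookupExternalReference, using only functions that appear in this patch:)

  // Illustrative only: the displacement of external reference table entry
  // `index`, measured from kRootRegister (which points into the roots block).
  int32_t ExternalReferenceOffset(uint32_t index) {
    return Heap::roots_to_external_reference_table_offset() +
           ExternalReferenceTable::OffsetOfEntry(index);
  }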
void TurboAssembler::Jump(Register target, int16_t offset, Condition cond,
Register rs, const Operand& rt, BranchDelaySlot bd) {
BlockTrampolinePoolScope block_trampoline_pool(this);
@ -3707,6 +3780,15 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
LookupConstant(scratch, code);
Jump(scratch, Code::kHeaderSize - kHeapObjectTag, cond, rs, rt, bd);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Jump(static_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
}

@ -3858,6 +3940,15 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
BlockTrampolinePoolScope block_trampoline_pool(this);
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
LookupConstant(scratch, code);
Call(scratch, Code::kHeaderSize - kHeapObjectTag, cond, rs, rt, bd);
return;
}
#endif  // V8_EMBEDDED_BUILTINS
Label start;
bind(&start);
DCHECK(RelocInfo::IsCodeTarget(rmode));
@ -4033,9 +4124,7 @@ void TurboAssembler::Push(Smi* smi) {

void MacroAssembler::MaybeDropFrames() {
// Check whether we need to drop frames to restart a function on the stack.
ExternalReference restart_fp =
ExternalReference::debug_restart_fp_address(isolate());
li(a1, Operand(restart_fp));
li(a1, ExternalReference::debug_restart_fp_address(isolate()));
lw(a1, MemOperand(a1));
Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
ne, a1, Operand(zero_reg));
@ -4052,8 +4141,8 @@ void MacroAssembler::PushStackHandler() {
Push(Smi::kZero);  // Padding.

// Link the current handler as the next handler.
li(t2, Operand(ExternalReference::Create(IsolateAddressId::kHandlerAddress,
isolate())));
li(t2,
ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate()));
lw(t1, MemOperand(t2));
push(t1);

@ -4068,8 +4157,8 @@ void MacroAssembler::PopStackHandler() {
Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
li(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kHandlerAddress, isolate())));
li(scratch,
ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate()));
sw(a1, MemOperand(scratch));
}

@ -4275,9 +4364,7 @@ void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
const ParameterCount& expected,
const ParameterCount& actual) {
Label skip_hook;
ExternalReference debug_hook_active =
ExternalReference::debug_hook_on_function_call_address(isolate());
li(t0, Operand(debug_hook_active));
li(t0, ExternalReference::debug_hook_on_function_call_address(isolate()));
lb(t0, MemOperand(t0));
Branch(&skip_hook, eq, t0, Operand(zero_reg));

@ -4606,7 +4693,7 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
li(scratch2, Operand(ExternalReference::Create(counter)));
li(scratch2, ExternalReference::Create(counter));
lw(scratch1, MemOperand(scratch2));
Addu(scratch1, scratch1, Operand(value));
sw(scratch1, MemOperand(scratch2));
@ -4618,7 +4705,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
li(scratch2, Operand(ExternalReference::Create(counter)));
li(scratch2, ExternalReference::Create(counter));
lw(scratch1, MemOperand(scratch2));
Subu(scratch1, scratch1, Operand(value));
sw(scratch1, MemOperand(scratch2));
@ -4790,11 +4877,11 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

// Save the frame pointer and the context in top.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
sw(fp, MemOperand(t8));
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
sw(cp, MemOperand(t8));

const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
@ -4846,18 +4933,18 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
}

// Clear top frame.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
sw(zero_reg, MemOperand(t8));

// Restore current context from top and clear it in debug mode.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
lw(cp, MemOperand(t8));

#ifdef DEBUG
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
sw(a3, MemOperand(t8));
#endif

@ -5301,7 +5388,7 @@ void TurboAssembler::CallCFunction(ExternalReference function,
// Linux/MIPS convention demands that register t9 contains
// the address of the function being call in case of
// Position independent code
li(t9, Operand(function));
li(t9, function);
CallCFunctionHelper(t9, 0, num_reg_arguments, num_double_arguments);
}

@ -247,8 +247,15 @@ class TurboAssembler : public Assembler {
li(rd, Operand(j), mode);
}
void li(Register dst, Handle<HeapObject> value, LiFlags mode = OPTIMIZE_SIZE);
void li(Register dst, ExternalReference value, LiFlags mode = OPTIMIZE_SIZE);

// Jump, Call, and Ret pseudo instructions implementing inter-working.
#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif  // V8_EMBEDDED_BUILTINS

// Jump, Call, and Ret pseudo instructions implementing inter-working.
#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT

@ -534,7 +541,7 @@ class TurboAssembler : public Assembler {
// See comments at the beginning of CEntryStub::Generate.
inline void PrepareCEntryArgs(int num_args) { li(a0, num_args); }
inline void PrepareCEntryFunction(const ExternalReference& ref) {
li(a1, Operand(ref));
li(a1, ref);
}

void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
@ -838,6 +845,9 @@ class TurboAssembler : public Assembler {

void ResetSpeculationPoisonRegister();

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

protected:
void BranchLong(Label* L, BranchDelaySlot bdslot);

@ -847,6 +857,7 @@ class TurboAssembler : public Assembler {

private:
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* const isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;

@ -101,7 +101,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ AssertStackIsAligned();

// a0 = argc, a1 = argv, a2 = isolate
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));
__ mov(a1, s1);

// To let the GC traverse the return address of the exit frames, we need to
@ -194,7 +194,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(3, 0, a0);
__ mov(a0, zero_reg);
__ mov(a1, zero_reg);
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));
__ CallCFunction(find_handler, 3);
}

@ -230,32 +230,36 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label invoke, handler_entry, exit;
Isolate* isolate = masm->isolate();

// TODO(plind): unify the ABI description here.
// Registers:
// a0: entry address
// a1: function
// a2: receiver
// a3: argc
// a4 (a4): on mips64
{
NoRootArrayScope no_root_array(masm);

// Stack:
// 0 arg slots on mips64 (4 args slots on mips)
// args -- in a4/a4 on mips64, on stack on mips
// TODO(plind): unify the ABI description here.
// Registers:
// a0: entry address
// a1: function
// a2: receiver
// a3: argc
// a4 (a4): on mips64

ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Stack:
// 0 arg slots on mips64 (4 args slots on mips)
// args -- in a4/a4 on mips64, on stack on mips

// Save callee saved registers on the stack.
__ MultiPush(kCalleeSaved | ra.bit());
ProfileEntryHookStub::MaybeCallEntryHook(masm);

// Save callee-saved FPU registers.
__ MultiPushFPU(kCalleeSavedFPU);
// Set up the reserved register for 0.0.
__ Move(kDoubleRegZero, 0.0);
// Save callee saved registers on the stack.
__ MultiPush(kCalleeSaved | ra.bit());

// Load argv in s0 register.
__ mov(s0, a4);  // 5th parameter in mips64 a4 (a4) register.
// Save callee-saved FPU registers.
__ MultiPushFPU(kCalleeSavedFPU);
// Set up the reserved register for 0.0.
__ Move(kDoubleRegZero, 0.0);

__ InitializeRootRegister();
// Load argv in s0 register.
__ mov(s0, a4);  // 5th parameter in mips64 a4 (a4) register.

__ InitializeRootRegister();
}

// We build an EntryFrame.
__ li(a7, Operand(-1));  // Push a bad frame pointer to fail if it is used.
@ -290,7 +294,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label non_outermost_js;
ExternalReference js_entry_sp =
ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress, isolate);
__ li(a5, Operand(js_entry_sp));
__ li(a5, js_entry_sp);
__ Ld(a6, MemOperand(a5));
__ Branch(&non_outermost_js, ne, a6, Operand(zero_reg));
__ Sd(fp, MemOperand(a5));
@ -312,8 +316,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// field in the JSEnv and return a failure sentinel. Coming in here the
// fp will be invalid because the PushStackHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ li(a4, Operand(ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate)));
__ li(a4, ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate));
__ Sd(v0, MemOperand(a4)); // We come back from 'invoke'. result is in v0.
__ LoadRoot(v0, Heap::kExceptionRootIndex);
__ b(&exit); // b exposes branch delay slot.
@ -355,14 +359,14 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ pop(a5);
__ Branch(&non_outermost_js_2, ne, a5,
Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ li(a5, Operand(js_entry_sp));
__ li(a5, ExternalReference(js_entry_sp));
__ Sd(zero_reg, MemOperand(a5));
__ bind(&non_outermost_js_2);

// Restore the top frame descriptors from the stack.
__ pop(a5);
__ li(a4, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate)));
__ li(a4,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate));
__ Sd(a5, MemOperand(a4));

// Reset the stack to the callee saved registers.
@ -472,11 +476,11 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter.
__ li(a2, Operand(ExternalReference::isolate_address(isolate())));
__ li(a2, ExternalReference::isolate_address(isolate()));

ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ li(t9, Operand(ExternalReference::Create(
&dispatcher, ExternalReference::BUILTIN_CALL)));
__ li(t9, ExternalReference::Create(&dispatcher,
ExternalReference::BUILTIN_CALL));
#endif
// Call C function through t9 to conform ABI for PIC.
__ Call(t9);
@ -796,7 +800,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,

Label profiler_disabled;
Label end_profiler_check;
__ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
__ li(t9, ExternalReference::is_profiling_address(isolate));
__ Lb(t9, MemOperand(t9, 0));
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));

@ -820,7 +824,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_enter_external_function(), 1);
__ PopSafepointRegisters();
}
@ -835,7 +839,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_leave_external_function(), 1);
__ PopSafepointRegisters();
}
@ -876,7 +880,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,

// Check if the function scheduled an exception.
__ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
__ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ li(at, ExternalReference::scheduled_exception_address(isolate));
__ Ld(a5, MemOperand(at));
__ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));

@ -892,7 +896,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ mov(s0, v0);
__ mov(a0, v0);
__ PrepareCallCFunction(1, s1);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ li(a0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
__ mov(v0, s0);
__ jmp(&leave_exit_frame);
@ -935,7 +939,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
// Push return value and default return value.
__ Push(scratch, scratch);
__ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
__ li(scratch, ExternalReference::isolate_address(masm->isolate()));
// Push isolate and holder.
__ Push(scratch, holder);

@ -1009,7 +1013,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
__ Sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
kPointerSize));
__ li(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ li(scratch, ExternalReference::isolate_address(isolate()));
__ Sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
__ Sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
// should_throw_on_error -> false

@ -9,6 +9,7 @@
#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/debug/debug.h"
@ -19,6 +20,7 @@
#include "src/mips64/macro-assembler-mips64.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/serializer-common.h"

namespace v8 {
namespace internal {
@ -282,7 +284,7 @@ void TurboAssembler::CallRecordWriteStub(
Pop(slot_parameter);
Pop(object_parameter);

li(isolate_parameter, Operand(ExternalReference::isolate_address(isolate())));
li(isolate_parameter, ExternalReference::isolate_address(isolate()));
Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
Call(callable.code(), RelocInfo::CODE_TARGET);
@ -1559,6 +1561,23 @@ void TurboAssembler::Scd(Register rd, const MemOperand& rs) {
}

void TurboAssembler::li(Register dst, Handle<HeapObject> value, LiFlags mode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
!value.equals(CodeObject())) {
LookupConstant(dst, value);
return;
}
#endif // V8_EMBEDDED_BUILTINS
li(dst, Operand(value), mode);
}

void TurboAssembler::li(Register dst, ExternalReference value, LiFlags mode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupExternalReference(dst, value);
return;
}
#endif // V8_EMBEDDED_BUILTINS
li(dst, Operand(value), mode);
}

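These two li() overloads are the dispatch point for the new indirections: whenever the root array is usable and the isolate asks for constants to be loaded from the root list, an embedded raw pointer is replaced by a table lookup. The following standalone sketch models the idea with toy types (none of these names are V8's); the point is that the generated code ends up containing only a small table index instead of an isolate-specific address.

    // Toy model of the constants-table indirection (illustrative only).
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct ToyAssembler {
      // Models BuiltinsConstantsTableBuilder: a growable list of constants
      // that is materialized as a heap array reachable from a base register.
      std::vector<const void*> constants;

      // Models the AddObject() step of LookupConstant(): no deduplication
      // for brevity; returns the slot index instead of the pointer itself.
      uint32_t AddConstant(const void* object) {
        constants.push_back(object);
        return static_cast<uint32_t>(constants.size() - 1);
      }
    };

    int main() {
      ToyAssembler masm;
      static const int some_heap_object = 42;
      uint32_t index = masm.AddConstant(&some_heap_object);
      // The emitted load would then be root-relative, e.g.
      //   Ld(dst, MemOperand(table_base, kHeaderSize + index * kPointerSize));
      // so only `index` is baked into the instruction stream.
      std::printf("constant lives in slot %u\n", index);
      return 0;
    }
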
@ -4089,6 +4108,60 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
return false;
}

#ifdef V8_EMBEDDED_BUILTINS
void TurboAssembler::LookupConstant(Register destination,
Handle<Object> object) {
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// TODO(jgruber, v8:6666): Support self-references. Currently, we'd end up
// adding the temporary code object to the constants list, before creating the
// final object in Factory::CopyCode.
CHECK(code_object_.is_null() || !object.equals(code_object_));

// Ensure the given object is in the builtins constants table and fetch its
// index.
BuiltinsConstantsTableBuilder* builder =
isolate()->builtins_constants_table_builder();
uint32_t index = builder->AddObject(object);

// TODO(jgruber): Load builtins from the builtins table.
// TODO(jgruber): Ensure that code generation can recognize constant targets
// in kArchCallCodeObject.

DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));

LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
Ld(destination, FieldMemOperand(destination, FixedArray::kHeaderSize +
index * kPointerSize));
}

void TurboAssembler::LookupExternalReference(Register destination,
ExternalReference reference) {
CHECK(reference.address() !=
ExternalReference::roots_array_start(isolate()).address());
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// Encode as an index into the external reference table stored on the isolate.

ExternalReferenceEncoder encoder(isolate());
ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
CHECK(!v.is_from_api());
uint32_t index = v.index();

// Generate code to load from the external reference table.

int32_t roots_to_external_reference_offset =
Heap::roots_to_external_reference_table_offset() +
ExternalReferenceTable::OffsetOfEntry(index);

Ld(destination,
MemOperand(kRootRegister, roots_to_external_reference_offset));
}
#endif // V8_EMBEDDED_BUILTINS

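LookupExternalReference leans on ExternalReferenceEncoder to turn an arbitrary C++ address into a stable table index. A toy model of that encoding step, assuming the table behaves like a flat append-only array keyed by address (names are illustrative, not V8's API):

    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    struct ToyExternalReferenceEncoder {
      std::vector<uintptr_t> table;                    // index -> address
      std::unordered_map<uintptr_t, uint32_t> index_;  // address -> index

      // Returns a stable index for the address, adding it on first use.
      uint32_t Encode(uintptr_t address) {
        auto it = index_.find(address);
        if (it != index_.end()) return it->second;
        table.push_back(address);
        uint32_t index = static_cast<uint32_t>(table.size() - 1);
        index_.emplace(address, index);
        return index;
      }
    };

    int main() {
      ToyExternalReferenceEncoder encoder;
      static int some_counter = 0;
      uint32_t index = encoder.Encode(reinterpret_cast<uintptr_t>(&some_counter));
      // Generated code embeds `index`, never the raw &some_counter address.
      std::printf("encoded as index %u\n", index);
      return 0;
    }
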
void TurboAssembler::Jump(Register target, Condition cond, Register rs,
const Operand& rt, BranchDelaySlot bd) {
BlockTrampolinePoolScope block_trampoline_pool(this);
@ -4137,6 +4210,16 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
LookupConstant(scratch, code);
Daddu(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
Jump(scratch, cond, rs, rt, bd);
return;
}
#endif // V8_EMBEDDED_BUILTINS
Jump(static_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
}

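Both the Jump and Call fast paths materialize the target Code object via LookupConstant and then advance to its first instruction with Code::kHeaderSize - kHeapObjectTag. A small sketch of that tagged-pointer arithmetic (the header size constant below is illustrative, not V8's real value):

    #include <cstdint>
    #include <cstdio>

    constexpr intptr_t kHeapObjectTag = 1;     // low bit marks a heap pointer
    constexpr intptr_t kCodeHeaderSize = 0x60; // illustrative header size

    // Untag the pointer and skip the Code header in a single addition, like
    // Daddu(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag)).
    intptr_t EntryAddress(intptr_t tagged_code_pointer) {
      return tagged_code_pointer + (kCodeHeaderSize - kHeapObjectTag);
    }

    int main() {
      intptr_t tagged = 0x10001;  // some tagged Code pointer (illustrative)
      std::printf("entry at %#llx\n",
                  static_cast<unsigned long long>(EntryAddress(tagged)));
      return 0;
    }
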
@ -4220,6 +4303,16 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
BlockTrampolinePoolScope block_trampoline_pool(this);
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
LookupConstant(scratch, code);
Daddu(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
Call(scratch, cond, rs, rt, bd);
return;
}
#endif // V8_EMBEDDED_BUILTINS
Label start;
bind(&start);
DCHECK(RelocInfo::IsCodeTarget(rmode));
@ -4346,9 +4439,7 @@ void TurboAssembler::Push(Handle<HeapObject> handle) {

void MacroAssembler::MaybeDropFrames() {
// Check whether we need to drop frames to restart a function on the stack.
ExternalReference restart_fp =
ExternalReference::debug_restart_fp_address(isolate());
li(a1, Operand(restart_fp));
li(a1, ExternalReference::debug_restart_fp_address(isolate()));
Ld(a1, MemOperand(a1));
Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
ne, a1, Operand(zero_reg));
@ -4365,8 +4456,8 @@ void MacroAssembler::PushStackHandler() {
Push(Smi::kZero); // Padding.

// Link the current handler as the next handler.
li(a6, Operand(ExternalReference::Create(IsolateAddressId::kHandlerAddress,
isolate())));
li(a6,
ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate()));
Ld(a5, MemOperand(a6));
push(a5);

@ -4382,8 +4473,8 @@ void MacroAssembler::PopStackHandler() {
kPointerSize)));
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
li(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kHandlerAddress, isolate())));
li(scratch,
ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate()));
Sd(a1, MemOperand(scratch));
}

@ -4591,9 +4682,7 @@ void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
const ParameterCount& actual) {
Label skip_hook;

ExternalReference debug_hook_active =
ExternalReference::debug_hook_on_function_call_address(isolate());
li(t0, Operand(debug_hook_active));
li(t0, ExternalReference::debug_hook_on_function_call_address(isolate()));
Lb(t0, MemOperand(t0));
Branch(&skip_hook, eq, t0, Operand(zero_reg));

@ -4923,7 +5012,7 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
li(scratch2, Operand(ExternalReference::Create(counter)));
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Addu(scratch1, scratch1, Operand(value));
Sw(scratch1, MemOperand(scratch2));
@ -4935,7 +5024,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
li(scratch2, Operand(ExternalReference::Create(counter)));
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Subu(scratch1, scratch1, Operand(value));
Sw(scratch1, MemOperand(scratch2));
@ -5108,11 +5197,11 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
Sd(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

// Save the frame pointer and the context in top.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
Sd(fp, MemOperand(t8));
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
Sd(cp, MemOperand(t8));

const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
@ -5162,18 +5251,18 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
}

// Clear top frame.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()));
Sd(zero_reg, MemOperand(t8));

// Restore current context from top and clear it in debug mode.
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
Ld(cp, MemOperand(t8));

#ifdef DEBUG
li(t8, Operand(ExternalReference::Create(IsolateAddressId::kContextAddress,
isolate())));
li(t8,
ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
Sd(a3, MemOperand(t8));
#endif

@ -5634,7 +5723,7 @@ void TurboAssembler::PrepareCallCFunction(int num_reg_arguments,
void TurboAssembler::CallCFunction(ExternalReference function,
int num_reg_arguments,
int num_double_arguments) {
li(t9, Operand(function));
li(t9, function);
CallCFunctionHelper(t9, num_reg_arguments, num_double_arguments);
}

@ -279,8 +279,15 @@ class TurboAssembler : public Assembler {
li(rd, Operand(j), mode);
}
void li(Register dst, Handle<HeapObject> value, LiFlags mode = OPTIMIZE_SIZE);
void li(Register dst, ExternalReference value, LiFlags mode = OPTIMIZE_SIZE);

// Jump, Call, and Ret pseudo instructions implementing inter-working.
#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif // V8_EMBEDDED_BUILTINS

// Jump, Call, and Ret pseudo instructions implementing inter-working.
#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT

@ -565,7 +572,7 @@ class TurboAssembler : public Assembler {
// See comments at the beginning of CEntryStub::Generate.
inline void PrepareCEntryArgs(int num_args) { li(a0, num_args); }
inline void PrepareCEntryFunction(const ExternalReference& ref) {
li(a1, Operand(ref));
li(a1, ref);
}

void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
@ -864,12 +871,16 @@ class TurboAssembler : public Assembler {

void ResetSpeculationPoisonRegister();

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

protected:
inline Register GetRtAsRegisterHelper(const Operand& rt, Register scratch);
inline int32_t GetOffset(int32_t offset, Label* L, OffsetSize bits);

private:
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* const isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;

@ -252,7 +252,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
Isolate* isolate() const { return masm_.isolate(); }

MacroAssembler masm_;
MacroAssembler::NoRootArrayScope no_root_array_scope_;
NoRootArrayScope no_root_array_scope_;

ZoneList<int> code_relative_fixup_positions_;

@ -343,11 +343,11 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
if (Builtins::IsIsolateIndependent(i)) {
DCHECK(!Builtins::IsLazy(i));

// Sanity-check that the given builtin is process-independent and does not
// Sanity-check that the given builtin is isolate-independent and does not
// use the trampoline register in its calling convention.
if (!code->IsProcessIndependent()) {
saw_unsafe_builtin = true;
fprintf(stderr, "%s is not process-independent.\n", Builtins::name(i));
fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
}
if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
saw_unsafe_builtin = true;
@ -368,7 +368,12 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
lengths[i] = 0;
}
}
CHECK(!saw_unsafe_builtin);
CHECK_WITH_MSG(
!saw_unsafe_builtin,
"One or more builtins marked as isolate-independent either contains "
"isolate-dependent code or aliases the off-heap trampoline register. "
"If in doubt, ask jgruber@ or remove the affected builtin from the "
"Builtins::IsIsolateIndependent whitelist");

const uint32_t blob_size = RawDataOffset() + raw_data_size;
uint8_t* blob = new uint8_t[blob_size];

@ -230,7 +230,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
ProfileEntryHookStub::MaybeCallEntryHook(masm);

{ // NOLINT. Scope block confuses linter.
MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
NoRootArrayScope uninitialized_root_register(masm);
// Set up frame.
__ pushq(rbp);
__ movp(rbp, rsp);

@ -8,6 +8,7 @@
#include "src/base/division-by-constant.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/counters.h"
@ -18,6 +19,7 @@
#include "src/instruction-stream.h"
#include "src/objects-inl.h"
#include "src/register-configuration.h"
#include "src/snapshot/serializer-common.h"
#include "src/x64/assembler-x64.h"

#include "src/x64/macro-assembler-x64.h" // Cannot be the first include.
@ -107,6 +109,13 @@ void MacroAssembler::Load(Register destination, ExternalReference source) {
}
}
// Safe code.
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupExternalReference(kScratchRegister, source);
movp(destination, Operand(kScratchRegister, 0));
return;
}
#endif // V8_EMBEDDED_BUILTINS
if (destination == rax) {
load_rax(source);
} else {
@ -133,6 +142,59 @@ void MacroAssembler::Store(ExternalReference destination, Register source) {
}
}

#ifdef V8_EMBEDDED_BUILTINS
void TurboAssembler::LookupConstant(Register destination,
Handle<Object> object) {
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// TODO(jgruber, v8:6666): Support self-references. Currently, we'd end up
// adding the temporary code object to the constants list, before creating the
// final object in Factory::CopyCode.
CHECK(code_object_.is_null() || !object.equals(code_object_));

// Ensure the given object is in the builtins constants table and fetch its
// index.
BuiltinsConstantsTableBuilder* builder =
isolate()->builtins_constants_table_builder();
uint32_t index = builder->AddObject(object);

// TODO(jgruber): Load builtins from the builtins table.
// TODO(jgruber): Ensure that code generation can recognize constant targets
// in kArchCallCodeObject.

DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));

LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
movp(destination, FieldOperand(destination, FixedArray::kHeaderSize +
index * kPointerSize));
}

void TurboAssembler::LookupExternalReference(Register destination,
ExternalReference reference) {
CHECK(reference.address() !=
ExternalReference::roots_array_start(isolate()).address());
CHECK(isolate()->ShouldLoadConstantsFromRootList());
CHECK(root_array_available_);

// Encode as an index into the external reference table stored on the isolate.

ExternalReferenceEncoder encoder(isolate());
ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
CHECK(!v.is_from_api());
uint32_t index = v.index();

// Generate code to load from the external reference table.

int32_t roots_to_external_reference_offset =
Heap::roots_to_external_reference_table_offset() - kRootRegisterBias +
ExternalReferenceTable::OffsetOfEntry(index);

movp(destination, Operand(kRootRegister, roots_to_external_reference_offset));
}
#endif // V8_EMBEDDED_BUILTINS

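The one x64-specific twist relative to the mips64 version is the extra kRootRegisterBias term: on x64 the root register is set up pointing a fixed bias past the start of the roots area so that short signed displacements cover more of it, and root-relative offsets must compensate. A sketch of the resulting displacement computation (all constants here are illustrative):

    #include <cstdint>
    #include <cstdio>

    constexpr int32_t kRootRegisterBias = 128;            // illustrative bias
    constexpr int32_t kRootsToExtRefTableOffset = 0x1000; // illustrative
    constexpr int32_t kPointerSize = 8;

    // Mirrors: roots_to_external_reference_table_offset() - kRootRegisterBias
    //          + ExternalReferenceTable::OffsetOfEntry(index)
    int32_t Displacement(uint32_t index) {
      return kRootsToExtRefTableOffset - kRootRegisterBias +
             static_cast<int32_t>(index) * kPointerSize;
    }

    int main() {
      // The load then becomes: movp(dst, Operand(kRootRegister, disp)).
      std::printf("index 3 -> displacement %d\n", Displacement(3));
      return 0;
    }
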
void TurboAssembler::LoadAddress(Register destination,
ExternalReference source) {
if (root_array_available_ && !serializer_enabled()) {
@ -143,6 +205,12 @@ void TurboAssembler::LoadAddress(Register destination,
}
}
// Safe code.
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupExternalReference(destination, source);
return;
}
#endif // V8_EMBEDDED_BUILTINS
Move(destination, source);
}

@ -519,7 +587,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
LoadAddress(rbx, ext);
CEntryStub ces(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
builtin_exit_frame);
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
Jump(ces.GetCode(), RelocInfo::CODE_TARGET);
}

static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
@ -1008,6 +1076,18 @@ void TurboAssembler::Move(Register dst, Smi* source) {
}
}

void TurboAssembler::Move(Register dst, ExternalReference ext) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
ext.address() !=
ExternalReference::roots_array_start(isolate()).address()) {
LookupExternalReference(dst, ext);
return;
}
#endif // V8_EMBEDDED_BUILTINS
movp(dst, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
STATIC_ASSERT(kSmiTag == 0);
if (dst != src) {
@ -1312,6 +1392,18 @@ void TurboAssembler::Push(Handle<HeapObject> source) {

void TurboAssembler::Move(Register result, Handle<HeapObject> object,
RelocInfo::Mode rmode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() &&
!object.equals(CodeObject())) {
Heap::RootListIndex root_index;
if (!isolate()->heap()->IsRootHandle(object, &root_index)) {
LookupConstant(result, object);
} else {
LoadRoot(result, root_index);
}
return;
}
#endif // V8_EMBEDDED_BUILTINS
movp(result, object.address(), rmode);
}

@ -1451,6 +1543,14 @@ void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {

void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
// TODO(X64): Inline this
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupConstant(kScratchRegister, code_object);
leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
jmp(kScratchRegister);
return;
}
#endif // V8_EMBEDDED_BUILTINS
jmp(code_object, rmode);
}

@ -1493,6 +1593,14 @@ void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
}

void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef V8_EMBEDDED_BUILTINS
if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) {
LookupConstant(kScratchRegister, code_object);
leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
call(kScratchRegister);
return;
}
#endif // V8_EMBEDDED_BUILTINS
#ifdef DEBUG
int end_position = pc_offset() + CallSize(code_object);
#endif
@ -2020,8 +2128,12 @@ void MacroAssembler::MaybeDropFrames() {
ExternalReference::debug_restart_fp_address(isolate());
Load(rbx, restart_fp);
testp(rbx, rbx);
j(not_zero, BUILTIN_CODE(isolate(), FrameDropperTrampoline),
RelocInfo::CODE_TARGET);

Label dont_drop;
j(zero, &dont_drop, Label::kNear);
Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);

bind(&dont_drop);
}

void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,

@ -334,9 +334,7 @@ class TurboAssembler : public Assembler {
movp(dst, constant);
}

void Move(Register dst, ExternalReference ext) {
movp(dst, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
}
void Move(Register dst, ExternalReference ext);

void Move(XMMRegister dst, uint32_t src);
void Move(XMMRegister dst, uint64_t src);
@ -368,6 +366,12 @@ class TurboAssembler : public Assembler {
// register.
void LoadAddress(Register destination, ExternalReference source);

#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif // V8_EMBEDDED_BUILTINS

// Operand pointing to an external reference.
// May emit code to set up the scratch register. The operand is
// only guaranteed to be correct as long as the scratch register
@ -544,23 +548,6 @@ class MacroAssembler : public TurboAssembler {
MacroAssembler(Isolate* isolate, void* buffer, int size,
CodeObjectRequired create_code_object);

// Prevent the use of the RootArray during the lifetime of this
// scope object.
class NoRootArrayScope BASE_EMBEDDED {
public:
explicit NoRootArrayScope(MacroAssembler* assembler)
: variable_(&assembler->root_array_available_),
old_value_(assembler->root_array_available_) {
assembler->root_array_available_ = false;
}
~NoRootArrayScope() {
*variable_ = old_value_;
}
private:
bool* variable_;
bool old_value_;
};

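The nested class removed here survives as a standalone NoRootArrayScope, used above in JSEntryStub::Generate and RegExpMacroAssemblerX64. Its RAII pattern, clearing a flag for the dynamic extent of a scope and restoring the previous value on exit, can be sketched in isolation (toy types, not V8's classes):

    #include <cassert>
    #include <cstdio>

    struct ToyAssembler {
      bool root_array_available = true;

      // Models NoRootArrayScope: while alive, any root-relative access
      // trips the assertion below.
      struct NoRootArrayScope {
        explicit NoRootArrayScope(ToyAssembler* masm)
            : flag_(&masm->root_array_available), old_value_(*flag_) {
          *flag_ = false;  // root register is not set up yet at this point
        }
        ~NoRootArrayScope() { *flag_ = old_value_; }

       private:
        bool* flag_;
        bool old_value_;
      };

      void LoadRoot() { assert(root_array_available && "root array unusable"); }
    };

    int main() {
      ToyAssembler masm;
      {
        ToyAssembler::NoRootArrayScope scope(&masm);
        // masm.LoadRoot() here would assert.
      }
      masm.LoadRoot();  // fine again once the scope has ended
      std::printf("ok\n");
      return 0;
    }
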
// Loads and stores the value of an external reference.
// Special case code for load and store to take advantage of
// load_rax/store_rax if possible/necessary.
@ -914,6 +901,9 @@ class MacroAssembler : public TurboAssembler {
void EnterBuiltinFrame(Register context, Register target, Register argc);
void LeaveBuiltinFrame(Register context, Register target, Register argc);

bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }

private:
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.

@ -72,21 +72,19 @@ UNINITIALIZED_TEST(VerifyBuiltinsIsolateIndependence) {
for (int i = 0; i < Builtins::builtin_count; i++) {
Code* code = isolate->builtins()->builtin(i);

if (kVerbose) {
printf("%s %s\n", Builtins::KindNameOf(i),
isolate->builtins()->name(i));
}

bool is_isolate_independent = true;
for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
is_isolate_independent = false;

#ifdef ENABLE_DISASSEMBLER
if (kVerbose) {
if (is_isolate_independent) {
printf("%s %s\n", Builtins::KindNameOf(i),
isolate->builtins()->name(i));
}
#ifdef ENABLE_DISASSEMBLER
RelocInfo::Mode mode = it.rinfo()->rmode();
printf("  %s\n", RelocInfo::RelocModeName(mode));
}
#endif
}
is_isolate_independent = false;
}

// Relaxed condition only checks whether the isolate-independent list is