[cleanup] Move enum Heap::RootListIndex to enum class RootIndex

and introduce RootsTable - a V8 heap roots storage.

So, the renaming part looks like this:
  Heap::RootListIndex      -> RootIndex
  Heap::kBlahBlahRootIndex -> RootIndex::kBlahBlah

Bug: v8:8015, v8:8182
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I38e1f3e3f6813ef35e37b0bed35e9ae14a62134f
Reviewed-on: https://chromium-review.googlesource.com/1234613
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56067}
This commit is contained in:
parent
6989ec70a9
commit
2fde54330a
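The renaming above is mechanical, but the switch from a nested plain enum to a scoped enum class is what drives most of the churn in the hunks below: enumerators move out of Heap's scope and lose their implicit integer conversions. A minimal C++ sketch of the pattern (simplified; the real enumerators are generated from V8's root-list macros, and the underlying enum type and the RootsTable shape shown here are assumptions, not the committed declarations):

#include <cstddef>
#include <cstdint>

class Object;  // stand-in for V8's heap object type (sketch only)

// Before: a plain enum nested in Heap. Values are spelled
// Heap::kUndefinedValueRootIndex and convert to int implicitly.
class Heap {
 public:
  enum RootListIndex {
    kUndefinedValueRootIndex,
    kTheHoleValueRootIndex,
    // ... one enumerator per root ...
    kStrongRootListLength
  };
};

// After: a scoped enum at namespace level. Values are spelled
// RootIndex::kUndefinedValue, and every integer conversion must be
// an explicit static_cast.
enum class RootIndex : uint16_t {  // underlying type: an assumption
  kUndefinedValue,
  kTheHoleValue,
  // ... one enumerator per root ...
  kStrongRootListLength
};

// Typical call-site update, mirroring the first hunk below: the loop
// index can no longer be compared or assigned without casts.
inline void IterateRoots(Object** roots) {
  for (uint32_t i = 0;
       i < static_cast<uint32_t>(RootIndex::kStrongRootListLength); i++) {
    RootIndex root_index = static_cast<RootIndex>(i);
    Object* root = roots[static_cast<size_t>(root_index)];
    (void)root;  // ... process the root ...
  }
}

// The commit message also introduces RootsTable as the heap-roots
// storage; conceptually it is an array indexed by RootIndex
// (again a sketch, not the committed class):
class RootsTable {
 public:
  Object*& operator[](RootIndex index) {
    return roots_[static_cast<size_t>(index)];
  }

 private:
  static constexpr size_t kEntriesCount =
      static_cast<size_t>(RootIndex::kStrongRootListLength);
  Object* roots_[kEntriesCount];
};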
@@ -14,8 +14,9 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
   map_ = isolate->root_index_map();
   if (map_ != nullptr) return;
   map_ = new HeapObjectToIndexHashMap();
-  for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
-    Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
+  for (uint32_t i = 0; i < static_cast<int>(RootIndex::kStrongRootListLength);
+       i++) {
+    RootIndex root_index = static_cast<RootIndex>(i);
     Object* root = isolate->heap()->root(root_index);
     if (!root->IsHeapObject()) continue;
     // Omit root entries that can be written after initialization. They must
 src/api.cc | 10
@@ -7819,8 +7819,8 @@ Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
 Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   i::Handle<i::String> i_name = Utils::OpenHandle(*name);
-  return Utils::ToLocal(i_isolate->SymbolFor(
-      i::Heap::kPublicSymbolTableRootIndex, i_name, false));
+  return Utils::ToLocal(
+      i_isolate->SymbolFor(i::RootIndex::kPublicSymbolTable, i_name, false));
 }

@@ -7828,7 +7828,7 @@ Local<Symbol> v8::Symbol::ForApi(Isolate* isolate, Local<String> name) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   i::Handle<i::String> i_name = Utils::OpenHandle(*name);
   return Utils::ToLocal(
-      i_isolate->SymbolFor(i::Heap::kApiSymbolTableRootIndex, i_name, false));
+      i_isolate->SymbolFor(i::RootIndex::kApiSymbolTable, i_name, false));
 }

 #define WELL_KNOWN_SYMBOLS(V) \
@@ -7869,8 +7869,8 @@ Local<Private> v8::Private::New(Isolate* isolate, Local<String> name) {
 Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
   i::Handle<i::String> i_name = Utils::OpenHandle(*name);
-  Local<Symbol> result = Utils::ToLocal(i_isolate->SymbolFor(
-      i::Heap::kApiPrivateSymbolTableRootIndex, i_name, true));
+  Local<Symbol> result = Utils::ToLocal(
+      i_isolate->SymbolFor(i::RootIndex::kApiPrivateSymbolTable, i_name, true));
   return v8::Local<Private>(reinterpret_cast<Private*>(*result));
 }

@@ -131,7 +131,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
         IsolateAddressId::kPendingExceptionAddress, isolate())));
   }
   __ str(r0, MemOperand(scratch));
-  __ LoadRoot(r0, Heap::kExceptionRootIndex);
+  __ LoadRoot(r0, RootIndex::kException);
   __ b(&exit);

   // Invoke: Link this frame into the handler chain.
@@ -418,7 +418,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   __ LeaveExitFrame(false, r4, stack_space_operand != nullptr);

   // Check if the function scheduled an exception.
-  __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
+  __ LoadRoot(r4, RootIndex::kTheHoleValue);
   __ Move(r6, ExternalReference::scheduled_exception_address(isolate));
   __ ldr(r5, MemOperand(r6));
   __ cmp(r4, r5);
@@ -469,14 +469,14 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(FCA::kHolderIndex == 0);

   // new target
-  __ PushRoot(Heap::kUndefinedValueRootIndex);
+  __ PushRoot(RootIndex::kUndefinedValue);

   // call data
   __ push(call_data);

   Register scratch0 = call_data;
   Register scratch1 = r5;
-  __ LoadRoot(scratch0, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(scratch0, RootIndex::kUndefinedValue);
   // return value
   __ push(scratch0);
   // return value default
@@ -549,7 +549,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
   // Push data from AccessorInfo.
   __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
   __ push(scratch);
-  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
   __ Push(scratch, scratch);
   __ Move(scratch, ExternalReference::isolate_address(isolate()));
   __ Push(scratch, holder);
@@ -130,7 +130,7 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 void TurboAssembler::LoadFromConstantsTable(Register destination,
                                             int constant_index) {
   DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
-      Heap::kBuiltinsConstantsTableRootIndex));
+      RootIndex::kBuiltinsConstantsTable));

   // The ldr call below could end up clobbering ip when the offset does not fit
   // into 12 bits (and thus needs to be loaded from the constant pool). In that
@@ -147,7 +147,7 @@ void TurboAssembler::LoadFromConstantsTable(Register destination,
     reg = r7;
   }

-  LoadRoot(reg, Heap::kBuiltinsConstantsTableRootIndex);
+  LoadRoot(reg, RootIndex::kBuiltinsConstantsTable);
   ldr(destination, MemOperand(reg, offset));

   if (could_clobber_ip) {
@@ -527,7 +527,7 @@ void MacroAssembler::Store(Register src,
   }
 }

-void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
+void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond) {
   ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), cond);
 }
@@ -1517,7 +1517,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

   // Clear the new.target register if not given.
   if (!new_target.is_valid()) {
-    LoadRoot(r3, Heap::kUndefinedValueRootIndex);
+    LoadRoot(r3, RootIndex::kUndefinedValue);
   }

   Label done;
@@ -1639,9 +1639,7 @@ void MacroAssembler::CompareInstanceType(Register map,
   cmp(type_reg, Operand(type));
 }

-
-void MacroAssembler::CompareRoot(Register obj,
-                                 Heap::RootListIndex index) {
+void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
   UseScratchRegisterScope temps(this);
   Register scratch = temps.Acquire();
   DCHECK(obj != scratch);
@@ -2050,7 +2048,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
   if (emit_debug_code()) {
     Label done_checking;
     AssertNotSmi(object);
-    CompareRoot(object, Heap::kUndefinedValueRootIndex);
+    CompareRoot(object, RootIndex::kUndefinedValue);
     b(eq, &done_checking);
     ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
     CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
@@ -484,11 +484,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   }

   // Load an object from the root table.
-  void LoadRoot(Register destination, Heap::RootListIndex index) override {
+  void LoadRoot(Register destination, RootIndex index) override {
     LoadRoot(destination, index, al);
   }
-  void LoadRoot(Register destination, Heap::RootListIndex index,
-                Condition cond);
+  void LoadRoot(Register destination, RootIndex index, Condition cond);

   // Jump if the register contains a smi.
   void JumpIfSmi(Register value, Label* smi_label);
@@ -720,8 +719,8 @@ class MacroAssembler : public TurboAssembler {

   // Compare the object in a register to a value from the root list.
   // Acquires a scratch register.
-  void CompareRoot(Register obj, Heap::RootListIndex index);
-  void PushRoot(Heap::RootListIndex index) {
+  void CompareRoot(Register obj, RootIndex index);
+  void PushRoot(RootIndex index) {
     UseScratchRegisterScope temps(this);
     Register scratch = temps.Acquire();
     LoadRoot(scratch, index);
@@ -729,14 +728,13 @@ class MacroAssembler : public TurboAssembler {
   }

   // Compare the object in a register to a value and jump if they are equal.
-  void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
+  void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
     CompareRoot(with, index);
     b(eq, if_equal);
   }

   // Compare the object in a register to a value and jump if they are not equal.
-  void JumpIfNotRoot(Register with, Heap::RootListIndex index,
-                     Label* if_not_equal) {
+  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
     CompareRoot(with, index);
     b(ne, if_not_equal);
   }
@@ -124,7 +124,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
         IsolateAddressId::kPendingExceptionAddress, isolate())));
   }
   __ Str(code_entry, MemOperand(x10));
-  __ LoadRoot(x0, Heap::kExceptionRootIndex);
+  __ LoadRoot(x0, RootIndex::kException);
   __ B(&exit);

   // Invoke: Link this frame into the handler chain.
@@ -434,8 +434,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   // Check if the function scheduled an exception.
   __ Mov(x5, ExternalReference::scheduled_exception_address(isolate));
   __ Ldr(x5, MemOperand(x5));
-  __ JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex,
-                   &promote_scheduled_exception);
+  __ JumpIfNotRoot(x5, RootIndex::kTheHoleValue, &promote_scheduled_exception);

   __ DropSlots(stack_space);
   __ Ret();
@@ -484,7 +483,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(FCA::kHolderIndex == 0);

   Register undef = x7;
-  __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(undef, RootIndex::kUndefinedValue);

   // Push new target, call data.
   __ Push(undef, call_data);
@@ -562,7 +561,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
                               name));

   __ Ldr(data, FieldMemOperand(callback, AccessorInfo::kDataOffset));
-  __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(undef, RootIndex::kUndefinedValue);
   __ Mov(isolate_address, ExternalReference::isolate_address(isolate()));
   __ Ldr(name, FieldMemOperand(callback, AccessorInfo::kNameOffset));

@@ -1516,7 +1516,7 @@ void TurboAssembler::CanonicalizeNaN(const VRegister& dst,
   Fsub(dst, src, fp_zero);
 }

-void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
+void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
   // TODO(jbramley): Most root values are constants, and can be synthesized
   // without a load. Refer to the ARM back end for details.
   Ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
@@ -1646,7 +1646,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
   Register scratch = temps.AcquireX();
   Label done_checking;
   AssertNotSmi(object);
-  JumpIfRoot(object, Heap::kUndefinedValueRootIndex, &done_checking);
+  JumpIfRoot(object, RootIndex::kUndefinedValue, &done_checking);
   Ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
   CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
   Assert(eq, AbortReason::kExpectedUndefinedOrCell);
@@ -1806,8 +1806,8 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args,
 void TurboAssembler::LoadFromConstantsTable(Register destination,
                                             int constant_index) {
   DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
-      Heap::kBuiltinsConstantsTableRootIndex));
-  LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
+      RootIndex::kBuiltinsConstantsTable));
+  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
   Ldr(destination,
       FieldMemOperand(destination,
                       FixedArray::kHeaderSize + constant_index * kPointerSize));
@@ -2225,7 +2225,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

   // Clear the new.target register if not given.
   if (!new_target.is_valid()) {
-    LoadRoot(x3, Heap::kUndefinedValueRootIndex);
+    LoadRoot(x3, RootIndex::kUndefinedValue);
   }

   Label done;
@@ -2597,8 +2597,7 @@ void MacroAssembler::LoadElementsKindFromMap(Register result, Register map) {
   DecodeField<Map::ElementsKindBits>(result);
 }

-void MacroAssembler::CompareRoot(const Register& obj,
-                                 Heap::RootListIndex index) {
+void MacroAssembler::CompareRoot(const Register& obj, RootIndex index) {
   UseScratchRegisterScope temps(this);
   Register temp = temps.AcquireX();
   DCHECK(!AreAliased(obj, temp));
@@ -2606,17 +2605,13 @@ void MacroAssembler::CompareRoot(const Register& obj,
   Cmp(obj, temp);
 }

-
-void MacroAssembler::JumpIfRoot(const Register& obj,
-                                Heap::RootListIndex index,
+void MacroAssembler::JumpIfRoot(const Register& obj, RootIndex index,
                                 Label* if_equal) {
   CompareRoot(obj, index);
   B(eq, if_equal);
 }

-
-void MacroAssembler::JumpIfNotRoot(const Register& obj,
-                                   Heap::RootListIndex index,
+void MacroAssembler::JumpIfNotRoot(const Register& obj, RootIndex index,
                                    Label* if_not_equal) {
   CompareRoot(obj, index);
   B(ne, if_not_equal);
@@ -2912,8 +2907,7 @@ void TurboAssembler::AssertUnreachable(AbortReason reason) {
   if (emit_debug_code()) Abort(reason);
 }

-void MacroAssembler::AssertRegisterIsRoot(Register reg,
-                                          Heap::RootListIndex index,
+void MacroAssembler::AssertRegisterIsRoot(Register reg, RootIndex index,
                                           AbortReason reason) {
   if (emit_debug_code()) {
     CompareRoot(reg, index);
@@ -1129,7 +1129,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
 #undef DECLARE_FUNCTION

   // Load an object from the root table.
-  void LoadRoot(Register destination, Heap::RootListIndex index) override;
+  void LoadRoot(Register destination, RootIndex index) override;

   inline void Ret(const Register& xn = lr);

@@ -1828,17 +1828,13 @@ class MacroAssembler : public TurboAssembler {
   void LoadElementsKindFromMap(Register result, Register map);

   // Compare the object in a register to a value from the root list.
-  void CompareRoot(const Register& obj, Heap::RootListIndex index);
+  void CompareRoot(const Register& obj, RootIndex index);

   // Compare the object in a register to a value and jump if they are equal.
-  void JumpIfRoot(const Register& obj,
-                  Heap::RootListIndex index,
-                  Label* if_equal);
+  void JumpIfRoot(const Register& obj, RootIndex index, Label* if_equal);

   // Compare the object in a register to a value and jump if they are not equal.
-  void JumpIfNotRoot(const Register& obj,
-                     Heap::RootListIndex index,
-                     Label* if_not_equal);
+  void JumpIfNotRoot(const Register& obj, RootIndex index, Label* if_not_equal);

   // Compare the contents of a register with an operand, and branch to true,
   // false or fall through, depending on condition.
@@ -1951,7 +1947,7 @@ class MacroAssembler : public TurboAssembler {
   // Debugging.

   void AssertRegisterIsRoot(
-      Register reg, Heap::RootListIndex index,
+      Register reg, RootIndex index,
       AbortReason reason = AbortReason::kRegisterDidNotMatchExpectedRoot);

   // Abort if the specified register contains the invalid color bit pattern.
@@ -61,7 +61,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
   // Run the native code for the InternalArray function called as a normal
   // function.
   // tail call a stub
-  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(r2, RootIndex::kUndefinedValue);
   __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
           RelocInfo::CODE_TARGET);
 }
@@ -122,7 +122,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
     __ SmiUntag(r0);

     // The receiver for the builtin/api call.
-    __ PushRoot(Heap::kTheHoleValueRootIndex);
+    __ PushRoot(RootIndex::kTheHoleValue);

     // Set up pointer to last argument.
     __ add(r4, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@@ -188,7 +188,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     Label post_instantiation_deopt_entry, not_create_implicit_receiver;

     // Preserve the incoming parameters on the stack.
-    __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(r4, RootIndex::kTheHoleValue);
     __ SmiTag(r0);
     __ Push(cp, r0, r1, r4, r3);

@@ -214,7 +214,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

     // Else: use TheHoleValue as receiver for constructor call
     __ bind(&not_create_implicit_receiver);
-    __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(r0, RootIndex::kTheHoleValue);

     // ----------- S t a t e -------------
     //  -- r0: receiver
@@ -303,7 +303,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     Label use_receiver, do_throw, leave_frame;

     // If the result is undefined, we jump out to using the implicit receiver.
-    __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &use_receiver);
+    __ JumpIfRoot(r0, RootIndex::kUndefinedValue, &use_receiver);

     // Otherwise we do a smi check and fall through to check if the return value
     // is a valid receiver.
@@ -325,7 +325,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     // on-stack receiver as the result.
     __ bind(&use_receiver);
     __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
-    __ JumpIfRoot(r0, Heap::kTheHoleValueRootIndex, &do_throw);
+    __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);

     __ bind(&leave_frame);
     // Restore smi-tagged arguments count from the frame.
@@ -399,7 +399,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
+  __ CompareRoot(sp, RootIndex::kRealStackLimit);
   __ b(lo, &stack_overflow);

   // Push receiver.
@@ -466,7 +466,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ Push(r1, r4);
     // Push hole as receiver since we do not use it for stepping.
-    __ PushRoot(Heap::kTheHoleValueRootIndex);
+    __ PushRoot(RootIndex::kTheHoleValue);
     __ CallRuntime(Runtime::kDebugOnFunctionCall);
     __ Pop(r1);
     __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
@@ -503,7 +503,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
-  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
   // Make scratch the space we have left. The stack might already be overflowed
   // here which will cause scratch to become negative.
   __ sub(scratch, sp, scratch);
@@ -573,7 +573,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,

     // Initialize all JavaScript callee-saved registers, since they will be seen
     // by the garbage collector as part of handlers.
-    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r4, RootIndex::kUndefinedValue);
     __ mov(r5, Operand(r4));
     __ mov(r6, Operand(r4));
     __ mov(r8, Operand(r4));
@@ -878,7 +878,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // Do a stack check to ensure we don't go over the limit.
     Label ok;
     __ sub(r9, sp, Operand(r4));
-    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
+    __ LoadRoot(r2, RootIndex::kRealStackLimit);
     __ cmp(r9, Operand(r2));
     __ b(hs, &ok);
     __ CallRuntime(Runtime::kThrowStackOverflow);
@@ -887,7 +887,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // If ok, push undefined as the initial value for all register file entries.
     Label loop_header;
     Label loop_check;
-    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r9, RootIndex::kUndefinedValue);
     __ b(&loop_check, al);
     __ bind(&loop_header);
     // TODO(rmcilroy): Consider doing more than one push per loop iteration.
@@ -907,7 +907,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ str(r3, MemOperand(fp, r9, LSL, kPointerSizeLog2), ne);

   // Load accumulator with undefined.
-  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

   // Load the dispatch table into a register and dispatch to the bytecode
   // handler at the current bytecode offset.
@@ -987,7 +987,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

   // Push "undefined" as the receiver arg if we need to.
   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
-    __ PushRoot(Heap::kUndefinedValueRootIndex);
+    __ PushRoot(RootIndex::kUndefinedValue);
     __ mov(r3, r0);  // Argument count is correct.
   }

@@ -1201,7 +1201,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
       __ push(r4);
     }
     for (int i = 0; i < 3 - j; ++i) {
-      __ PushRoot(Heap::kUndefinedValueRootIndex);
+      __ PushRoot(RootIndex::kUndefinedValue);
     }
     if (j < 3) {
       __ jmp(&args_done);
@@ -1360,7 +1360,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // arguments from the stack (including the receiver), and push thisArg (if
   // present) instead.
   {
-    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r5, RootIndex::kUndefinedValue);
     __ mov(r2, r5);
     __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
     __ sub(r4, r0, Operand(1), SetCC);
@@ -1383,8 +1383,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

   // 3. Tail call with no arguments if argArray is null or undefined.
   Label no_arguments;
-  __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
-  __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);
+  __ JumpIfRoot(r2, RootIndex::kNullValue, &no_arguments);
+  __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &no_arguments);

   // 4a. Apply the receiver to the given argArray.
   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1407,7 +1407,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
     Label done;
     __ cmp(r0, Operand::Zero());
     __ b(ne, &done);
-    __ PushRoot(Heap::kUndefinedValueRootIndex);
+    __ PushRoot(RootIndex::kUndefinedValue);
     __ add(r0, r0, Operand(1));
     __ bind(&done);
   }
@@ -1456,7 +1456,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   // remove all arguments from the stack (including the receiver), and push
   // thisArgument (if present) instead.
   {
-    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r1, RootIndex::kUndefinedValue);
     __ mov(r5, r1);
     __ mov(r2, r1);
     __ sub(r4, r0, Operand(1), SetCC);
@@ -1498,7 +1498,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   // arguments from the stack (including the receiver), and push thisArgument
   // (if present) instead.
   {
-    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r1, RootIndex::kUndefinedValue);
     __ mov(r2, r1);
     __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
     __ sub(r4, r0, Operand(1), SetCC);
@@ -1590,7 +1590,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
     Label done;
-    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    __ LoadRoot(scratch, RootIndex::kRealStackLimit);
     // The stack might already be overflowed here which will cause 'scratch' to
     // become negative.
     __ sub(scratch, sp, scratch);
@@ -1604,7 +1604,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
   // Push arguments onto the stack (thisArgument is already on the stack).
   {
     __ mov(r6, Operand(0));
-    __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(r5, RootIndex::kTheHoleValue);
     Label done, loop;
     __ bind(&loop);
     __ cmp(r6, r4);
@@ -1612,7 +1612,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     __ add(scratch, r2, Operand(r6, LSL, kPointerSizeLog2));
     __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
     __ cmp(scratch, r5);
-    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq);
+    __ LoadRoot(scratch, RootIndex::kUndefinedValue, eq);
     __ Push(scratch);
     __ add(r6, r6, Operand(1));
     __ b(&loop);
@@ -1756,9 +1756,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   __ b(hs, &done_convert);
   if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
     Label convert_global_proxy;
-    __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
-                  &convert_global_proxy);
-    __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
+    __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &convert_global_proxy);
+    __ JumpIfNotRoot(r3, RootIndex::kNullValue, &convert_to_object);
     __ bind(&convert_global_proxy);
     {
       // Patch receiver to global proxy.
@@ -1844,7 +1843,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
       // Check the stack for overflow. We are not trying to catch interruptions
       // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
-      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
+      __ CompareRoot(sp, RootIndex::kRealStackLimit);
       __ b(hs, &done);
       // Restore the stack pointer.
       __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
@@ -1972,7 +1971,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

   // Calling convention for function specific ConstructStubs require
   // r2 to contain either an AllocationSite or undefined.
-  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(r2, RootIndex::kUndefinedValue);

   Label call_generic_stub;

@@ -2150,7 +2149,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     // r1: function
     // r2: expected number of arguments
     // r3: new target (passed through to callee)
-    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(scratch, RootIndex::kUndefinedValue);
     __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
     // Adjust for frame.
     __ sub(r4, r4,
@@ -2316,7 +2315,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

   // Check result for exception sentinel.
   Label exception_returned;
-  __ CompareRoot(r0, Heap::kExceptionRootIndex);
+  __ CompareRoot(r0, RootIndex::kException);
   __ b(eq, &exception_returned);

   // Check that there is no pending exception, otherwise we
@@ -2327,7 +2326,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
       IsolateAddressId::kPendingExceptionAddress, masm->isolate());
   __ Move(r3, pending_exception_address);
   __ ldr(r3, MemOperand(r3));
-  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
+  __ CompareRoot(r3, RootIndex::kTheHoleValue);
   // Cannot use check here as it attempts to generate call into runtime.
   __ b(eq, &okay);
   __ stop("Unexpected pending exception");
|
@ -55,7 +55,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
|
||||
|
||||
// Run the native code for the InternalArray function called as a normal
|
||||
// function.
|
||||
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
|
||||
__ LoadRoot(x2, RootIndex::kUndefinedValue);
|
||||
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
|
||||
RelocInfo::CODE_TARGET);
|
||||
}
|
||||
@@ -129,7 +129,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
     __ Claim(slot_count);

     // Preserve the incoming parameters on the stack.
-    __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(x10, RootIndex::kTheHoleValue);

     // Compute a pointer to the slot immediately above the location on the
     // stack to which arguments will be later copied.
@@ -249,7 +249,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

     // Else: use TheHoleValue as receiver for constructor call
     __ Bind(&not_create_implicit_receiver);
-    __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(x0, RootIndex::kTheHoleValue);

     // ----------- S t a t e -------------
     //  -- x0: receiver
@@ -342,7 +342,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     Label use_receiver, do_throw, leave_frame;

     // If the result is undefined, we jump out to using the implicit receiver.
-    __ CompareRoot(x0, Heap::kUndefinedValueRootIndex);
+    __ CompareRoot(x0, RootIndex::kUndefinedValue);
     __ B(eq, &use_receiver);

     // Otherwise we do a smi check and fall through to check if the return value
@@ -364,7 +364,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     // on-stack receiver as the result.
     __ Bind(&use_receiver);
     __ Peek(x0, 0 * kPointerSize);
-    __ CompareRoot(x0, Heap::kTheHoleValueRootIndex);
+    __ CompareRoot(x0, RootIndex::kTheHoleValue);
     __ B(eq, &do_throw);

     __ Bind(&leave_frame);
@@ -425,7 +425,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
+  __ CompareRoot(sp, RootIndex::kRealStackLimit);
   __ B(lo, &stack_overflow);

   // Get number of arguments for generator function.
@@ -508,7 +508,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     // Push hole as receiver since we do not use it for stepping.
-    __ LoadRoot(x5, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(x5, RootIndex::kTheHoleValue);
     __ Push(x1, padreg, x4, x5);
     __ CallRuntime(Runtime::kDebugOnFunctionCall);
     __ Pop(padreg, x1);
@@ -543,7 +543,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
   // We are not trying to catch interruptions (e.g. debug break and
   // preemption) here, so the "real stack limit" is checked.
   Label enough_stack_space;
-  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
   // Make scratch the space we have left. The stack might already be overflowed
   // here which will cause scratch to become negative.
   __ Sub(scratch, sp, scratch);
@@ -639,7 +639,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   // Initialize all JavaScript callee-saved registers, since they will be seen
   // by the garbage collector as part of handlers.
   // The original values have been saved in JSEntryStub::GenerateBody().
-  __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(x19, RootIndex::kUndefinedValue);
   __ Mov(x20, x19);
   __ Mov(x21, x19);
   __ Mov(x22, x19);
@@ -957,7 +957,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // Do a stack check to ensure we don't go over the limit.
     Label ok;
     __ Sub(x10, sp, Operand(x11));
-    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
+    __ CompareRoot(x10, RootIndex::kRealStackLimit);
     __ B(hs, &ok);
     __ CallRuntime(Runtime::kThrowStackOverflow);
     __ Bind(&ok);
@@ -966,7 +966,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     // Note: there should always be at least one stack slot for the return
     // register in the register file.
     Label loop_header;
-    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(x10, RootIndex::kUndefinedValue);
     __ Lsr(x11, x11, kPointerSizeLog2);
     // Round up the number of registers to a multiple of 2, to align the stack
     // to 16 bytes.
@@ -988,7 +988,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ Bind(&no_incoming_new_target_or_generator_register);

   // Load accumulator with undefined.
-  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

   // Load the dispatch table into a register and dispatch to the bytecode
   // handler at the current bytecode offset.
@@ -1081,7 +1081,7 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm,
   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
     // Store "undefined" as the receiver arg if we need to.
     Register receiver = x14;
-    __ LoadRoot(receiver, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(receiver, RootIndex::kUndefinedValue);
     __ SlotAddress(stack_addr, num_args);
     __ Str(receiver, MemOperand(stack_addr));
     __ Mov(slots_to_copy, num_args);
@@ -1300,7 +1300,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
   Register scratch1 = x12;
   Register scratch2 = x13;
   Register scratch3 = x14;
-  __ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(undef, RootIndex::kUndefinedValue);

   Label at_least_one_arg;
   Label three_args;
@@ -1511,8 +1511,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   Register undefined_value = x3;
   Register null_value = x4;

-  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
-  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
+  __ LoadRoot(null_value, RootIndex::kNullValue);

   // 1. Load receiver into x1, argArray into x2 (if present), remove all
   // arguments from the stack (including the receiver), and push thisArg (if
@@ -1594,7 +1594,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   Label non_zero;
   Register scratch = x10;
   __ Cbnz(argc, &non_zero);
-  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
   // Overwrite receiver with undefined, which will be the new receiver.
   // We do not need to overwrite the padding slot above it with anything.
   __ Poke(scratch, 0);
@@ -1651,7 +1651,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   Register this_argument = x4;
   Register undefined_value = x3;

-  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

   // 1. Load target into x1 (if present), argumentsList into x2 (if present),
   // remove all arguments from the stack (including the receiver), and push
@@ -1728,7 +1728,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   Register new_target = x3;
   Register undefined_value = x4;

-  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

   // 1. Load target into x1 (if present), argumentsList into x2 (if present),
   // new.target into x3 (if present, otherwise use target), remove all
@@ -1923,7 +1923,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     // Check the stack for overflow. We are not trying to catch interruptions
     // (i.e. debug break and preemption) here, so check the "real stack limit".
     Label done;
-    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
+    __ LoadRoot(x10, RootIndex::kRealStackLimit);
     // Make x10 the space we have left. The stack might already be overflowed
     // here which will cause x10 to become negative.
     __ Sub(x10, sp, x10);
@@ -1948,8 +1948,8 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     Register undefined_value = x12;
     Register scratch = x13;
     __ Add(src, arguments_list, FixedArray::kHeaderSize - kHeapObjectTag);
-    __ LoadRoot(the_hole_value, Heap::kTheHoleValueRootIndex);
-    __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(the_hole_value, RootIndex::kTheHoleValue);
+    __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
     // We do not use the CompareRoot macro as it would do a LoadRoot behind the
     // scenes and we want to avoid that in a loop.
     // TODO(all): Consider using Ldp and Stp.
@@ -2106,9 +2106,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   __ B(hs, &done_convert);
   if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
     Label convert_global_proxy;
-    __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
-                  &convert_global_proxy);
-    __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
+    __ JumpIfRoot(x3, RootIndex::kUndefinedValue, &convert_global_proxy);
+    __ JumpIfNotRoot(x3, RootIndex::kNullValue, &convert_to_object);
     __ Bind(&convert_global_proxy);
     {
       // Patch receiver to global proxy.
@@ -2196,7 +2195,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
       // (i.e. debug break and preemption) here, so check the "real stack
       // limit".
       Label done;
-      __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
+      __ LoadRoot(x10, RootIndex::kRealStackLimit);
       // Make x10 the space we have left. The stack might already be overflowed
       // here which will cause x10 to become negative.
       __ Sub(x10, sp, x10);
@@ -2364,7 +2363,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

   // Calling convention for function specific ConstructStubs require
   // x2 to contain either an AllocationSite or undefined.
-  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(x2, RootIndex::kUndefinedValue);

   Label call_generic_stub;

@@ -2571,7 +2570,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     // Fill the remaining expected arguments with undefined.
     __ RecordComment("-- Fill slots with undefined --");
     __ Sub(copy_end, copy_to, Operand(scratch1, LSL, kPointerSizeLog2));
-    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(scratch1, RootIndex::kUndefinedValue);

     Label fill;
     __ Bind(&fill);
@@ -2841,7 +2840,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

   // Check result for exception sentinel.
   Label exception_returned;
-  __ CompareRoot(result, Heap::kExceptionRootIndex);
+  __ CompareRoot(result, RootIndex::kException);
   __ B(eq, &exception_returned);

   // The call succeeded, so unwind the stack and return.
@@ -61,7 +61,7 @@ type LanguageMode generates 'TNode<Smi>' constexpr 'LanguageMode';
 type ExtractFixedArrayFlags generates
     'TNode<Smi>' constexpr 'ExtractFixedArrayFlags';
 type ParameterMode generates 'TNode<Int32T>' constexpr 'ParameterMode';
-type RootListIndex generates 'TNode<Int32T>' constexpr 'Heap::RootListIndex';
+type RootIndex generates 'TNode<Int32T>' constexpr 'RootIndex';
 type WriteBarrierMode generates 'TNode<Int32T>' constexpr 'WriteBarrierMode';

 type MessageTemplate constexpr 'MessageTemplate::Template';
@@ -114,10 +114,10 @@ const kAllFixedArrays: constexpr ExtractFixedArrayFlags generates
 const kFixedArrays: constexpr ExtractFixedArrayFlags generates
     'ExtractFixedArrayFlag::kFixedArrays';

-const kFixedCOWArrayMapRootIndex: constexpr RootListIndex generates
-    'Heap::kFixedCOWArrayMapRootIndex';
-const kEmptyFixedArrayRootIndex: constexpr RootListIndex generates
-    'Heap::kEmptyFixedArrayRootIndex';
+const kFixedCOWArrayMapRootIndex: constexpr RootIndex generates
+    'RootIndex::kFixedCOWArrayMap';
+const kEmptyFixedArrayRootIndex: constexpr RootIndex generates
+    'RootIndex::kEmptyFixedArray';

 const kInvalidArrayLength: constexpr MessageTemplate generates
     'MessageTemplate::kInvalidArrayLength';
@@ -214,9 +214,9 @@ extern runtime NormalizeElements(Context, JSObject);
 extern runtime TransitionElementsKindWithKind(Context, JSObject, Smi);
 extern runtime CreateDataProperty(Context, JSReceiver, Object, Object);

-extern macro LoadRoot(constexpr RootListIndex): Object;
-extern macro StoreRoot(constexpr RootListIndex, Object): Object;
-extern macro LoadAndUntagToWord32Root(constexpr RootListIndex): int32;
+extern macro LoadRoot(constexpr RootIndex): Object;
+extern macro StoreRoot(constexpr RootIndex, Object): Object;
+extern macro LoadAndUntagToWord32Root(constexpr RootIndex): int32;

 extern runtime StringEqual(Context, String, String): Oddball;
 extern builtin StringLessThan(Context, String, String): Boolean;
@@ -89,7 +89,7 @@ ArgumentsBuiltinsAssembler::AllocateArgumentsObject(Node* map,
   Node* result = Allocate(size);
   Comment("Initialize arguments object");
   StoreMapNoWriteBarrier(result, map);
-  Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
+  Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
   StoreObjectField(result, JSArray::kPropertiesOrHashOffset, empty_fixed_array);
   Node* smi_arguments_count = ParameterToTagged(arguments_count, mode);
   StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset,
@@ -99,7 +99,7 @@ ArgumentsBuiltinsAssembler::AllocateArgumentsObject(Node* map,
     arguments = InnerAllocate(result, elements_offset);
     StoreObjectFieldNoWriteBarrier(arguments, FixedArray::kLengthOffset,
                                    smi_arguments_count);
-    Node* fixed_array_map = LoadRoot(Heap::kFixedArrayMapRootIndex);
+    Node* fixed_array_map = LoadRoot(RootIndex::kFixedArrayMap);
     StoreMapNoWriteBarrier(arguments, fixed_array_map);
   }
   Node* parameter_map = nullptr;
@@ -110,7 +110,7 @@ ArgumentsBuiltinsAssembler::AllocateArgumentsObject(Node* map,
     StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset,
                                    parameter_map);
     Node* sloppy_elements_map =
-        LoadRoot(Heap::kSloppyArgumentsElementsMapRootIndex);
+        LoadRoot(RootIndex::kSloppyArgumentsElementsMap);
     StoreMapNoWriteBarrier(parameter_map, sloppy_elements_map);
     parameter_map_count = ParameterToTagged(parameter_map_count, mode);
     StoreObjectFieldNoWriteBarrier(parameter_map, FixedArray::kLengthOffset,
@@ -965,8 +965,7 @@ TF_BUILTIN(ArrayPrototypePop, CodeStubAssembler) {

     // 3) Check that the elements backing store isn't copy-on-write.
     Node* elements = LoadElements(array_receiver);
-    GotoIf(WordEqual(LoadMap(elements),
-                     LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
+    GotoIf(WordEqual(LoadMap(elements), LoadRoot(RootIndex::kFixedCOWArrayMap)),
            &runtime);

     Node* new_length = IntPtrSub(length, IntPtrConstant(1));
@@ -1541,8 +1540,7 @@ TF_BUILTIN(ArrayPrototypeShift, CodeStubAssembler) {

     // 3) Check that the elements backing store isn't copy-on-write.
     Node* elements = LoadElements(array_receiver);
-    GotoIf(WordEqual(LoadMap(elements),
-                     LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
+    GotoIf(WordEqual(LoadMap(elements), LoadRoot(RootIndex::kFixedCOWArrayMap)),
            &runtime);

     Node* new_length = IntPtrSub(length, IntPtrConstant(1));
@@ -275,11 +275,11 @@ void AsyncBuiltinsAssembler::InitializeNativeClosure(Node* context,
   STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
   StoreMapNoWriteBarrier(function, function_map);
   StoreObjectFieldRoot(function, JSObject::kPropertiesOrHashOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(function, JSObject::kElementsOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(function, JSFunction::kFeedbackCellOffset,
-                       Heap::kManyClosuresCellRootIndex);
+                       RootIndex::kManyClosuresCell);

   Node* shared_info = LoadContextElement(native_context, context_index);
   CSA_ASSERT(this, IsSharedFunctionInfo(shared_info));
@@ -207,7 +207,7 @@ Node* AsyncGeneratorBuiltinsAssembler::AllocateAsyncGeneratorRequest(
     Node* promise) {
   CSA_SLOW_ASSERT(this, HasInstanceType(promise, JS_PROMISE_TYPE));
   Node* request = Allocate(AsyncGeneratorRequest::kSize);
-  StoreMapNoWriteBarrier(request, Heap::kAsyncGeneratorRequestMapRootIndex);
+  StoreMapNoWriteBarrier(request, RootIndex::kAsyncGeneratorRequestMap);
   StoreObjectFieldNoWriteBarrier(request, AsyncGeneratorRequest::kNextOffset,
                                  UndefinedConstant());
   StoreObjectFieldNoWriteBarrier(request,
@@ -218,7 +218,7 @@ Node* AsyncGeneratorBuiltinsAssembler::AllocateAsyncGeneratorRequest(
   StoreObjectFieldNoWriteBarrier(request, AsyncGeneratorRequest::kPromiseOffset,
                                  promise);
   StoreObjectFieldRoot(request, AsyncGeneratorRequest::kNextOffset,
-                       Heap::kUndefinedValueRootIndex);
+                       RootIndex::kUndefinedValue);
   return request;
 }

@@ -519,9 +519,9 @@ TF_BUILTIN(AsyncGeneratorResolve, AsyncGeneratorBuiltinsAssembler) {
                                    Context::ITERATOR_RESULT_MAP_INDEX);
   StoreMapNoWriteBarrier(iter_result, map);
   StoreObjectFieldRoot(iter_result, JSIteratorResult::kPropertiesOrHashOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(iter_result, JSIteratorResult::kElementsOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldNoWriteBarrier(iter_result, JSIteratorResult::kValueOffset,
                                  value);
   StoreObjectFieldNoWriteBarrier(iter_result, JSIteratorResult::kDoneOffset,
@@ -308,7 +308,7 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithSpread(
     // Check that the Array.prototype hasn't been modified in a way that would
     // affect iteration.
     TNode<PropertyCell> protector_cell =
-        CAST(LoadRoot(Heap::kArrayIteratorProtectorRootIndex));
+        CAST(LoadRoot(RootIndex::kArrayIteratorProtector));
     GotoIf(WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
                      SmiConstant(Isolate::kProtectorInvalid)),
            &if_generic);
@@ -731,9 +731,9 @@ Node* CollectionsBuiltinsAssembler::AllocateJSCollectionIterator(
   Node* const iterator = AllocateInNewSpace(IteratorType::kSize);
   StoreMapNoWriteBarrier(iterator, iterator_map);
   StoreObjectFieldRoot(iterator, IteratorType::kPropertiesOrHashOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(iterator, IteratorType::kElementsOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldNoWriteBarrier(iterator, IteratorType::kTableOffset, table);
   StoreObjectFieldNoWriteBarrier(iterator, IteratorType::kIndexOffset,
                                  SmiConstant(0));
@@ -1700,7 +1700,7 @@ TF_BUILTIN(MapIteratorPrototypeNext, CollectionsBuiltinsAssembler) {
   BIND(&return_end);
   {
     StoreObjectFieldRoot(receiver, JSMapIterator::kTableOffset,
-                         Heap::kEmptyOrderedHashMapRootIndex);
+                         RootIndex::kEmptyOrderedHashMap);
     Goto(&return_value);
   }
 }
@@ -1908,7 +1908,7 @@ TF_BUILTIN(SetIteratorPrototypeNext, CollectionsBuiltinsAssembler) {
   BIND(&return_end);
   {
     StoreObjectFieldRoot(receiver, JSSetIterator::kTableOffset,
-                         Heap::kEmptyOrderedHashSetRootIndex);
+                         RootIndex::kEmptyOrderedHashSet);
     Goto(&return_value);
   }
 }
@@ -2063,8 +2063,8 @@ TNode<Object> WeakCollectionsBuiltinsAssembler::AllocateTable(
   TNode<FixedArray> table = CAST(
       AllocateFixedArray(HOLEY_ELEMENTS, length, kAllowLargeObjectAllocation));

-  Heap::RootListIndex map_root_index = static_cast<Heap::RootListIndex>(
-      EphemeronHashTableShape::GetMapRootIndex());
+  RootIndex map_root_index =
+      static_cast<RootIndex>(EphemeronHashTableShape::GetMapRootIndex());
   StoreMapNoWriteBarrier(table, map_root_index);
   StoreFixedArrayElement(table, EphemeronHashTable::kNumberOfElementsIndex,
                          SmiConstant(0), SKIP_WRITE_BARRIER);
@@ -2076,7 +2076,7 @@ TNode<Object> WeakCollectionsBuiltinsAssembler::AllocateTable(

   TNode<IntPtrT> start = KeyIndexFromEntry(IntPtrConstant(0));
   FillFixedArrayWithValue(HOLEY_ELEMENTS, table, start, length,
-                          Heap::kUndefinedValueRootIndex);
+                          RootIndex::kUndefinedValue);
   return table;
 }

@@ -77,11 +77,11 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
   Goto(&cell_done);

   BIND(&no_closures);
-  StoreMapNoWriteBarrier(feedback_cell, Heap::kOneClosureCellMapRootIndex);
+  StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
   Goto(&cell_done);

   BIND(&one_closure);
-  StoreMapNoWriteBarrier(feedback_cell, Heap::kManyClosuresCellMapRootIndex);
+  StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
   Goto(&cell_done);

   BIND(&cell_done);
@@ -116,9 +116,9 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {

   // Initialize the rest of the function.
   StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(result, JSObject::kElementsOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   {
     // Set function prototype if necessary.
     Label done(this), init_prototype(this);
@@ -127,7 +127,7 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {

     BIND(&init_prototype);
     StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
-                         Heap::kTheHoleValueRootIndex);
+                         RootIndex::kTheHoleValue);
     Goto(&done);
     BIND(&done);
   }
@@ -236,13 +236,13 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
   TNode<Context> function_context =
       UncheckedCast<Context>(AllocateInNewSpace(size));

-  Heap::RootListIndex context_type;
+  RootIndex context_type;
   switch (scope_type) {
     case EVAL_SCOPE:
-      context_type = Heap::kEvalContextMapRootIndex;
+      context_type = RootIndex::kEvalContextMap;
       break;
     case FUNCTION_SCOPE:
-      context_type = Heap::kFunctionContextMapRootIndex;
+      context_type = RootIndex::kFunctionContextMap;
       break;
     default:
       UNREACHABLE();
@@ -434,9 +434,9 @@ TF_BUILTIN(ToObject, CodeStubAssembler) {
   Node* js_value = Allocate(JSValue::kSize);
   StoreMapNoWriteBarrier(js_value, initial_map);
   StoreObjectFieldRoot(js_value, JSValue::kPropertiesOrHashOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectFieldRoot(js_value, JSObject::kElementsOffset,
-                       Heap::kEmptyFixedArrayRootIndex);
+                       RootIndex::kEmptyFixedArray);
   StoreObjectField(js_value, JSValue::kValueOffset, object);
   Return(js_value);

@@ -193,11 +193,11 @@ TF_BUILTIN(DatePrototypeToPrimitive, CodeStubAssembler) {
       hint_is_invalid(this, Label::kDeferred);

   // Fast cases for internalized strings.
-  Node* number_string = LoadRoot(Heap::knumber_stringRootIndex);
+  Node* number_string = LoadRoot(RootIndex::knumber_string);
   GotoIf(WordEqual(hint, number_string), &hint_is_number);
-  Node* default_string = LoadRoot(Heap::kdefault_stringRootIndex);
+  Node* default_string = LoadRoot(RootIndex::kdefault_string);
   GotoIf(WordEqual(hint, default_string), &hint_is_string);
-  Node* string_string = LoadRoot(Heap::kstring_stringRootIndex);
+  Node* string_string = LoadRoot(RootIndex::kstring_string);
   GotoIf(WordEqual(hint, string_string), &hint_is_string);

   // Slow-case with actual string comparisons.
@@ -62,7 +62,7 @@ TF_BUILTIN(FastFunctionPrototypeBind, CodeStubAssembler) {
   const int length_index = JSFunction::kLengthDescriptorIndex;
   TNode<Name> maybe_length = CAST(LoadWeakFixedArrayElement(
       descriptors, DescriptorArray::ToKeyIndex(length_index)));
-  GotoIf(WordNotEqual(maybe_length, LoadRoot(Heap::klength_stringRootIndex)),
+  GotoIf(WordNotEqual(maybe_length, LoadRoot(RootIndex::klength_string)),
          &slow);

   TNode<Object> maybe_length_accessor = CAST(LoadWeakFixedArrayElement(
@@ -74,8 +74,7 @@ TF_BUILTIN(FastFunctionPrototypeBind, CodeStubAssembler) {
   const int name_index = JSFunction::kNameDescriptorIndex;
   TNode<Name> maybe_name = CAST(LoadWeakFixedArrayElement(
       descriptors, DescriptorArray::ToKeyIndex(name_index)));
-  GotoIf(WordNotEqual(maybe_name, LoadRoot(Heap::kname_stringRootIndex)),
-         &slow);
+  GotoIf(WordNotEqual(maybe_name, LoadRoot(RootIndex::kname_string)), &slow);

   TNode<Object> maybe_name_accessor = CAST(LoadWeakFixedArrayElement(
       descriptors, DescriptorArray::ToValueIndex(name_index)));
@@ -474,7 +474,7 @@ class DeletePropertyBaseAssembler : public AccessorAssembler {
                          dont_delete);
     // Overwrite the entry itself (see NameDictionary::SetEntry).
     TNode<HeapObject> filler = TheHoleConstant();
-    DCHECK(Heap::RootIsImmortalImmovable(Heap::kTheHoleValueRootIndex));
+    DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kTheHoleValue));
     StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
     StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                          SKIP_WRITE_BARRIER);
@ -725,7 +725,7 @@ TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
|
||||
|
||||
TNode<MicrotaskQueue> InternalBuiltinsAssembler::GetDefaultMicrotaskQueue() {
|
||||
return TNode<MicrotaskQueue>::UncheckedCast(
|
||||
LoadRoot(Heap::kDefaultMicrotaskQueueRootIndex));
|
||||
LoadRoot(RootIndex::kDefaultMicrotaskQueue));
|
||||
}
|
||||
|
||||
TNode<IntPtrT> InternalBuiltinsAssembler::GetPendingMicrotaskCount(
|
||||
@ -872,7 +872,7 @@ TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
|
||||
StoreFixedArrayElement(new_queue, num_tasks, microtask,
|
||||
SKIP_WRITE_BARRIER);
|
||||
FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
|
||||
new_queue_length, Heap::kUndefinedValueRootIndex);
|
||||
new_queue_length, RootIndex::kUndefinedValue);
|
||||
SetQueuedMicrotasks(microtask_queue, new_queue);
|
||||
Goto(&done);
|
||||
}
|
||||
@ -886,7 +886,7 @@ TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
|
||||
CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks);
|
||||
StoreFixedArrayElement(new_queue, num_tasks, microtask);
|
||||
FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
|
||||
new_queue_length, Heap::kUndefinedValueRootIndex);
|
||||
new_queue_length, RootIndex::kUndefinedValue);
|
||||
SetQueuedMicrotasks(microtask_queue, new_queue);
|
||||
Goto(&done);
|
||||
}
|
||||
|
@ -298,7 +298,7 @@ TNode<JSArray> ObjectEntriesValuesBuiltinsAssembler::FastGetOwnValuesOrEntries(
// So the array filled by the-hole even if enum_cache exists.
FillFixedArrayWithValue(PACKED_ELEMENTS, values_or_entries,
IntPtrConstant(0), object_enum_length,
Heap::kTheHoleValueRootIndex);
RootIndex::kTheHoleValue);

TVARIABLE(IntPtrT, var_result_index, IntPtrConstant(0));
TVARIABLE(IntPtrT, var_descriptor_number, IntPtrConstant(0));

@ -877,13 +877,13 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {

BIND(&if_arguments);
{
var_default.Bind(LoadRoot(Heap::karguments_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::karguments_to_string));
Goto(&checkstringtag);
}

BIND(&if_array);
{
var_default.Bind(LoadRoot(Heap::karray_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::karray_to_string));
Goto(&checkstringtag);
}

@ -896,26 +896,26 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
boolean_constructor, JSFunction::kPrototypeOrInitialMapOffset);
Node* boolean_prototype =
LoadObjectField(boolean_initial_map, Map::kPrototypeOffset);
var_default.Bind(LoadRoot(Heap::kboolean_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kboolean_to_string));
var_holder.Bind(boolean_prototype);
Goto(&checkstringtag);
}

BIND(&if_date);
{
var_default.Bind(LoadRoot(Heap::kdate_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kdate_to_string));
Goto(&checkstringtag);
}

BIND(&if_error);
{
var_default.Bind(LoadRoot(Heap::kerror_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kerror_to_string));
Goto(&checkstringtag);
}

BIND(&if_function);
{
var_default.Bind(LoadRoot(Heap::kfunction_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kfunction_to_string));
Goto(&checkstringtag);
}

@ -928,7 +928,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
number_constructor, JSFunction::kPrototypeOrInitialMapOffset);
Node* number_prototype =
LoadObjectField(number_initial_map, Map::kPrototypeOffset);
var_default.Bind(LoadRoot(Heap::knumber_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::knumber_to_string));
var_holder.Bind(number_prototype);
Goto(&checkstringtag);
}

@ -936,7 +936,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
BIND(&if_object);
{
CSA_ASSERT(this, IsJSReceiver(receiver));
var_default.Bind(LoadRoot(Heap::kobject_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kobject_to_string));
Goto(&checkstringtag);
}

@ -951,10 +951,10 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
GotoIf(IsSymbolMap(receiver_map), &if_symbol);
GotoIf(IsUndefined(receiver), &return_undefined);
CSA_ASSERT(this, IsNull(receiver));
Return(LoadRoot(Heap::knull_to_stringRootIndex));
Return(LoadRoot(RootIndex::knull_to_string));

BIND(&return_undefined);
Return(LoadRoot(Heap::kundefined_to_stringRootIndex));
Return(LoadRoot(RootIndex::kundefined_to_string));
}

BIND(&if_proxy);

@ -968,12 +968,12 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
CallRuntime(Runtime::kArrayIsArray, context, receiver);
TNode<String> builtin_tag = Select<String>(
IsTrue(receiver_is_array),
[=] { return CAST(LoadRoot(Heap::kArray_stringRootIndex)); },
[=] { return CAST(LoadRoot(RootIndex::kArray_string)); },
[=] {
return Select<String>(
IsCallableMap(receiver_map),
[=] { return CAST(LoadRoot(Heap::kFunction_stringRootIndex)); },
[=] { return CAST(LoadRoot(Heap::kObject_stringRootIndex)); });
[=] { return CAST(LoadRoot(RootIndex::kFunction_string)); },
[=] { return CAST(LoadRoot(RootIndex::kObject_string)); });
});

// Lookup the @@toStringTag property on the {receiver}.

@ -994,7 +994,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {

BIND(&if_regexp);
{
var_default.Bind(LoadRoot(Heap::kregexp_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kregexp_to_string));
Goto(&checkstringtag);
}

@ -1007,7 +1007,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
string_constructor, JSFunction::kPrototypeOrInitialMapOffset);
Node* string_prototype =
LoadObjectField(string_initial_map, Map::kPrototypeOffset);
var_default.Bind(LoadRoot(Heap::kstring_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kstring_to_string));
var_holder.Bind(string_prototype);
Goto(&checkstringtag);
}

@ -1021,7 +1021,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
symbol_constructor, JSFunction::kPrototypeOrInitialMapOffset);
Node* symbol_prototype =
LoadObjectField(symbol_initial_map, Map::kPrototypeOffset);
var_default.Bind(LoadRoot(Heap::kobject_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kobject_to_string));
var_holder.Bind(symbol_prototype);
Goto(&checkstringtag);
}

@ -1035,7 +1035,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
bigint_constructor, JSFunction::kPrototypeOrInitialMapOffset);
Node* bigint_prototype =
LoadObjectField(bigint_initial_map, Map::kPrototypeOffset);
var_default.Bind(LoadRoot(Heap::kobject_to_stringRootIndex));
var_default.Bind(LoadRoot(RootIndex::kobject_to_string));
var_holder.Bind(bigint_prototype);
Goto(&checkstringtag);
}

@ -1078,7 +1078,7 @@ TF_BUILTIN(ObjectPrototypeToString, ObjectBuiltinsAssembler) {
BIND(&return_generic);
{
Node* tag = GetProperty(context, ToObject(context, receiver),
LoadRoot(Heap::kto_string_tag_symbolRootIndex));
LoadRoot(RootIndex::kto_string_tag_symbol));
GotoIf(TaggedIsSmi(tag), &return_default);
GotoIfNot(IsString(tag), &return_default);
ReturnToStringFormat(context, tag);

@ -1192,7 +1192,7 @@ TF_BUILTIN(ObjectCreate, ObjectBuiltinsAssembler) {
GotoIf(IsSpecialReceiverMap(properties_map), &call_runtime);
// Stay on the fast path only if there are no elements.
GotoIfNot(WordEqual(LoadElements(properties),
LoadRoot(Heap::kEmptyFixedArrayRootIndex)),
LoadRoot(RootIndex::kEmptyFixedArray)),
&call_runtime);
// Handle dictionary objects or fast objects with properties in runtime.
Node* bit_field3 = LoadMapBitField3(properties_map);

@ -1345,8 +1345,7 @@ TF_BUILTIN(CreateGeneratorObject, ObjectBuiltinsAssembler) {
formal_parameter_count);
Node* parameters_and_registers = AllocateFixedArray(HOLEY_ELEMENTS, size);
FillFixedArrayWithValue(HOLEY_ELEMENTS, parameters_and_registers,
IntPtrConstant(0), size,
Heap::kUndefinedValueRootIndex);
IntPtrConstant(0), size, RootIndex::kUndefinedValue);
// TODO(cbruni): support start_offset to avoid double initialization.
Node* result = AllocateJSObjectFromMap(maybe_map, nullptr, nullptr, kNone,
kWithSlackTracking);
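For readers outside V8, the FillFixedArrayWithValue calls above all follow one pattern: the filler is identified by a root index and resolved to an actual value once, inside the helper, before the fill loop runs. A generic, self-contained analogue (everything below is illustrative; none of it is V8's real API):

#include <cstdint>
#include <vector>

// Callers name the filler by its root index; the helper resolves the index
// to a concrete value exactly once.
enum class RootIndex : uint16_t { kUndefinedValue, kTheHoleValue };

struct Heap {
  intptr_t undefined_value = 0;
  intptr_t the_hole_value = -1;
  intptr_t Resolve(RootIndex index) const {
    return index == RootIndex::kUndefinedValue ? undefined_value
                                               : the_hole_value;
  }
};

void FillWithRoot(const Heap& heap, std::vector<intptr_t>& backing,
                  size_t from, size_t to, RootIndex filler) {
  const intptr_t value = heap.Resolve(filler);  // one lookup, many stores
  for (size_t i = from; i < to; ++i) backing[i] = value;
}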
@ -28,9 +28,9 @@ Node* PromiseBuiltinsAssembler::AllocateJSPromise(Node* context) {
Node* const promise = Allocate(JSPromise::kSizeWithEmbedderFields);
StoreMapNoWriteBarrier(promise, promise_map);
StoreObjectFieldRoot(promise, JSPromise::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(promise, JSPromise::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
return promise;
}

@ -137,7 +137,7 @@ TF_BUILTIN(NewPromiseCapability, PromiseBuiltinsAssembler) {
CreatePromiseResolvingFunctions(promise, debug_event, native_context);

Node* capability = Allocate(PromiseCapability::kSize);
StoreMapNoWriteBarrier(capability, Heap::kPromiseCapabilityMapRootIndex);
StoreMapNoWriteBarrier(capability, RootIndex::kPromiseCapabilityMap);
StoreObjectFieldNoWriteBarrier(capability,
PromiseCapability::kPromiseOffset, promise);
StoreObjectFieldNoWriteBarrier(capability,

@ -150,13 +150,13 @@ TF_BUILTIN(NewPromiseCapability, PromiseBuiltinsAssembler) {
BIND(&if_slow_promise_capability);
{
Node* capability = Allocate(PromiseCapability::kSize);
StoreMapNoWriteBarrier(capability, Heap::kPromiseCapabilityMapRootIndex);
StoreMapNoWriteBarrier(capability, RootIndex::kPromiseCapabilityMap);
StoreObjectFieldRoot(capability, PromiseCapability::kPromiseOffset,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);
StoreObjectFieldRoot(capability, PromiseCapability::kResolveOffset,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);
StoreObjectFieldRoot(capability, PromiseCapability::kRejectOffset,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);

Node* executor_context =
CreatePromiseGetCapabilitiesExecutorContext(capability, native_context);

@ -352,7 +352,7 @@ void PromiseBuiltinsAssembler::PerformPromiseThen(

BIND(&if_fulfilled);
{
var_map.Bind(LoadRoot(Heap::kPromiseFulfillReactionJobTaskMapRootIndex));
var_map.Bind(LoadRoot(RootIndex::kPromiseFulfillReactionJobTaskMap));
var_handler.Bind(on_fulfilled);
Goto(&enqueue);
}

@ -360,7 +360,7 @@ void PromiseBuiltinsAssembler::PerformPromiseThen(
BIND(&if_rejected);
{
CSA_ASSERT(this, IsPromiseStatus(status, v8::Promise::kRejected));
var_map.Bind(LoadRoot(Heap::kPromiseRejectReactionJobTaskMapRootIndex));
var_map.Bind(LoadRoot(RootIndex::kPromiseRejectReactionJobTaskMap));
var_handler.Bind(on_rejected);
GotoIf(PromiseHasHandler(promise), &enqueue);
CallRuntime(Runtime::kPromiseRevokeReject, context, promise);

@ -401,7 +401,7 @@ Node* PromiseBuiltinsAssembler::AllocatePromiseReaction(
Node* next, Node* promise_or_capability, Node* fulfill_handler,
Node* reject_handler) {
Node* const reaction = Allocate(PromiseReaction::kSize);
StoreMapNoWriteBarrier(reaction, Heap::kPromiseReactionMapRootIndex);
StoreMapNoWriteBarrier(reaction, RootIndex::kPromiseReactionMap);
StoreObjectFieldNoWriteBarrier(reaction, PromiseReaction::kNextOffset, next);
StoreObjectFieldNoWriteBarrier(reaction,
PromiseReaction::kPromiseOrCapabilityOffset,

@ -431,10 +431,10 @@ Node* PromiseBuiltinsAssembler::AllocatePromiseReactionJobTask(
}

Node* PromiseBuiltinsAssembler::AllocatePromiseReactionJobTask(
Heap::RootListIndex map_root_index, Node* context, Node* argument,
Node* handler, Node* promise_or_capability) {
DCHECK(map_root_index == Heap::kPromiseFulfillReactionJobTaskMapRootIndex ||
map_root_index == Heap::kPromiseRejectReactionJobTaskMapRootIndex);
RootIndex map_root_index, Node* context, Node* argument, Node* handler,
Node* promise_or_capability) {
DCHECK(map_root_index == RootIndex::kPromiseFulfillReactionJobTaskMap ||
map_root_index == RootIndex::kPromiseRejectReactionJobTaskMap);
Node* const map = LoadRoot(map_root_index);
return AllocatePromiseReactionJobTask(map, context, argument, handler,
promise_or_capability);

@ -444,7 +444,7 @@ Node* PromiseBuiltinsAssembler::AllocatePromiseResolveThenableJobTask(
Node* promise_to_resolve, Node* then, Node* thenable, Node* context) {
Node* const microtask = Allocate(PromiseResolveThenableJobTask::kSize);
StoreMapNoWriteBarrier(microtask,
Heap::kPromiseResolveThenableJobTaskMapRootIndex);
RootIndex::kPromiseResolveThenableJobTaskMap);
StoreObjectFieldNoWriteBarrier(
microtask, PromiseResolveThenableJobTask::kContextOffset, context);
StoreObjectFieldNoWriteBarrier(

@ -502,8 +502,8 @@ Node* PromiseBuiltinsAssembler::TriggerPromiseReactions(
// of stores here to avoid screwing up the store buffer.
STATIC_ASSERT(PromiseReaction::kSize == PromiseReactionJobTask::kSize);
if (type == PromiseReaction::kFulfill) {
StoreMapNoWriteBarrier(
current, Heap::kPromiseFulfillReactionJobTaskMapRootIndex);
StoreMapNoWriteBarrier(current,
RootIndex::kPromiseFulfillReactionJobTaskMap);
StoreObjectField(current, PromiseReactionJobTask::kArgumentOffset,
argument);
StoreObjectField(current, PromiseReactionJobTask::kContextOffset,

@ -516,7 +516,7 @@ Node* PromiseBuiltinsAssembler::TriggerPromiseReactions(
Node* handler =
LoadObjectField(current, PromiseReaction::kRejectHandlerOffset);
StoreMapNoWriteBarrier(current,
Heap::kPromiseRejectReactionJobTaskMapRootIndex);
RootIndex::kPromiseRejectReactionJobTaskMap);
StoreObjectField(current, PromiseReactionJobTask::kArgumentOffset,
argument);
StoreObjectField(current, PromiseReactionJobTask::kContextOffset,
@ -89,9 +89,8 @@ class PromiseBuiltinsAssembler : public CodeStubAssembler {
Node* AllocatePromiseReaction(Node* next, Node* promise_or_capability,
Node* fulfill_handler, Node* reject_handler);

Node* AllocatePromiseReactionJobTask(Heap::RootListIndex map_root_index,
Node* context, Node* argument,
Node* handler,
Node* AllocatePromiseReactionJobTask(RootIndex map_root_index, Node* context,
Node* argument, Node* handler,
Node* promise_or_capability);
Node* AllocatePromiseReactionJobTask(Node* map, Node* context, Node* argument,
Node* handler,
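The header keeps two overloads side by side, and the .cc hunk earlier shows how they relate: the RootIndex overload is a thin adapter that loads the named map and forwards to the Node* overload. Condensed from the diff above (surrounding class elided):

Node* PromiseBuiltinsAssembler::AllocatePromiseReactionJobTask(
    RootIndex map_root_index, Node* context, Node* argument, Node* handler,
    Node* promise_or_capability) {
  // The root index may only name the fulfill- or reject-reaction map; the
  // DCHECK in the definition above enforces exactly that.
  Node* const map = LoadRoot(map_root_index);
  return AllocatePromiseReactionJobTask(map, context, argument, handler,
                                        promise_or_capability);
}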
@ -58,7 +58,7 @@ Node* ProxiesCodeStubAssembler::AllocateProxy(Node* target, Node* handler,
Node* proxy = Allocate(JSProxy::kSize);
StoreMapNoWriteBarrier(proxy, map.value());
StoreObjectFieldRoot(proxy, JSProxy::kPropertiesOrHashOffset,
Heap::kEmptyPropertyDictionaryRootIndex);
RootIndex::kEmptyPropertyDictionary);
StoreObjectFieldNoWriteBarrier(proxy, JSProxy::kTargetOffset, target);
StoreObjectFieldNoWriteBarrier(proxy, JSProxy::kHandlerOffset, handler);

@ -124,7 +124,7 @@ Node* ProxiesCodeStubAssembler::AllocateJSArrayForCodeStubArguments(
Node* ProxiesCodeStubAssembler::CreateProxyRevokeFunctionContext(
Node* proxy, Node* native_context) {
Node* const context = Allocate(FixedArray::SizeFor(kProxyContextLength));
StoreMapNoWriteBarrier(context, Heap::kFunctionContextMapRootIndex);
StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
InitializeFunctionContext(native_context, context, kProxyContextLength);
StoreContextElementNoWriteBarrier(context, kProxySlot, proxy);
return context;

@ -230,9 +230,9 @@ TF_BUILTIN(ProxyRevocable, ProxiesCodeStubAssembler) {
native_context, Context::PROXY_REVOCABLE_RESULT_MAP_INDEX);
StoreMapNoWriteBarrier(result, result_map);
StoreObjectFieldRoot(result, JSProxyRevocableResult::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(result, JSProxyRevocableResult::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(result, JSProxyRevocableResult::kProxyOffset,
proxy);
StoreObjectFieldNoWriteBarrier(result, JSProxyRevocableResult::kRevokeOffset,
@ -81,13 +81,13 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::AllocateRegExpResult(
// Initialize the elements.

DCHECK(!IsDoubleElementsKind(elements_kind));
const Heap::RootListIndex map_index = Heap::kFixedArrayMapRootIndex;
const RootIndex map_index = RootIndex::kFixedArrayMap;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(elements, map_index);
StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);

FillFixedArrayWithValue(elements_kind, elements, IntPtrZero(), length_intptr,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);

return CAST(result);
}

@ -2109,9 +2109,9 @@ TNode<Object> RegExpBuiltinsAssembler::MatchAllIterator(
StoreMapNoWriteBarrier(iterator, map);
StoreObjectFieldRoot(iterator,
JSRegExpStringIterator::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(iterator, JSRegExpStringIterator::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);

// 5. Set iterator.[[IteratingRegExp]] to R.
StoreObjectFieldNoWriteBarrier(
@ -1800,7 +1800,7 @@ TNode<JSArray> StringBuiltinsAssembler::StringToArray(
TNode<RawPtrT> string_data = UncheckedCast<RawPtrT>(
to_direct.PointerToData(&fill_thehole_and_call_runtime));
TNode<IntPtrT> string_data_offset = to_direct.offset();
TNode<Object> cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
TNode<Object> cache = LoadRoot(RootIndex::kSingleCharacterStringCache);

BuildFastLoop(
IntPtrConstant(0), length,

@ -1832,7 +1832,7 @@ TNode<JSArray> StringBuiltinsAssembler::StringToArray(
BIND(&fill_thehole_and_call_runtime);
{
FillFixedArrayWithValue(PACKED_ELEMENTS, elements, IntPtrConstant(0),
length, Heap::kTheHoleValueRootIndex);
length, RootIndex::kTheHoleValue);
Goto(&call_runtime);
}
}

@ -2364,9 +2364,9 @@ TF_BUILTIN(StringPrototypeIterator, CodeStubAssembler) {
Node* iterator = Allocate(JSStringIterator::kSize);
StoreMapNoWriteBarrier(iterator, map);
StoreObjectFieldRoot(iterator, JSValue::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(iterator, JSObject::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(iterator, JSStringIterator::kStringOffset,
string);
Node* index = SmiConstant(0);

@ -39,7 +39,7 @@ BUILTIN(SymbolFor) {
Handle<String> key;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, key,
Object::ToString(isolate, key_obj));
return *isolate->SymbolFor(Heap::kPublicSymbolTableRootIndex, key, false);
return *isolate->SymbolFor(RootIndex::kPublicSymbolTable, key, false);
}

// ES6 section 19.4.2.5 Symbol.keyFor.
@ -178,7 +178,7 @@ TF_BUILTIN(TypedArrayInitialize, TypedArrayBuiltinsAssembler) {
Node* native_context = LoadNativeContext(context);
Node* map =
LoadContextElement(native_context, Context::ARRAY_BUFFER_MAP_INDEX);
Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);

Node* buffer = Allocate(JSArrayBuffer::kSizeWithEmbedderFields);
StoreMapNoWriteBarrier(buffer, map);

@ -19,14 +19,14 @@ BuiltinsConstantsTableBuilder::BuiltinsConstantsTableBuilder(Isolate* isolate)
// as a constant, which means that codegen will load it using the root
// register.
DCHECK(isolate_->heap()->RootCanBeTreatedAsConstant(
Heap::kEmptyFixedArrayRootIndex));
RootIndex::kEmptyFixedArray));
}

uint32_t BuiltinsConstantsTableBuilder::AddObject(Handle<Object> object) {
#ifdef DEBUG
// Roots must not be inserted into the constants table as they are already
// accessibly from the root list.
Heap::RootListIndex root_list_index;
RootIndex root_list_index;
DCHECK(!isolate_->heap()->IsRootHandle(object, &root_list_index));

// Not yet finalized.

@ -56,7 +56,7 @@ void BuiltinsConstantsTableBuilder::PatchSelfReference(
#ifdef DEBUG
// Roots must not be inserted into the constants table as they are already
// accessibly from the root list.
Heap::RootListIndex root_list_index;
RootIndex root_list_index;
DCHECK(!isolate_->heap()->IsRootHandle(code_object, &root_list_index));

// Not yet finalized.
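The debug-only pattern above is worth spelling out: IsRootHandle needs somewhere to write the index it finds, so these probe variables change type in the rename even though their value is never read. A condensed sketch of the pattern (the IsRootHandle semantics stated in the comment are an assumption inferred from its use):

#ifdef DEBUG
  // IsRootHandle(handle, &index) presumably returns true and fills `index`
  // only when `handle` aliases an entry in the roots table; the DCHECK
  // asserts the object is NOT a root, so `root_list_index` stays unused.
  RootIndex root_list_index;
  DCHECK(!isolate_->heap()->IsRootHandle(object, &root_list_index));
#endif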
@ -89,7 +89,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ SmiUntag(eax);

// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);

// Set up pointer to last argument.
__ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));

@ -159,7 +159,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ Push(esi);
__ Push(ecx);
__ Push(edi);
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ Push(edx);

// ----------- S t a t e -------------

@ -183,7 +183,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(eax, Heap::kTheHoleValueRootIndex);
__ LoadRoot(eax, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- eax: implicit receiver

@ -273,8 +273,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &use_receiver,
Label::kNear);
__ JumpIfRoot(eax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.

@ -296,7 +295,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ mov(eax, Operand(esp, 0 * kPointerSize));
__ JumpIfRoot(eax, Heap::kTheHoleValueRootIndex, &do_throw);
__ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);

__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.

@ -477,7 +476,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
__ CompareRoot(esp, ecx, RootIndex::kRealStackLimit);
__ j(below, &stack_overflow);

// Pop return address.

@ -548,7 +547,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ Push(edx);
__ Push(edi);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(edx);
__ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));

@ -893,7 +892,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator and bytecode offset into registers.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
__ mov(kInterpreterBytecodeOffsetRegister,
Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

@ -989,7 +988,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ sub(scratch, Immediate(1)); // Subtract one for receiver.
}

@ -1281,7 +1280,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done, Label::kNear);

@ -1404,7 +1403,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// Spill receiver to allow the usage of edi as a scratch register.
__ movd(xmm0, Operand(esp, eax, times_pointer_size, kPointerSize));

__ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(edx, RootIndex::kUndefinedValue);
__ mov(edi, edx);
__ test(eax, eax);
__ j(zero, &no_this_arg, Label::kNear);

@ -1438,9 +1437,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(edx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
__ JumpIfRoot(edx, Heap::kUndefinedValueRootIndex, &no_arguments,
Label::kNear);
__ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
__ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),

@ -1475,7 +1473,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
__ test(eax, eax);
__ j(not_zero, &done, Label::kNear);
__ PopReturnAddressTo(edx);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ PushReturnAddressFrom(edx);
__ inc(eax);
__ bind(&done);

@ -1518,7 +1516,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// thisArgument (if present) instead.
{
Label done;
__ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
__ LoadRoot(edi, RootIndex::kUndefinedValue);
__ mov(edx, edi);
__ mov(ebx, edi);
__ cmp(eax, Immediate(1));

@ -1569,7 +1567,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// (if present) instead.
{
Label done;
__ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
__ LoadRoot(edi, RootIndex::kUndefinedValue);
__ mov(edx, edi);
__ mov(ebx, edi);
__ cmp(eax, Immediate(1));

@ -1584,7 +1582,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
__ bind(&done);
__ PopReturnAddressTo(ecx);
__ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ PushReturnAddressFrom(ecx);
}

@ -1746,9 +1744,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Turn the hole into undefined as we go.
__ mov(edi, FieldOperand(kArgumentsList, eax, times_pointer_size,
FixedArray::kHeaderSize));
__ CompareRoot(edi, Heap::kTheHoleValueRootIndex);
__ CompareRoot(edi, RootIndex::kTheHoleValue);
__ j(not_equal, &push, Label::kNear);
__ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
__ LoadRoot(edi, RootIndex::kUndefinedValue);
__ bind(&push);
__ Push(edi);
__ inc(eax);

@ -1833,7 +1831,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
Label done;
__ LoadRoot(ecx, Heap::kRealStackLimitRootIndex);
__ LoadRoot(ecx, RootIndex::kRealStackLimit);
// Make ecx the space we have left. The stack might already be
// overflowed here which will cause ecx to become negative.
__ neg(ecx);

@ -1920,9 +1918,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
&convert_global_proxy, Label::kNear);
__ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
__ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
Label::kNear);
__ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
Label::kNear);
__ bind(&convert_global_proxy);
{

@ -2014,7 +2012,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
__ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
__ CompareRoot(esp, ecx, RootIndex::kRealStackLimit);
__ j(above_equal, &done, Label::kNear);
// Restore the stack pointer.
__ lea(esp, Operand(esp, edx, times_pointer_size, 0));

@ -2157,14 +2155,14 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// ecx to contain either an AllocationSite or undefined.
__ LoadRoot(ecx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(ecx, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
RelocInfo::CODE_TARGET);

__ bind(&call_generic_stub);
// Calling convention for function specific ConstructStubs require
// ecx to contain either an AllocationSite or undefined.
__ LoadRoot(ecx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(ecx, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
RelocInfo::CODE_TARGET);
}
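From here on the CL walks the per-architecture builtins, and the edits are uniform because every MacroAssembler root helper now takes the scoped enum. Plausible post-rename declarations, for orientation only (illustrative; exact argument lists vary by architecture, as the three-operand CompareRoot above shows):

// Assumed shapes of the affected assembler helpers (not quoted from this CL):
void LoadRoot(Register destination, RootIndex index);
void PushRoot(RootIndex index);
void CompareRoot(Register with, RootIndex index);
void JumpIfRoot(Register with, RootIndex index, Label* if_equal);
void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal);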
@ -56,7 +56,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {

// Run the native code for the InternalArray function called as a normal
// function.
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a2, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}

@ -109,7 +109,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ SmiUntag(a0);

// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);

// Set up pointer to last argument.
__ Addu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

@ -176,7 +176,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Preserve the incoming parameters on the stack.
__ SmiTag(a0);
__ Push(cp, a0, a1);
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ Push(a3);

// ----------- S t a t e -------------

@ -201,7 +201,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(v0, Heap::kTheHoleValueRootIndex);
__ LoadRoot(v0, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- v0: receiver

@ -291,7 +291,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(v0, Heap::kUndefinedValueRootIndex, &use_receiver);
__ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.

@ -313,7 +313,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ lw(v0, MemOperand(sp, 0 * kPointerSize));
__ JumpIfRoot(v0, Heap::kTheHoleValueRootIndex, &do_throw);
__ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);

__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.

@ -342,7 +342,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
Label okay;
__ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
__ LoadRoot(a2, RootIndex::kRealStackLimit);
// Make a2 the space we have left. The stack might already be overflowed
// here which will cause a2 to become negative.
__ Subu(a2, sp, a2);

@ -410,7 +410,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,

// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ LoadRoot(t0, RootIndex::kUndefinedValue);
__ mov(s1, t0);
__ mov(s2, t0);
__ mov(s3, t0);

@ -491,7 +491,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

// Push receiver.

@ -558,7 +558,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, t0);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(a1);
}

@ -854,7 +854,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
__ Subu(t1, sp, Operand(t0));
__ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
__ LoadRoot(a2, RootIndex::kRealStackLimit);
__ Branch(&ok, hs, t1, Operand(a2));
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&ok);

@ -862,7 +862,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// If ok, push undefined as the initial value for all register file entries.
Label loop_header;
Label loop_check;
__ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
__ LoadRoot(t1, RootIndex::kUndefinedValue);
__ Branch(&loop_check);
__ bind(&loop_header);
// TODO(rmcilroy): Consider doing more than one push per loop iteration.

@ -886,7 +886,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator with undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.

@ -934,7 +934,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
__ LoadRoot(scratch1, RootIndex::kRealStackLimit);
// Make scratch1 the space we have left. The stack might already be overflowed
// here which will cause scratch1 to become negative.
__ subu(scratch1, sp, scratch1);

@ -983,7 +983,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ mov(t0, a0); // No receiver.
}

@ -1191,7 +1191,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
__ push(t4);
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done);

@ -1344,7 +1344,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
{
Label no_arg;
Register scratch = t0;
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a2, RootIndex::kUndefinedValue);
__ mov(a3, a2);
// Lsa() cannot be used hare as scratch value used later.
__ sll(scratch, a0, kPointerSizeLog2);

@ -1374,8 +1374,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(a2, Heap::kNullValueRootIndex, &no_arguments);
__ JumpIfRoot(a2, Heap::kUndefinedValueRootIndex, &no_arguments);
__ JumpIfRoot(a2, RootIndex::kNullValue, &no_arguments);
__ JumpIfRoot(a2, RootIndex::kUndefinedValue, &no_arguments);

// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),

@ -1397,7 +1397,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
{
Label done;
__ Branch(&done, ne, a0, Operand(zero_reg));
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ Addu(a0, a0, Operand(1));
__ bind(&done);
}

@ -1447,7 +1447,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
{
Label no_arg;
Register scratch = t0;
__ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a1, RootIndex::kUndefinedValue);
__ mov(a2, a1);
__ mov(a3, a1);
__ sll(scratch, a0, kPointerSizeLog2);

@ -1499,7 +1499,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
{
Label no_arg;
Register scratch = t0;
__ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a1, RootIndex::kUndefinedValue);
__ mov(a2, a1);
// Lsa() cannot be used hare as scratch value used later.
__ sll(scratch, a0, kPointerSizeLog2);

@ -1592,7 +1592,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
__ LoadRoot(t1, RootIndex::kRealStackLimit);
// Make ip the space we have left. The stack might already be overflowed
// here which will cause ip to become negative.
__ Subu(t1, sp, t1);

@ -1607,13 +1607,13 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
{
__ mov(t2, zero_reg);
Label done, push, loop;
__ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ LoadRoot(t1, RootIndex::kTheHoleValue);
__ bind(&loop);
__ Branch(&done, eq, t2, Operand(t0));
__ Lsa(kScratchReg, a2, t2, kPointerSizeLog2);
__ lw(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
__ Branch(&push, ne, t1, Operand(kScratchReg));
__ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kUndefinedValue);
__ bind(&push);
__ Push(kScratchReg);
__ Addu(t2, t2, Operand(1));

@ -1757,9 +1757,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
__ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
__ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy.

@ -1848,7 +1847,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
__ Subu(sp, sp, Operand(t1));
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&done, hs, sp, Operand(kScratchReg));
// Restore the stack pointer.
__ Addu(sp, sp, Operand(t1));

@ -1958,7 +1957,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// a2 to contain either an AllocationSite or undefined.
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a2, RootIndex::kUndefinedValue);

Label call_generic_stub;

@ -2006,7 +2005,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
__ Subu(sp, sp, Operand(t1));
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&done, hs, sp, Operand(kScratchReg));
// Restore the stack pointer.
__ Addu(sp, sp, Operand(t1));

@ -2203,7 +2202,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a1: function
// a2: expected number of arguments
// a3: new target (passed through to callee)
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ LoadRoot(t0, RootIndex::kUndefinedValue);
__ sll(t2, a2, kPointerSizeLog2);
__ Subu(t1, fp, Operand(t2));
// Adjust for frame.

@ -2376,7 +2375,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

// Check result for exception sentinel.
Label exception_returned;
__ LoadRoot(t0, Heap::kExceptionRootIndex);
__ LoadRoot(t0, RootIndex::kException);
__ Branch(&exception_returned, eq, t0, Operand(v0));

// Check that there is no pending exception, otherwise we

@ -2387,7 +2386,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
IsolateAddressId::kPendingExceptionAddress, masm->isolate());
__ li(a2, pending_exception_address);
__ lw(a2, MemOperand(a2));
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ LoadRoot(t0, RootIndex::kTheHoleValue);
// Cannot use check here as it attempts to generate call into runtime.
__ Branch(&okay, eq, t0, Operand(a2));
__ stop("Unexpected pending exception");
@ -56,7 +56,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {

// Run the native code for the InternalArray function called as a normal
// function.
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a2, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}

@ -108,7 +108,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ SmiUntag(a0);

// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);

// Set up pointer to last argument.
__ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

@ -176,7 +176,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Preserve the incoming parameters on the stack.
__ SmiTag(a0);
__ Push(cp, a0, a1);
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ Push(a3);

// ----------- S t a t e -------------

@ -201,7 +201,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(v0, Heap::kTheHoleValueRootIndex);
__ LoadRoot(v0, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- v0: receiver

@ -291,7 +291,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(v0, Heap::kUndefinedValueRootIndex, &use_receiver);
__ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.

@ -313,7 +313,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ Ld(v0, MemOperand(sp, 0 * kPointerSize));
__ JumpIfRoot(v0, Heap::kTheHoleValueRootIndex, &do_throw);
__ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);

__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.

@ -382,7 +382,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

// Push receiver.

@ -451,7 +451,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, a4);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(a1);
}

@ -488,7 +488,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
Label okay;
__ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
__ LoadRoot(a2, RootIndex::kRealStackLimit);
// Make a2 the space we have left. The stack might already be overflowed
// here which will cause r2 to become negative.
__ dsubu(a2, sp, a2);

@ -555,7 +555,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,

// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a4, RootIndex::kUndefinedValue);
__ mov(s1, a4);
__ mov(s2, a4);
__ mov(s3, a4);

@ -853,7 +853,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
__ Dsubu(a5, sp, Operand(a4));
__ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
__ LoadRoot(a2, RootIndex::kRealStackLimit);
__ Branch(&ok, hs, a5, Operand(a2));
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&ok);

@ -861,7 +861,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// If ok, push undefined as the initial value for all register file entries.
Label loop_header;
Label loop_check;
__ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a5, RootIndex::kUndefinedValue);
__ Branch(&loop_check);
__ bind(&loop_header);
// TODO(rmcilroy): Consider doing more than one push per loop iteration.

@ -885,7 +885,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator as undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.

@ -933,7 +933,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
__ LoadRoot(scratch1, RootIndex::kRealStackLimit);
// Make scratch1 the space we have left. The stack might already be overflowed
// here which will cause scratch1 to become negative.
__ dsubu(scratch1, sp, scratch1);

@ -980,7 +980,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ Dsubu(a3, a3, Operand(1)); // Subtract one for receiver.
}

@ -1188,7 +1188,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
__ push(t2);
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done);

@ -1341,7 +1341,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
Register undefined_value = a3;
Register scratch = a4;

__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
__ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

// 1. Load receiver into a1, argArray into a2 (if present), remove all
// arguments from the stack (including the receiver), and push thisArg (if

@ -1375,7 +1375,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
__ JumpIfRoot(arg_array, RootIndex::kNullValue, &no_arguments);
__ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));

// 4a. Apply the receiver to the given argArray.

@ -1399,7 +1399,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
{
Label done;
__ Branch(&done, ne, a0, Operand(zero_reg));
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ Daddu(a0, a0, Operand(1));
__ bind(&done);
}

@ -1450,7 +1450,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
Register undefined_value = a3;
Register scratch = a4;

__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
__ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

// 1. Load target into a1 (if present), argumentsList into a2 (if present),
// remove all arguments from the stack (including the receiver), and push

@ -1506,7 +1506,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
Register undefined_value = a4;
Register scratch = a5;

__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
__ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

// 1. Load target into a1 (if present), argumentsList into a2 (if present),
// new.target into a3 (if present, otherwise use target), remove all

@ -1609,7 +1609,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
__ LoadRoot(a5, RootIndex::kRealStackLimit);
// Make ip the space we have left. The stack might already be overflowed
// here which will cause ip to become negative.
__ Dsubu(a5, sp, a5);

@ -1631,11 +1631,11 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
__ Daddu(a0, a0, len); // The 'len' argument for Call() or Construct().
__ dsll(scratch, len, kPointerSizeLog2);
__ Dsubu(scratch, sp, Operand(scratch));
__ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ LoadRoot(t1, RootIndex::kTheHoleValue);
__ bind(&loop);
__ Ld(a5, MemOperand(src));
__ Branch(&push, ne, a5, Operand(t1));
__ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a5, RootIndex::kUndefinedValue);
__ bind(&push);
__ daddiu(src, src, kPointerSize);
__ Push(a5);

@ -1778,9 +1778,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
__ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
__ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy.

@ -1868,7 +1867,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
__ Dsubu(sp, sp, Operand(a5));
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&done, hs, sp, Operand(kScratchReg));
// Restore the stack pointer.
__ Daddu(sp, sp, Operand(a5));

@ -1975,7 +1974,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// a2 to contain either an AllocationSite or undefined.
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a2, RootIndex::kUndefinedValue);

Label call_generic_stub;

@ -2022,7 +2021,7 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
__ Dsubu(sp, sp, Operand(a5));
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
__ LoadRoot(kScratchReg, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
__ Branch(&done, hs, sp, Operand(kScratchReg));
// Restore the stack pointer.
__ Daddu(sp, sp, Operand(a5));

@ -2220,7 +2219,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a1: function
// a2: expected number of arguments
// a3: new target (passed through to callee)
__ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
__ LoadRoot(a5, RootIndex::kUndefinedValue);
__ dsll(a6, a2, kPointerSizeLog2);
__ Dsubu(a4, fp, Operand(a6));
// Adjust for frame.

@ -2394,7 +2393,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
// Check result for exception sentinel.
|
||||
Label exception_returned;
|
||||
__ LoadRoot(a4, Heap::kExceptionRootIndex);
|
||||
__ LoadRoot(a4, RootIndex::kException);
|
||||
__ Branch(&exception_returned, eq, a4, Operand(v0));
|
||||
|
||||
// Check that there is no pending exception, otherwise we
|
||||
@ -2405,7 +2404,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
|
||||
IsolateAddressId::kPendingExceptionAddress, masm->isolate());
|
||||
__ li(a2, pending_exception_address);
|
||||
__ Ld(a2, MemOperand(a2));
|
||||
__ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
|
||||
__ LoadRoot(a4, RootIndex::kTheHoleValue);
|
||||
// Cannot use check here as it attempts to generate call into runtime.
|
||||
__ Branch(&okay, eq, a4, Operand(a2));
|
||||
__ stop("Unexpected pending exception");
|
||||
|
@@ -54,7 +54,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
// Run the native code for the InternalArray function called as a normal
// function.
// tail call a stub
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r5, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
@@ -109,7 +109,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ Push(cp, r3);
__ SmiUntag(r3, SetRC);
// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
// Set up pointer to last argument.
__ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));

@@ -184,7 +184,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Preserve the incoming parameters on the stack.
__ SmiTag(r3);
__ Push(cp, r3, r4);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ Push(r6);

// ----------- S t a t e -------------
@@ -209,7 +209,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
__ LoadRoot(r3, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- r3: receiver
@@ -303,7 +303,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &use_receiver);
__ JumpIfRoot(r3, RootIndex::kUndefinedValue, &use_receiver);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.
@@ -325,7 +325,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ LoadP(r3, MemOperand(sp));
__ JumpIfRoot(r3, Heap::kTheHoleValueRootIndex, &do_throw);
__ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);

__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.
@@ -402,7 +402,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ blt(&stack_overflow);

// Push receiver.
@@ -468,7 +468,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r4, r7);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(r4);
__ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
@@ -505,7 +505,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
Label okay;
__ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r5, RootIndex::kRealStackLimit);
// Make r5 the space we have left. The stack might already be overflowed
// here which will cause r5 to become negative.
__ sub(r5, sp, r5);
@@ -573,7 +573,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,

// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r7, RootIndex::kUndefinedValue);
__ mr(r14, r7);
__ mr(r15, r7);
__ mr(r16, r7);
@@ -887,7 +887,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
__ sub(r8, sp, r5);
__ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r0, RootIndex::kRealStackLimit);
__ cmpl(r8, r0);
__ bge(&ok);
__ CallRuntime(Runtime::kThrowStackOverflow);
@@ -896,7 +896,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// If ok, push undefined as the initial value for all register file entries.
// TODO(rmcilroy): Consider doing more than one push per loop iteration.
Label loop, no_args;
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r8, RootIndex::kUndefinedValue);
__ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
__ beq(&no_args, cr0);
__ mtctr(r5);
@@ -920,7 +920,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator with undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.
Label do_dispatch;
@@ -968,7 +968,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
__ LoadRoot(scratch, RootIndex::kRealStackLimit);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ sub(scratch, sp, scratch);
@@ -1014,7 +1014,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ mr(r6, r3); // Argument count is correct.
}

@@ -1227,7 +1227,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
__ push(r7);
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done);
@@ -1398,7 +1398,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
Register scratch = r7;
__ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
__ add(new_sp, sp, arg_size);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ mr(r5, scratch);
__ LoadP(r4, MemOperand(new_sp, 0)); // receiver
__ cmpi(arg_size, Operand(kPointerSize));
@@ -1423,8 +1423,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(r5, Heap::kNullValueRootIndex, &no_arguments);
__ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex, &no_arguments);
__ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
__ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);

// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1447,7 +1447,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Label done;
__ cmpi(r3, Operand::Zero());
__ bne(&done);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ addi(r3, r3, Operand(1));
__ bind(&done);
}
@@ -1502,7 +1502,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
Register scratch = r7;
__ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
__ add(new_sp, sp, arg_size);
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r4, RootIndex::kUndefinedValue);
__ mr(scratch, r4);
__ mr(r5, r4);
__ cmpi(arg_size, Operand(kPointerSize));
@@ -1552,7 +1552,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
Register new_sp = r7;
__ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
__ add(new_sp, sp, arg_size);
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r4, RootIndex::kUndefinedValue);
__ mr(r5, r4);
__ mr(r6, r4);
__ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined)
@@ -1655,7 +1655,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
__ LoadRoot(ip, RootIndex::kRealStackLimit);
// Make ip the space we have left. The stack might already be overflowed
// here which will cause ip to become negative.
__ sub(ip, sp, ip);
@@ -1677,9 +1677,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
__ mtctr(r7);
__ bind(&loop);
__ LoadPU(ip, MemOperand(r5, kPointerSize));
__ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
__ CompareRoot(ip, RootIndex::kTheHoleValue);
__ bne(&skip);
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ LoadRoot(ip, RootIndex::kUndefinedValue);
__ bind(&skip);
__ push(ip);
__ bdnz(&loop);
@@ -1825,9 +1825,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ bge(&done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
__ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
__ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy.
@@ -1915,7 +1914,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
__ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ bgt(&done); // Signed comparison.
// Restore the stack pointer.
__ mr(sp, r9);
@@ -2047,7 +2046,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// r5 to contain either an AllocationSite or undefined.
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r5, RootIndex::kUndefinedValue);

Label call_generic_stub;

@@ -2231,7 +2230,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: function
// r5: expected number of arguments
// r6: new target (passed through to callee)
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r0, RootIndex::kUndefinedValue);
__ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
__ sub(r7, fp, r7);
// Adjust for frame.
@@ -2420,7 +2419,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

// Check result for exception sentinel.
Label exception_returned;
__ CompareRoot(r3, Heap::kExceptionRootIndex);
__ CompareRoot(r3, RootIndex::kException);
__ beq(&exception_returned);

// Check that there is no pending exception, otherwise we
@@ -2432,7 +2431,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

__ Move(r6, pending_exception_address);
__ LoadP(r6, MemOperand(r6));
__ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
__ CompareRoot(r6, RootIndex::kTheHoleValue);
// Cannot use check here as it attempts to generate call into runtime.
__ beq(&okay);
__ stop("Unexpected pending exception");

@@ -54,7 +54,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
// Run the native code for the InternalArray function called as a normal
// function.
// tail call a stub
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r4, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
@@ -108,7 +108,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ Push(cp, r2);
__ SmiUntag(r2);
// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
// Set up pointer to last argument.
__ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

@@ -178,7 +178,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Preserve the incoming parameters on the stack.
__ SmiTag(r2);
__ Push(cp, r2, r3);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ Push(r5);

// ----------- S t a t e -------------
@@ -203,7 +203,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
__ LoadRoot(r2, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- r2: receiver
@@ -295,7 +295,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &use_receiver);
__ JumpIfRoot(r2, RootIndex::kUndefinedValue, &use_receiver);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.
@@ -317,7 +317,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ LoadP(r2, MemOperand(sp));
__ JumpIfRoot(r2, Heap::kTheHoleValueRootIndex, &do_throw);
__ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);

__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.
@@ -393,7 +393,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ blt(&stack_overflow);

// Push receiver.
@@ -468,7 +468,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r3, r6);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(r3);
__ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
@@ -505,7 +505,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
Label okay;
__ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r4, RootIndex::kRealStackLimit);
// Make r4 the space we have left. The stack might already be overflowed
// here which will cause r4 to become negative.
__ SubP(r4, sp, r4);
@@ -581,7 +581,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,

// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r6, RootIndex::kUndefinedValue);
__ LoadRR(r7, r6);
__ LoadRR(r8, r6);
__ LoadRR(r9, r6);
@@ -890,7 +890,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
__ SubP(r8, sp, r4);
__ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
__ LoadRoot(r0, RootIndex::kRealStackLimit);
__ CmpLogicalP(r8, r0);
__ bge(&ok);
__ CallRuntime(Runtime::kThrowStackOverflow);
@@ -899,7 +899,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// If ok, push undefined as the initial value for all register file entries.
// TODO(rmcilroy): Consider doing more than one push per loop iteration.
Label loop, no_args;
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r8, RootIndex::kUndefinedValue);
__ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
__ LoadAndTestP(r4, r4);
__ beq(&no_args);
@@ -924,7 +924,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator with undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.
Label do_dispatch;
@@ -973,7 +973,7 @@ static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
__ LoadRoot(scratch, RootIndex::kRealStackLimit);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ SubP(scratch, sp, scratch);
@@ -1020,7 +1020,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ LoadRR(r5, r2); // Argument count is correct.
}

@@ -1230,7 +1230,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
__ push(r6);
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done);
@@ -1392,7 +1392,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
Register scratch = r6;
__ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
__ AddP(new_sp, sp, arg_size);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ LoadRR(r4, scratch);
__ LoadP(r3, MemOperand(new_sp, 0)); // receiver
__ CmpP(arg_size, Operand(kPointerSize));
@@ -1417,8 +1417,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(r4, Heap::kNullValueRootIndex, &no_arguments);
__ JumpIfRoot(r4, Heap::kUndefinedValueRootIndex, &no_arguments);
__ JumpIfRoot(r4, RootIndex::kNullValue, &no_arguments);
__ JumpIfRoot(r4, RootIndex::kUndefinedValue, &no_arguments);

// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1441,7 +1441,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Label done;
__ CmpP(r2, Operand::Zero());
__ bne(&done, Label::kNear);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ AddP(r2, Operand(1));
__ bind(&done);
}
@@ -1496,7 +1496,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
Register scratch = r6;
__ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
__ AddP(new_sp, sp, arg_size);
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r3, RootIndex::kUndefinedValue);
__ LoadRR(scratch, r3);
__ LoadRR(r4, r3);
__ CmpP(arg_size, Operand(kPointerSize));
@@ -1546,7 +1546,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
Register new_sp = r6;
__ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
__ AddP(new_sp, sp, arg_size);
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r3, RootIndex::kUndefinedValue);
__ LoadRR(r4, r3);
__ LoadRR(r5, r3);
__ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined)
@@ -1659,7 +1659,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
__ LoadRoot(ip, RootIndex::kRealStackLimit);
// Make ip the space we have left. The stack might already be overflowed
// here which will cause ip to become negative.
__ SubP(ip, sp, ip);
@@ -1682,9 +1682,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
__ bind(&loop);
__ LoadP(ip, MemOperand(r4, kPointerSize));
__ la(r4, MemOperand(r4, kPointerSize));
__ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
__ CompareRoot(ip, RootIndex::kTheHoleValue);
__ bne(&skip, Label::kNear);
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ LoadRoot(ip, RootIndex::kUndefinedValue);
__ bind(&skip);
__ push(ip);
__ BranchOnCount(r1, &loop);
@@ -1830,9 +1830,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ bge(&done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
__ JumpIfRoot(r5, RootIndex::kUndefinedValue, &convert_global_proxy);
__ JumpIfNotRoot(r5, RootIndex::kNullValue, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy.
@@ -1921,7 +1920,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
__ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ bgt(&done); // Signed comparison.
// Restore the stack pointer.
__ LoadRR(sp, r8);
@@ -2054,7 +2053,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// r4 to contain either an AllocationSite or undefined.
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r4, RootIndex::kUndefinedValue);

Label call_generic_stub;

@@ -2236,7 +2235,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill the remaining expected arguments with undefined.
// r3: function
// r4: expected number of arguments
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r0, RootIndex::kUndefinedValue);
__ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
__ SubP(r6, fp, r6);
// Adjust for frame.
@@ -2428,7 +2427,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

// Check result for exception sentinel.
Label exception_returned;
__ CompareRoot(r2, Heap::kExceptionRootIndex);
__ CompareRoot(r2, RootIndex::kException);
__ beq(&exception_returned, Label::kNear);

// Check that there is no pending exception, otherwise we
@@ -2439,7 +2438,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
IsolateAddressId::kPendingExceptionAddress, masm->isolate());
__ Move(r1, pending_exception_address);
__ LoadP(r1, MemOperand(r1));
__ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
__ CompareRoot(r1, RootIndex::kTheHoleValue);
// Cannot use check here as it attempts to generate call into runtime.
__ beq(&okay, Label::kNear);
__ stop("Unexpected pending exception");

@@ -87,7 +87,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ Push(rcx);

// The receiver for the builtin/api call.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);

// Set up pointer to last argument.
__ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
@@ -157,7 +157,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ Push(rsi);
__ Push(rcx);
__ Push(rdi);
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ Push(rdx);

// ----------- S t a t e -------------
@@ -181,7 +181,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {

// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
__ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
__ LoadRoot(rax, RootIndex::kTheHoleValue);

// ----------- S t a t e -------------
// -- rax implicit receiver
@@ -269,8 +269,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;

// If the result is undefined, we jump out to using the implicit receiver.
__ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
Label::kNear);
__ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.
@@ -292,7 +291,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ movp(rax, Operand(rsp, 0 * kPointerSize));
__ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw, Label::kNear);
__ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);

__ bind(&leave_frame);
// Restore the arguments count.
@@ -324,7 +323,7 @@ static void Generate_StackOverflowCheck(
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
__ movp(scratch, rsp);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
@@ -533,7 +532,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
__ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(rsp, RootIndex::kRealStackLimit);
__ j(below, &stack_overflow);

// Pop return address.
@@ -602,7 +601,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ Push(rdx);
__ Push(rdi);
// Push hole as receiver since we do not use it for stepping.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(rdx);
__ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
@@ -905,7 +904,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Label ok;
__ movp(rax, rsp);
__ subp(rax, rcx);
__ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
__ CompareRoot(rax, RootIndex::kRealStackLimit);
__ j(above_equal, &ok, Label::kNear);
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&ok);
@@ -913,7 +912,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// If ok, push undefined as the initial value for all register file entries.
Label loop_header;
Label loop_check;
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rax, RootIndex::kUndefinedValue);
__ j(always, &loop_check, Label::kNear);
__ bind(&loop_header);
// TODO(rmcilroy): Consider doing more than one push per loop iteration.
@@ -937,7 +936,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);

// Load accumulator with undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.
@@ -1026,7 +1025,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(

// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ decl(rcx); // Subtract one for receiver.
}

@@ -1251,7 +1250,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done, Label::kNear);
@@ -1370,7 +1369,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
{
Label no_arg_array, no_this_arg;
StackArgumentsAccessor args(rsp, rax);
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rdx, RootIndex::kUndefinedValue);
__ movp(rbx, rdx);
__ movp(rdi, args.GetReceiverOperand());
__ testp(rax, rax);
@@ -1402,9 +1401,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {

// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
__ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
Label::kNear);
__ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
__ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1438,7 +1436,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
__ testp(rax, rax);
__ j(not_zero, &done, Label::kNear);
__ PopReturnAddressTo(rbx);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ PushReturnAddressFrom(rbx);
__ incp(rax);
__ bind(&done);
@@ -1488,7 +1486,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
{
Label done;
StackArgumentsAccessor args(rsp, rax);
__ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rdi, RootIndex::kUndefinedValue);
__ movp(rdx, rdi);
__ movp(rbx, rdi);
__ cmpp(rax, Immediate(1));
@@ -1539,7 +1537,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
{
Label done;
StackArgumentsAccessor args(rsp, rax);
__ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rdi, RootIndex::kUndefinedValue);
__ movp(rdx, rdi);
__ movp(rbx, rdi);
__ cmpp(rax, Immediate(1));
@@ -1554,7 +1552,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
__ bind(&done);
__ PopReturnAddressTo(rcx);
__ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
__ PushReturnAddressFrom(rcx);
}

@@ -1601,7 +1599,7 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {

// Run the native code for the InternalArray function called as a normal
// function.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rbx, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
@@ -1701,7 +1699,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {

// Fill remaining expected arguments with undefined values.
Label fill;
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
__ bind(&fill);
__ incp(r8);
__ Push(kScratchRegister);
@@ -1782,7 +1780,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
__ movp(r8, rsp);
// Make r8 the space we have left. The stack might already be overflowed
// here which will cause r8 to become negative.
@@ -1806,9 +1804,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Turn the hole into undefined as we go.
__ movp(r11,
FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
__ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
__ CompareRoot(r11, RootIndex::kTheHoleValue);
__ j(not_equal, &push, Label::kNear);
__ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
__ LoadRoot(r11, RootIndex::kUndefinedValue);
__ bind(&push);
__ Push(r11);
__ incl(r9);
@@ -1957,9 +1955,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ j(above_equal, &done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
&convert_global_proxy, Label::kNear);
__ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
__ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
Label::kNear);
__ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
Label::kNear);
__ bind(&convert_global_proxy);
{
@@ -2049,7 +2047,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
__ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
__ CompareRoot(rsp, RootIndex::kRealStackLimit);
__ j(above_equal, &done, Label::kNear);
// Restore the stack pointer.
__ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
@@ -2183,7 +2181,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {

// Calling convention for function specific ConstructStubs require
// rbx to contain either an AllocationSite or undefined.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
__ LoadRoot(rbx, RootIndex::kUndefinedValue);

// Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
__ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
@@ -2471,14 +2469,14 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,

// Check result for exception sentinel.
Label exception_returned;
__ CompareRoot(rax, Heap::kExceptionRootIndex);
__ CompareRoot(rax, RootIndex::kException);
__ j(equal, &exception_returned);

// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
if (FLAG_debug_code) {
Label okay;
__ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
__ LoadRoot(r14, RootIndex::kTheHoleValue);
ExternalReference pending_exception_address = ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, masm->isolate());
Operand pending_exception_operand =

@ -225,7 +225,7 @@ TNode<Object> CodeStubAssembler::NoContextConstant() {
|
||||
CodeStubAssembler::name##Constant() { \
|
||||
return UncheckedCast<std::remove_reference<decltype( \
|
||||
*std::declval<Heap>().rootAccessorName())>::type>( \
|
||||
LoadRoot(Heap::k##rootIndexName##RootIndex)); \
|
||||
LoadRoot(RootIndex::k##rootIndexName)); \
|
||||
}
|
||||
HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
|
||||
#undef HEAP_CONSTANT_ACCESSOR
|
||||
@ -236,7 +236,7 @@ HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
|
||||
CodeStubAssembler::name##Constant() { \
|
||||
return UncheckedCast<std::remove_reference<decltype( \
|
||||
*std::declval<ReadOnlyRoots>().rootAccessorName())>::type>( \
|
||||
LoadRoot(Heap::k##rootIndexName##RootIndex)); \
|
||||
LoadRoot(RootIndex::k##rootIndexName)); \
|
||||
}
|
||||
HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
|
||||
#undef HEAP_CONSTANT_ACCESSOR
|
||||
@ -256,7 +256,7 @@ HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST);
|
||||
TNode<Int64T> CodeStubAssembler::HashSeed() {
|
||||
DCHECK(Is64());
|
||||
TNode<HeapObject> hash_seed_root =
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(RootIndex::kHashSeed));
|
||||
return TNode<Int64T>::UncheckedCast(LoadObjectField(
|
||||
hash_seed_root, ByteArray::kHeaderSize, MachineType::Int64()));
|
||||
}
|
||||
@ -269,7 +269,7 @@ TNode<Int32T> CodeStubAssembler::HashSeedHigh() {
|
||||
static int kOffset = kInt32Size;
|
||||
#endif
|
||||
TNode<HeapObject> hash_seed_root =
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(RootIndex::kHashSeed));
|
||||
return TNode<Int32T>::UncheckedCast(LoadObjectField(
|
||||
hash_seed_root, ByteArray::kHeaderSize + kOffset, MachineType::Int32()));
|
||||
}
|
||||
@ -282,7 +282,7 @@ TNode<Int32T> CodeStubAssembler::HashSeedLow() {
|
||||
static int kOffset = 0;
|
||||
#endif
|
||||
TNode<HeapObject> hash_seed_root =
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
|
||||
TNode<HeapObject>::UncheckedCast(LoadRoot(RootIndex::kHashSeed));
|
||||
return TNode<Int32T>::UncheckedCast(LoadObjectField(
|
||||
hash_seed_root, ByteArray::kHeaderSize + kOffset, MachineType::Int32()));
|
||||
}
|
||||
@ -1007,9 +1007,9 @@ void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
|
||||
CSA_SLOW_ASSERT(this, IsMap(receiver_map));
|
||||
VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
|
||||
Label loop_body(this, &var_map);
|
||||
Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
|
||||
Node* empty_fixed_array = LoadRoot(RootIndex::kEmptyFixedArray);
|
||||
Node* empty_slow_element_dictionary =
|
||||
LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
|
||||
LoadRoot(RootIndex::kEmptySlowElementDictionary);
|
||||
Goto(&loop_body);
|
||||
|
||||
BIND(&loop_body);
|
||||
@ -1084,7 +1084,7 @@ TNode<BoolT> CodeStubAssembler::IsFastJSArrayWithNoCustomIteration(
|
||||
{
|
||||
// Check that the Array.prototype hasn't been modified in a way that would
|
||||
// affect iteration.
|
||||
Node* protector_cell = LoadRoot(Heap::kArrayIteratorProtectorRootIndex);
|
||||
Node* protector_cell = LoadRoot(RootIndex::kArrayIteratorProtector);
|
||||
DCHECK(isolate()->heap()->array_iterator_protector()->IsPropertyCell());
|
||||
var_result =
|
||||
WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
|
||||
@ -1250,7 +1250,7 @@ Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
|
||||
BIND(&needs_filler);
|
||||
// Store a filler and increase the address by kPointerSize.
|
||||
StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
|
||||
LoadRoot(Heap::kOnePointerFillerMapRootIndex));
|
||||
LoadRoot(RootIndex::kOnePointerFillerMap));
|
||||
address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
|
||||
|
||||
Goto(&done_filling);
|
||||
@ -1481,19 +1481,19 @@ TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
|
||||
}
|
||||
|
||||
TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32Root(
|
||||
Heap::RootListIndex root_index) {
|
||||
RootIndex root_index) {
|
||||
Node* roots_array_start =
|
||||
ExternalConstant(ExternalReference::roots_array_start(isolate()));
|
||||
int index = root_index * kPointerSize;
|
||||
int offset = static_cast<int>(root_index) * kPointerSize;
|
||||
if (SmiValuesAre32Bits()) {
|
||||
#if V8_TARGET_LITTLE_ENDIAN
|
||||
index += kPointerSize / 2;
|
||||
offset += kPointerSize / 2;
|
||||
#endif
|
||||
return UncheckedCast<Int32T>(
|
||||
Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index)));
|
||||
Load(MachineType::Int32(), roots_array_start, IntPtrConstant(offset)));
|
||||
} else {
|
||||
return SmiToInt32(Load(MachineType::AnyTagged(), roots_array_start,
|
||||
IntPtrConstant(index)));
|
||||
IntPtrConstant(offset)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1673,7 +1673,7 @@ TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
|
||||
|
||||
BIND(&if_strong_heap_object);
|
||||
GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
|
||||
LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
|
||||
LoadRoot(RootIndex::kPrototypeInfoMap)),
|
||||
if_no_proto_info);
|
||||
return CAST(prototype_info.value());
|
||||
}
|
||||
@ -2558,7 +2558,7 @@ TNode<Context> CodeStubAssembler::LoadNativeContext(
|
||||
|
||||
TNode<Context> CodeStubAssembler::LoadModuleContext(
|
||||
SloppyTNode<Context> context) {
|
||||
Node* module_map = LoadRoot(Heap::kModuleContextMapRootIndex);
|
||||
Node* module_map = LoadRoot(RootIndex::kModuleContextMap);
|
||||
Variable cur_context(this, MachineRepresentation::kTaggedPointer);
|
||||
cur_context.Bind(context);
|
||||
|
||||
@ -2741,8 +2741,8 @@ Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
|
||||
object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
|
||||
}
|
||||
|
||||
Node* CodeStubAssembler::StoreMapNoWriteBarrier(
|
||||
Node* object, Heap::RootListIndex map_root_index) {
|
||||
Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object,
|
||||
RootIndex map_root_index) {
|
||||
return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
|
||||
}
|
||||
|
||||
@ -2754,7 +2754,7 @@ Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
|
||||
}
|
||||
|
||||
Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
|
||||
Heap::RootListIndex root_index) {
|
||||
RootIndex root_index) {
|
||||
if (Heap::RootIsImmortalImmovable(root_index)) {
|
||||
return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
|
||||
} else {
|
||||
@ -2870,7 +2870,7 @@ void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
|
||||
TNode<Name> maybe_length = CAST(LoadWeakFixedArrayElement(
|
||||
descriptors, DescriptorArray::ToKeyIndex(length_index)));
|
||||
CSA_ASSERT(this,
|
||||
WordEqual(maybe_length, LoadRoot(Heap::klength_stringRootIndex)));
|
||||
WordEqual(maybe_length, LoadRoot(RootIndex::klength_string)));
|
||||
#endif
|
||||
|
||||
TNode<Uint32T> details = LoadDetailsByKeyIndex(
|
||||
@ -3011,7 +3011,7 @@ void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
|
||||
Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
|
||||
WriteBarrierMode mode) {
|
||||
Node* result = Allocate(Cell::kSize, kNone);
|
||||
StoreMapNoWriteBarrier(result, Heap::kCellMapRootIndex);
|
||||
StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
|
||||
StoreCellValue(result, value, mode);
|
||||
return result;
|
||||
}
|
||||
@ -3035,7 +3035,7 @@ Node* CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
|
||||
|
||||
TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
|
||||
Node* result = Allocate(HeapNumber::kSize, kNone);
|
||||
Heap::RootListIndex heap_map_index = Heap::kHeapNumberMapRootIndex;
|
||||
RootIndex heap_map_index = RootIndex::kHeapNumberMap;
|
||||
StoreMapNoWriteBarrier(result, heap_map_index);
|
||||
return UncheckedCast<HeapNumber>(result);
|
||||
}
|
||||
@ -3049,7 +3049,7 @@ TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
|
||||
|
||||
TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
|
||||
Node* result = Allocate(MutableHeapNumber::kSize, kNone);
|
||||
Heap::RootListIndex heap_map_index = Heap::kMutableHeapNumberMapRootIndex;
|
||||
RootIndex heap_map_index = RootIndex::kMutableHeapNumberMap;
|
||||
StoreMapNoWriteBarrier(result, heap_map_index);
|
||||
return UncheckedCast<MutableHeapNumber>(result);
|
||||
}
|
||||
@ -3075,7 +3075,7 @@ TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
|
||||
TNode<IntPtrT> size = IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
|
||||
Signed(WordShl(length, kPointerSizeLog2)));
|
||||
Node* raw_result = Allocate(size, kNone);
|
||||
StoreMapNoWriteBarrier(raw_result, Heap::kBigIntMapRootIndex);
|
||||
StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
|
||||
return UncheckedCast<BigInt>(raw_result);
|
||||
}
|
||||
|
||||
@ -3108,11 +3108,11 @@ TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
uint32_t length, AllocationFlags flags) {
Comment("AllocateSeqOneByteString");
if (length == 0) {
return CAST(LoadRoot(Heap::kempty_stringRootIndex));
return CAST(LoadRoot(RootIndex::kempty_string));
}
Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kOneByteStringMap));
StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
Uint32Constant(length),
MachineRepresentation::kWord32);
@ -3150,8 +3150,8 @@ TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
{
// Just allocate the SeqOneByteString in new space.
Node* result = AllocateInNewSpace(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kOneByteStringMap));
StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
length, MachineRepresentation::kWord32);
StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
@ -3172,7 +3172,7 @@ TNode<String> CodeStubAssembler::AllocateSeqOneByteString(

BIND(&if_lengthiszero);
{
var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
var_result.Bind(LoadRoot(RootIndex::kempty_string));
Goto(&if_join);
}

@ -3184,11 +3184,11 @@ TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
uint32_t length, AllocationFlags flags) {
Comment("AllocateSeqTwoByteString");
if (length == 0) {
return CAST(LoadRoot(Heap::kempty_stringRootIndex));
return CAST(LoadRoot(RootIndex::kempty_string));
}
Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kStringMap));
StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
Uint32Constant(length),
MachineRepresentation::kWord32);
@ -3220,8 +3220,8 @@ TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
{
// Just allocate the SeqTwoByteString in new space.
Node* result = AllocateInNewSpace(size, flags);
DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kStringMap));
StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
length, MachineRepresentation::kWord32);
StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
@ -3242,7 +3242,7 @@ TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(

BIND(&if_lengthiszero);
{
var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
var_result.Bind(LoadRoot(RootIndex::kempty_string));
Goto(&if_join);
}

@ -3250,11 +3250,12 @@ TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
return CAST(var_result.value());
}
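
The hunks above are all instances of one mechanical substitution: a nested unscoped enum constant such as Heap::kempty_stringRootIndex becomes the scoped-enum constant RootIndex::kempty_string. A minimal sketch of the shape of that change (illustrative entries only, not the real V8 root list):

// Old shape: unscoped enum nested in Heap, one k...RootIndex name per root.
// enum RootListIndex { kempty_stringRootIndex, kOneByteStringMapRootIndex };
// New shape: a scoped enum at namespace scope; entries drop the suffix.
enum class RootIndex {
  kempty_string,         // was Heap::kempty_stringRootIndex
  kOneByteStringMap,     // was Heap::kOneByteStringMapRootIndex
  kStrongRootListLength  // sentinel entry, cast to int where a count is needed
};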

TNode<String> CodeStubAssembler::AllocateSlicedString(
Heap::RootListIndex map_root_index, TNode<Uint32T> length,
TNode<String> parent, TNode<Smi> offset) {
DCHECK(map_root_index == Heap::kSlicedOneByteStringMapRootIndex ||
map_root_index == Heap::kSlicedStringMapRootIndex);
TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
TNode<Uint32T> length,
TNode<String> parent,
TNode<Smi> offset) {
DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
map_root_index == RootIndex::kSlicedStringMap);
Node* result = Allocate(SlicedString::kSize);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
StoreMapNoWriteBarrier(result, map_root_index);
@ -3272,21 +3273,23 @@ TNode<String> CodeStubAssembler::AllocateSlicedString(

TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
parent, offset);
}

TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
offset);
}

TNode<String> CodeStubAssembler::AllocateConsString(
Heap::RootListIndex map_root_index, TNode<Uint32T> length,
TNode<String> first, TNode<String> second, AllocationFlags flags) {
DCHECK(map_root_index == Heap::kConsOneByteStringMapRootIndex ||
map_root_index == Heap::kConsStringMapRootIndex);
TNode<String> CodeStubAssembler::AllocateConsString(RootIndex map_root_index,
TNode<Uint32T> length,
TNode<String> first,
TNode<String> second,
AllocationFlags flags) {
DCHECK(map_root_index == RootIndex::kConsOneByteStringMap ||
map_root_index == RootIndex::kConsStringMap);
Node* result = Allocate(ConsString::kSize, flags);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
StoreMapNoWriteBarrier(result, map_root_index);
@ -3311,15 +3314,15 @@ TNode<String> CodeStubAssembler::AllocateConsString(
TNode<String> CodeStubAssembler::AllocateOneByteConsString(
TNode<Uint32T> length, TNode<String> first, TNode<String> second,
AllocationFlags flags) {
return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
return AllocateConsString(RootIndex::kConsOneByteStringMap, length, first,
second, flags);
}

TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
TNode<Uint32T> length, TNode<String> first, TNode<String> second,
AllocationFlags flags) {
return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
second, flags);
return AllocateConsString(RootIndex::kConsStringMap, length, first, second,
flags);
}

TNode<String> CodeStubAssembler::NewConsString(TNode<Uint32T> length,
@ -3402,8 +3405,8 @@ TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
Comment("Initialize NameDictionary");
// Initialize FixedArray fields.
DCHECK(Heap::RootIsImmortalImmovable(Heap::kNameDictionaryMapRootIndex));
StoreMapNoWriteBarrier(result, Heap::kNameDictionaryMapRootIndex);
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kNameDictionaryMap));
StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
SmiFromIntPtr(length));
// Initialized HashTable fields.
@ -3467,8 +3470,8 @@ Node* CodeStubAssembler::AllocateOrderedHashTable() {
// Allocate the table and add the proper map.
const ElementsKind elements_kind = HOLEY_ELEMENTS;
TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
TNode<Map> fixed_array_map = CAST(LoadRoot(
static_cast<Heap::RootListIndex>(CollectionType::GetMapRootIndex())));
TNode<Map> fixed_array_map =
CAST(LoadRoot(static_cast<RootIndex>(CollectionType::GetMapRootIndex())));
TNode<FixedArray> table =
CAST(AllocateFixedArray(elements_kind, length_intptr,
kAllowLargeObjectAllocation, fixed_array_map));
@ -3541,8 +3544,8 @@ TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
UncheckedCast<IntPtrT>(TimesPointerSize(total_size_word_aligned));

// Allocate the table and add the proper map.
TNode<Map> small_ordered_hash_map = CAST(LoadRoot(
static_cast<Heap::RootListIndex>(CollectionType::GetMapRootIndex())));
TNode<Map> small_ordered_hash_map =
CAST(LoadRoot(static_cast<RootIndex>(CollectionType::GetMapRootIndex())));
TNode<Object> table_obj = CAST(AllocateInNewSpace(total_size_word_aligned));
StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);
@ -3712,7 +3715,7 @@ void CodeStubAssembler::InitializeJSObjectFromMap(
if (properties == nullptr) {
CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
} else {
CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
IsNameDictionary(properties)),
@ -3722,7 +3725,7 @@ void CodeStubAssembler::InitializeJSObjectFromMap(
}
if (elements == nullptr) {
StoreObjectFieldRoot(object, JSObject::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
} else {
CSA_ASSERT(this, IsFixedArray(elements));
StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
@ -3741,7 +3744,7 @@ void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
CSA_ASSERT(
this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);
}

void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
@ -3780,11 +3783,11 @@ void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(

Comment("iInitialize filler fields");
InitializeFieldsWithRoot(object, used_size, instance_size,
Heap::kOnePointerFillerMapRootIndex);
RootIndex::kOnePointerFillerMap);

Comment("Initialize undefined fields");
InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
Heap::kUndefinedValueRootIndex);
RootIndex::kUndefinedValue);

STATIC_ASSERT(Map::kNoSlackTracking == 0);
GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
@ -3861,9 +3864,9 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
// Setup elements object.
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
Heap::RootListIndex elements_map_index =
IsDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
RootIndex elements_map_index = IsDoubleElementsKind(kind)
? RootIndex::kFixedDoubleArrayMap
: RootIndex::kFixedArrayMap;
DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
StoreMapNoWriteBarrier(elements, elements_map_index);
TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
@ -3889,7 +3892,7 @@ Node* CodeStubAssembler::AllocateUninitializedJSArray(Node* array_map,
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);

StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);

if (allocation_site != nullptr) {
InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
@ -3914,7 +3917,7 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
allocation_site);
StoreObjectFieldRoot(array, JSArray::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
} else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
capacity_mode) &&
capacity_as_constant > 0) {
@ -3924,7 +3927,7 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
// Fill in the elements with holes.
FillFixedArrayWithValue(kind, elements,
IntPtrOrSmiConstant(0, capacity_mode), capacity,
Heap::kTheHoleValueRootIndex, capacity_mode);
RootIndex::kTheHoleValue, capacity_mode);
} else {
Label out(this), empty(this), nonempty(this);
VARIABLE(var_array, MachineRepresentation::kTagged);
@ -3939,7 +3942,7 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
var_array.Bind(AllocateUninitializedJSArrayWithoutElements(
array_map, length, allocation_site));
StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
Goto(&out);
}

@ -3953,7 +3956,7 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
// Fill in the elements with holes.
FillFixedArrayWithValue(kind, elements,
IntPtrOrSmiConstant(0, capacity_mode), capacity,
Heap::kTheHoleValueRootIndex, capacity_mode);
RootIndex::kTheHoleValue, capacity_mode);
Goto(&out);
}

@ -4036,9 +4039,9 @@ TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
StoreMap(array, fixed_array_map);
}
} else {
Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
RootIndex map_index = IsDoubleElementsKind(kind)
? RootIndex::kFixedDoubleArrayMap
: RootIndex::kFixedArrayMap;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(array, map_index);
}
@ -4098,7 +4101,7 @@ TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
Label new_space_check(this, {&var_fixed_array_map});
Branch(WordEqual(var_fixed_array_map.value(),
LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
LoadRoot(RootIndex::kFixedCOWArrayMap)),
&cow, &new_space_check);

BIND(&new_space_check);
@ -4158,7 +4161,7 @@ TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
Goto(&done);
});
} else {
var_fixed_array_map.Bind(LoadRoot(Heap::kFixedArrayMapRootIndex));
var_fixed_array_map.Bind(LoadRoot(RootIndex::kFixedArrayMap));
Goto(&new_space_check);
}
}
@ -4218,7 +4221,7 @@ Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
Node* total_size = GetPropertyArrayAllocationSize(capacity_node, mode);

Node* array = Allocate(total_size, flags);
Heap::RootListIndex map_index = Heap::kPropertyArrayMapRootIndex;
RootIndex map_index = RootIndex::kPropertyArrayMap;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(array, map_index);
InitializePropertyArrayLength(array, capacity_node, mode);
@ -4243,14 +4246,15 @@ void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
mode);
}

void CodeStubAssembler::FillFixedArrayWithValue(
ElementsKind kind, Node* array, Node* from_node, Node* to_node,
Heap::RootListIndex value_root_index, ParameterMode mode) {
void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind, Node* array,
Node* from_node, Node* to_node,
RootIndex value_root_index,
ParameterMode mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
value_root_index == Heap::kUndefinedValueRootIndex);
DCHECK(value_root_index == RootIndex::kTheHoleValue ||
value_root_index == RootIndex::kUndefinedValue);

// Determine the value to initialize the {array} based
// on the {value_root_index} and the elements {kind}.
@ -4378,10 +4382,10 @@ void CodeStubAssembler::CopyFixedArrayElements(
// pre-initialized with holes to make sure that it's always in a
// consistent state.
FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
capacity, Heap::kTheHoleValueRootIndex, mode);
capacity, RootIndex::kTheHoleValue, mode);
} else if (element_count != capacity) {
FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
Heap::kTheHoleValueRootIndex, mode);
RootIndex::kTheHoleValue, mode);
}

Node* first_from_element_offset =
@ -4493,9 +4497,8 @@ TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
TNode<HeapObject> base, Label* cast_fail) {
Label fixed_array(this);
TNode<Map> map = LoadMap(base);
GotoIf(WordEqual(map, LoadRoot(Heap::kFixedArrayMapRootIndex)), &fixed_array);
GotoIf(WordNotEqual(map, LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
cast_fail);
GotoIf(WordEqual(map, LoadRoot(RootIndex::kFixedArrayMap)), &fixed_array);
GotoIf(WordNotEqual(map, LoadRoot(RootIndex::kFixedCOWArrayMap)), cast_fail);
Goto(&fixed_array);
BIND(&fixed_array);
return UncheckedCast<FixedArray>(base);
@ -4703,7 +4706,7 @@ void CodeStubAssembler::InitializeAllocationMemento(Node* base,
Node* allocation_site) {
Comment("[Initialize AllocationMemento");
Node* memento = InnerAllocate(base, base_allocation_size);
StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
StoreObjectFieldNoWriteBarrier(
memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
if (FLAG_allocation_site_pretenuring) {
@ -5364,42 +5367,42 @@ TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {

TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kNoElementsProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kNoElementsProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kPromiseResolveProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kPromiseResolveProtector);
Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kPromiseThenProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kPromiseThenProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kArraySpeciesProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kArraySpeciesProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kTypedArraySpeciesProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kTypedArraySpeciesProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}

TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
Node* cell = LoadRoot(Heap::kPromiseSpeciesProtectorRootIndex);
Node* cell = LoadRoot(RootIndex::kPromiseSpeciesProtector);
Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
return WordEqual(cell_value, invalid);
}
@ -5438,7 +5441,7 @@ TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
}

TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
return WordEqual(LoadMap(object), LoadRoot(Heap::kCellMapRootIndex));
return WordEqual(LoadMap(object), LoadRoot(RootIndex::kCellMap));
}

TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
@ -5812,7 +5815,7 @@ TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(

TNode<BoolT> CodeStubAssembler::IsNativeContext(
SloppyTNode<HeapObject> object) {
return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
return WordEqual(LoadMap(object), LoadRoot(RootIndex::kNativeContextMap));
}

TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
@ -6099,7 +6102,7 @@ TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
{
// Load the isolate wide single character string cache.
TNode<FixedArray> cache =
CAST(LoadRoot(Heap::kSingleCharacterStringCacheRootIndex));
CAST(LoadRoot(RootIndex::kSingleCharacterStringCache));
TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));

// Check if we have an entry for the {code} in the single character string
@ -6747,7 +6750,7 @@ TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
done(this, &result);

// Load the number string cache.
Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
Node* number_string_cache = LoadRoot(RootIndex::kNumberStringCache);

// Make the hash mask from the length of the number string cache. It
// contains two elements (number and string) for each cache entry.
@ -9317,25 +9320,22 @@ void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
Label* if_protector) {
// This list must be kept in sync with LookupIterator::UpdateProtector!
// TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
GotoIf(WordEqual(name, LoadRoot(Heap::kconstructor_stringRootIndex)),
GotoIf(WordEqual(name, LoadRoot(RootIndex::kconstructor_string)),
if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::kiterator_symbolRootIndex)),
GotoIf(WordEqual(name, LoadRoot(RootIndex::kiterator_symbol)), if_protector);
GotoIf(WordEqual(name, LoadRoot(RootIndex::knext_string)), if_protector);
GotoIf(WordEqual(name, LoadRoot(RootIndex::kspecies_symbol)), if_protector);
GotoIf(WordEqual(name, LoadRoot(RootIndex::kis_concat_spreadable_symbol)),
if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::knext_stringRootIndex)), if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::kspecies_symbolRootIndex)),
if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::kis_concat_spreadable_symbolRootIndex)),
if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::kresolve_stringRootIndex)),
if_protector);
GotoIf(WordEqual(name, LoadRoot(Heap::kthen_stringRootIndex)), if_protector);
GotoIf(WordEqual(name, LoadRoot(RootIndex::kresolve_string)), if_protector);
GotoIf(WordEqual(name, LoadRoot(RootIndex::kthen_string)), if_protector);
// Fall through if no case matched.
}

TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
return Select<Map>(
TaggedIsSmi(receiver),
[=] { return CAST(LoadRoot(Heap::kHeapNumberMapRootIndex)); },
[=] { return CAST(LoadRoot(RootIndex::kHeapNumberMap)); },
[=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
}

@ -9999,9 +9999,8 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
BIND(&map_check);
{
TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
Branch(
WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
memento_found, &no_memento_found);
Branch(WordEqual(memento_map, LoadRoot(RootIndex::kAllocationMementoMap)),
memento_found, &no_memento_found);
}
BIND(&no_memento_found);
Comment("] TrapAllocationMemento");
@ -10015,7 +10014,7 @@ TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
Node* site = Allocate(size, CodeStubAssembler::kPretenured);
StoreMapNoWriteBarrier(site, Heap::kAllocationSiteWithWeakNextMapRootIndex);
StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
// Should match AllocationSite::Initialize.
TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
@ -10039,7 +10038,7 @@ TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(

// Store an empty fixed array for the code dependency.
StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
Heap::kEmptyWeakFixedArrayRootIndex);
RootIndex::kEmptyWeakFixedArray);

// Link the object to the allocation site list
TNode<ExternalReference> site_list = ExternalConstant(
@ -10207,9 +10206,10 @@ void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
doesnt_fit);
}

void CodeStubAssembler::InitializeFieldsWithRoot(
Node* object, Node* start_offset, Node* end_offset,
Heap::RootListIndex root_index) {
void CodeStubAssembler::InitializeFieldsWithRoot(Node* object,
Node* start_offset,
Node* end_offset,
RootIndex root_index) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
@ -12128,9 +12128,9 @@ TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
Node* iterator = Allocate(JSArrayIterator::kSize);
StoreMapNoWriteBarrier(iterator, iterator_map);
StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(
iterator, JSArrayIterator::kIteratedObjectOffset, object);
StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
@ -12150,9 +12150,9 @@ Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
Node* result = Allocate(JSIteratorResult::kSize);
StoreMapNoWriteBarrier(result, map);
StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
return result;
@ -12167,7 +12167,7 @@ Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
TNode<FixedArray> elements = UncheckedCast<FixedArray>(
Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
Heap::kFixedArrayMapRootIndex);
RootIndex::kFixedArrayMap);
StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
StoreFixedArrayElement(elements, 0, key);
StoreFixedArrayElement(elements, 1, value);
@ -12176,7 +12176,7 @@ Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
Node* array = InnerAllocate(elements, elements_size);
StoreMapNoWriteBarrier(array, array_map);
StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
Node* iterator_map =
@ -12184,12 +12184,12 @@ Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
Node* result = InnerAllocate(array, JSArray::kSize);
StoreMapNoWriteBarrier(result, iterator_map);
StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
Heap::kFalseValueRootIndex);
RootIndex::kFalseValue);
return result;
}

@ -12610,11 +12610,11 @@ Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
StoreMapNoWriteBarrier(fun, map);
StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
RootIndex::kEmptyFixedArray);
StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
Heap::kManyClosuresCellRootIndex);
RootIndex::kManyClosuresCell);
StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
shared_info);
StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
@ -12765,7 +12765,7 @@ void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
Node* context, int slots) {
DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
StoreMapNoWriteBarrier(context, Heap::kFunctionContextMapRootIndex);
StoreMapNoWriteBarrier(context, RootIndex::kFunctionContextMap);
StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
SmiConstant(slots));

@ -831,7 +831,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Load a SMI and untag it.
TNode<IntPtrT> LoadAndUntagSmi(Node* base, int index);
// Load a SMI root, untag it, and convert to Word32.
TNode<Int32T> LoadAndUntagToWord32Root(Heap::RootListIndex root_index);
TNode<Int32T> LoadAndUntagToWord32Root(RootIndex root_index);

TNode<MaybeObject> LoadMaybeWeakObjectField(SloppyTNode<HeapObject> object,
int offset) {
@ -1169,11 +1169,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
MachineRepresentation rep = MachineRepresentation::kTagged);
// Store the Map of an HeapObject.
Node* StoreMap(Node* object, Node* map);
Node* StoreMapNoWriteBarrier(Node* object,
Heap::RootListIndex map_root_index);
Node* StoreMapNoWriteBarrier(Node* object, RootIndex map_root_index);
Node* StoreMapNoWriteBarrier(Node* object, Node* map);
Node* StoreObjectFieldRoot(Node* object, int offset,
Heap::RootListIndex root);
Node* StoreObjectFieldRoot(Node* object, int offset, RootIndex root);
// Store an array element to a FixedArray.
void StoreFixedArrayElement(
TNode<FixedArray> object, int index, SloppyTNode<Object> value,
@ -1472,8 +1470,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* InternalArrayCreate(TNode<Context> context, TNode<Number> len);

void FillFixedArrayWithValue(ElementsKind kind, Node* array, Node* from_index,
Node* to_index,
Heap::RootListIndex value_root_index,
Node* to_index, RootIndex value_root_index,
ParameterMode mode = INTPTR_PARAMETERS);

// Uses memset to effectively initialize the given FixedArray with zeroes.
@ -1539,9 +1536,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {

TNode<FixedDoubleArray> HeapObjectToFixedDoubleArray(TNode<HeapObject> base,
Label* cast_fail) {
GotoIf(WordNotEqual(LoadMap(base),
LoadRoot(Heap::kFixedDoubleArrayMapRootIndex)),
cast_fail);
GotoIf(
WordNotEqual(LoadMap(base), LoadRoot(RootIndex::kFixedDoubleArrayMap)),
cast_fail);
return UncheckedCast<FixedDoubleArray>(base);
}

@ -2680,7 +2677,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
ParameterMode mode);

void InitializeFieldsWithRoot(Node* object, Node* start_offset,
Node* end_offset, Heap::RootListIndex root);
Node* end_offset, RootIndex root);

Node* RelationalComparison(Operation op, Node* left, Node* right,
Node* context,
@ -2970,11 +2967,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* EmitKeyedSloppyArguments(Node* receiver, Node* key, Node* value,
Label* bailout);

TNode<String> AllocateSlicedString(Heap::RootListIndex map_root_index,
TNode<String> AllocateSlicedString(RootIndex map_root_index,
TNode<Uint32T> length,
TNode<String> parent, TNode<Smi> offset);

TNode<String> AllocateConsString(Heap::RootListIndex map_root_index,
TNode<String> AllocateConsString(RootIndex map_root_index,
TNode<Uint32T> length, TNode<String> first,
TNode<String> second, AllocationFlags flags);

@ -3186,7 +3186,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
auto MoveConstantToRegister = [&](Register dst, Constant src) {
if (src.type() == Constant::kHeapObject) {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -2581,7 +2581,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
auto MoveConstantToRegister = [&](Register dst, Constant src) {
if (src.type() == Constant::kHeapObject) {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -309,7 +309,7 @@ TNode<Float64T> CodeAssembler::Float64Constant(double value) {
}

TNode<HeapNumber> CodeAssembler::NaNConstant() {
return UncheckedCast<HeapNumber>(LoadRoot(Heap::kNanValueRootIndex));
return UncheckedCast<HeapNumber>(LoadRoot(RootIndex::kNanValue));
}

bool CodeAssembler::ToInt32Constant(Node* node, int32_t& out_value) {
@ -963,7 +963,7 @@ Node* CodeAssembler::AtomicLoad(MachineType rep, Node* base, Node* offset) {
return raw_assembler()->AtomicLoad(rep, base, offset);
}

TNode<Object> CodeAssembler::LoadRoot(Heap::RootListIndex root_index) {
TNode<Object> CodeAssembler::LoadRoot(RootIndex root_index) {
if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) {
Handle<Object> root = isolate()->heap()->root_handle(root_index);
if (root->IsSmi()) {
@ -978,8 +978,9 @@ TNode<Object> CodeAssembler::LoadRoot(Heap::RootListIndex root_index) {
// cases, it would boil down to loading from a fixed kRootRegister offset.
Node* roots_array_start =
ExternalConstant(ExternalReference::roots_array_start(isolate()));
size_t offset = static_cast<size_t>(root_index) * kPointerSize;
return UncheckedCast<Object>(Load(MachineType::AnyTagged(), roots_array_start,
IntPtrConstant(root_index * kPointerSize)));
IntPtrConstant(offset)));
}

Node* CodeAssembler::Store(Node* base, Node* value) {
@ -1033,12 +1034,13 @@ Node* CodeAssembler::AtomicCompareExchange(MachineType type, Node* base,
new_value);
}

Node* CodeAssembler::StoreRoot(Heap::RootListIndex root_index, Node* value) {
Node* CodeAssembler::StoreRoot(RootIndex root_index, Node* value) {
DCHECK(Heap::RootCanBeWrittenAfterInitialization(root_index));
Node* roots_array_start =
ExternalConstant(ExternalReference::roots_array_start(isolate()));
size_t offset = static_cast<size_t>(root_index) * kPointerSize;
return StoreNoWriteBarrier(MachineRepresentation::kTagged, roots_array_start,
IntPtrConstant(root_index * kPointerSize), value);
IntPtrConstant(offset), value);
}
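
Both LoadRoot and StoreRoot above now hoist the slot offset into a named size_t. The cast is the substantive change: a scoped enum no longer converts implicitly to an integer, so the old expression root_index * kPointerSize stops compiling. A self-contained sketch of the new offset computation (the enum entries and the 8-byte pointer size are assumptions, not the real declarations):

#include <cstddef>

enum class RootIndex { kFirstRoot, kSecondRoot };  // illustrative entries
constexpr size_t kPointerSize = 8;                 // assumes a 64-bit build

size_t RootSlotOffset(RootIndex root_index) {
  // enum class has no implicit integral conversion, hence the explicit cast;
  // with the old unscoped enum the bare multiplication was legal.
  return static_cast<size_t>(root_index) * kPointerSize;
}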

Node* CodeAssembler::Retain(Node* value) {

@ -815,7 +815,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
Node* AtomicLoad(MachineType rep, Node* base, Node* offset);

// Load a value from the root array.
TNode<Object> LoadRoot(Heap::RootListIndex root_index);
TNode<Object> LoadRoot(RootIndex root_index);

// Store value to raw memory location.
Node* Store(Node* base, Node* value);
@ -845,7 +845,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
Node* AtomicXor(MachineType type, Node* base, Node* offset, Node* value);

// Store a value to the root array.
Node* StoreRoot(Heap::RootListIndex root_index, Node* value);
Node* StoreRoot(RootIndex root_index, Node* value);

// Basic arithmetic operations.
#define DECLARE_CODE_ASSEMBLER_BINARY_OP(name, ResType, Arg1Type, Arg2Type) \
@ -1139,7 +1139,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
TArgs... args) {
int argc = static_cast<int>(sizeof...(args));
Node* arity = Int32Constant(argc);
Node* receiver = LoadRoot(Heap::kUndefinedValueRootIndex);
Node* receiver = LoadRoot(RootIndex::kUndefinedValue);

// Construct(target, new_target, arity, receiver, arguments...)
return CallStub(callable, context, new_target, new_target, arity, receiver,

@ -454,8 +454,8 @@ void CodeGenerator::RecordSafepoint(ReferenceMap* references,
}
}

bool CodeGenerator::IsMaterializableFromRoot(
Handle<HeapObject> object, Heap::RootListIndex* index_return) {
bool CodeGenerator::IsMaterializableFromRoot(Handle<HeapObject> object,
RootIndex* index_return) {
const CallDescriptor* incoming_descriptor =
linkage()->GetIncomingDescriptor();
if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {

@ -163,7 +163,7 @@ class CodeGenerator final : public GapResolver::Assembler {
// which is cheaper on some platforms than materializing the actual heap
// object constant.
bool IsMaterializableFromRoot(Handle<HeapObject> object,
Heap::RootListIndex* index_return);
RootIndex* index_return);

enum CodeGenResult { kSuccess, kTooManyDeoptimizationBailouts };

@ -471,9 +471,9 @@ InstructionOperand OperandForDeopt(Isolate* isolate, OperandGenerator* g,
}

Handle<HeapObject> constant = HeapConstantOf(input->op());
Heap::RootListIndex root_index;
RootIndex root_index;
if (isolate->heap()->IsRootHandle(constant, &root_index) &&
root_index == Heap::kOptimizedOutRootIndex) {
root_index == RootIndex::kOptimizedOut) {
// For an optimized-out object we return an invalid instruction
// operand, so that we take the fast path for optimized-out values.
return InstructionOperand();

@ -3373,7 +3373,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -3623,7 +3623,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -2574,7 +2574,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -3180,7 +3180,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -1218,7 +1218,7 @@ class RepresentationSelector {
return kNoWriteBarrier;
}
if (value_type.IsHeapConstant()) {
Heap::RootListIndex root_index;
RootIndex root_index;
Heap* heap = jsgraph_->isolate()->heap();
if (heap->IsRootHandle(value_type.AsHeapConstant()->Value(),
&root_index)) {

@ -2654,7 +2654,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kX64StackCheck:
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ CompareRoot(rsp, RootIndex::kStackLimit);
break;
case kWord32AtomicExchangeInt8: {
__ xchgb(i.InputRegister(0), i.MemoryOperand(1));
@ -3270,7 +3270,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
Heap::RootListIndex index;
RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {

@ -1721,7 +1721,7 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
X64OperandGenerator g(selector);
if (selector->CanUseRootsRegister()) {
Heap* const heap = selector->isolate()->heap();
Heap::RootListIndex root_index;
RootIndex root_index;
HeapObjectBinopMatcher m(node);
if (m.right().HasValue() &&
heap->IsRootHandle(m.right().Value(), &root_index)) {

@ -99,8 +99,8 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
// Fail safe in the unlikely case of an arbitrary root-relative offset.
if (offset_in_roots_table % kPointerSize != 0) return nullptr;

Heap::RootListIndex root_index =
static_cast<Heap::RootListIndex>(offset_in_roots_table / kPointerSize);
RootIndex root_index =
static_cast<RootIndex>(offset_in_roots_table / kPointerSize);

HeapStringAllocator allocator;
StringStream accumulator(&allocator);

@ -32,9 +32,10 @@ bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
Heap* heap = isolate->heap();
Object** roots_array_start = heap->roots_array_start();
if (roots_array_start <= location_ &&
location_ < roots_array_start + Heap::kStrongRootListLength &&
location_ < roots_array_start +
static_cast<int>(RootIndex::kStrongRootListLength) &&
heap->RootCanBeTreatedAsConstant(
static_cast<Heap::RootListIndex>(location_ - roots_array_start))) {
static_cast<RootIndex>(location_ - roots_array_start))) {
return true;
}
if (!AllowHandleDereference::IsAllowed()) return false;
@ -160,7 +161,7 @@ Object** CanonicalHandleScope::Lookup(Object* object) {
int index = root_index_map_->Lookup(HeapObject::cast(object));
if (index != RootIndexMap::kInvalidRootIndex) {
return isolate_->heap()
->root_handle(static_cast<Heap::RootListIndex>(index))
->root_handle(static_cast<RootIndex>(index))
.location();
}
}
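
The handles.cc hunk above shows the conversion running in both directions: the strong-root count is brought down to an integer with static_cast<int>(RootIndex::kStrongRootListLength), and a raw table index or pointer difference is lifted back up with static_cast<RootIndex>(...). A small self-contained sketch of that round trip (entry values and the loop body are placeholders):

enum class RootIndex { kFirstRoot = 0, kStrongRootListLength = 3 };  // illustrative

void VisitStrongRoots() {
  for (int i = 0; i < static_cast<int>(RootIndex::kStrongRootListLength); i++) {
    RootIndex root_index = static_cast<RootIndex>(i);  // int -> scoped enum
    (void)root_index;  // stands in for the real per-root visit
  }
}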

@ -16,69 +16,67 @@
namespace v8 {
namespace internal {

#define ROOT_ACCESSOR(type, name, camel_name) \
Handle<type> Factory::name() { \
return Handle<type>(bit_cast<type**>( \
&isolate()->heap()->roots_[Heap::k##camel_name##RootIndex])); \
#define ROOT_ACCESSOR(type, name, camel_name) \
Handle<type> Factory::name() { \
return Handle<type>(bit_cast<type**>( \
&isolate()->heap()->roots_[RootIndex::k##camel_name])); \
}
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>(bit_cast<Map**>( \
&isolate()->heap()->roots_[Heap::k##Name##MapRootIndex])); \
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>( \
bit_cast<Map**>(&isolate()->heap()->roots_[RootIndex::k##Name##Map])); \
}
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define ALLOCATION_SITE_MAP_ACCESSOR(NAME, Name, Size, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>(bit_cast<Map**>( \
&isolate()->heap()->roots_[Heap::k##Name##Size##MapRootIndex])); \
#define ALLOCATION_SITE_MAP_ACCESSOR(NAME, Name, Size, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>(bit_cast<Map**>( \
&isolate()->heap()->roots_[RootIndex::k##Name##Size##Map])); \
}
ALLOCATION_SITE_LIST(ALLOCATION_SITE_MAP_ACCESSOR)
#undef ALLOCATION_SITE_MAP_ACCESSOR

#define DATA_HANDLER_MAP_ACCESSOR(NAME, Name, Size, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>(bit_cast<Map**>( \
&isolate()->heap()->roots_[Heap::k##Name##Size##MapRootIndex])); \
#define DATA_HANDLER_MAP_ACCESSOR(NAME, Name, Size, name) \
Handle<Map> Factory::name##_map() { \
return Handle<Map>(bit_cast<Map**>( \
&isolate()->heap()->roots_[RootIndex::k##Name##Size##Map])); \
}
DATA_HANDLER_LIST(DATA_HANDLER_MAP_ACCESSOR)
#undef DATA_HANDLER_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
Handle<String> Factory::name() { \
return Handle<String>(bit_cast<String**>( \
&isolate()->heap()->roots_[Heap::k##name##RootIndex])); \
#define STRING_ACCESSOR(name, str) \
Handle<String> Factory::name() { \
return Handle<String>( \
bit_cast<String**>(&isolate()->heap()->roots_[RootIndex::k##name])); \
}
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
Handle<Symbol> Factory::name() { \
return Handle<Symbol>(bit_cast<Symbol**>( \
&isolate()->heap()->roots_[Heap::k##name##RootIndex])); \
#define SYMBOL_ACCESSOR(name) \
Handle<Symbol> Factory::name() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&isolate()->heap()->roots_[RootIndex::k##name])); \
}
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) \
Handle<Symbol> Factory::name() { \
return Handle<Symbol>(bit_cast<Symbol**>( \
&isolate()->heap()->roots_[Heap::k##name##RootIndex])); \
#define SYMBOL_ACCESSOR(name, description) \
Handle<Symbol> Factory::name() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&isolate()->heap()->roots_[RootIndex::k##name])); \
}
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define ACCESSOR_INFO_ACCESSOR(accessor_name, AccessorName, ...) \
Handle<AccessorInfo> Factory::accessor_name##_accessor() { \
return Handle<AccessorInfo>(bit_cast<AccessorInfo**>( \
&isolate() \
->heap() \
->roots_[Heap::k##AccessorName##AccessorRootIndex])); \
#define ACCESSOR_INFO_ACCESSOR(accessor_name, AccessorName, ...) \
Handle<AccessorInfo> Factory::accessor_name##_accessor() { \
return Handle<AccessorInfo>(bit_cast<AccessorInfo**>( \
&isolate()->heap()->roots_[RootIndex::k##AccessorName##Accessor])); \
}
ACCESSOR_INFO_LIST(ACCESSOR_INFO_ACCESSOR)
#undef ACCESSOR_INFO_ACCESSOR
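
Every accessor macro above now indexes the heap's roots_ storage directly with a RootIndex value, which only works if that storage exposes an operator[] keyed by the scoped enum. A minimal sketch of such a wrapper (class name, element type, and entries are illustrative, not the real V8 declarations):

#include <cstddef>

enum class RootIndex : size_t { kFirstRoot, kRootListLength };  // illustrative

class RootsTable {  // hypothetical minimal wrapper around the roots array
 public:
  void*& operator[](RootIndex root_index) {
    return roots_[static_cast<size_t>(root_index)];
  }

 private:
  void* roots_[static_cast<size_t>(RootIndex::kRootListLength)];
};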
|
||||
|
@ -288,9 +288,9 @@ Handle<PropertyArray> Factory::NewPropertyArray(int length,
|
||||
return array;
|
||||
}
|
||||
|
||||
Handle<FixedArray> Factory::NewFixedArrayWithFiller(
|
||||
Heap::RootListIndex map_root_index, int length, Object* filler,
|
||||
PretenureFlag pretenure) {
|
||||
Handle<FixedArray> Factory::NewFixedArrayWithFiller(RootIndex map_root_index,
|
||||
int length, Object* filler,
|
||||
PretenureFlag pretenure) {
|
||||
HeapObject* result = AllocateRawFixedArray(length, pretenure);
|
||||
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
|
||||
Map* map = Map::cast(isolate()->heap()->root(map_root_index));
|
||||
@ -302,8 +302,8 @@ Handle<FixedArray> Factory::NewFixedArrayWithFiller(
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Handle<T> Factory::NewFixedArrayWithMap(Heap::RootListIndex map_root_index,
|
||||
int length, PretenureFlag pretenure) {
|
||||
Handle<T> Factory::NewFixedArrayWithMap(RootIndex map_root_index, int length,
|
||||
PretenureFlag pretenure) {
|
||||
static_assert(std::is_base_of<FixedArray, T>::value,
|
||||
"T must be a descendant of FixedArray");
|
||||
// Zero-length case must be handled outside, where the knowledge about
|
||||
@ -314,7 +314,7 @@ Handle<T> Factory::NewFixedArrayWithMap(Heap::RootListIndex map_root_index,
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Handle<T> Factory::NewWeakFixedArrayWithMap(Heap::RootListIndex map_root_index,
|
||||
Handle<T> Factory::NewWeakFixedArrayWithMap(RootIndex map_root_index,
|
||||
int length,
|
||||
PretenureFlag pretenure) {
|
||||
static_assert(std::is_base_of<WeakFixedArray, T>::value,
|
||||
@ -337,16 +337,16 @@ Handle<T> Factory::NewWeakFixedArrayWithMap(Heap::RootListIndex map_root_index,
|
||||
}
|
||||
|
||||
template Handle<FixedArray> Factory::NewFixedArrayWithMap<FixedArray>(
|
||||
Heap::RootListIndex, int, PretenureFlag);
|
||||
RootIndex, int, PretenureFlag);
|
||||
|
||||
template Handle<DescriptorArray>
|
||||
Factory::NewWeakFixedArrayWithMap<DescriptorArray>(Heap::RootListIndex, int,
|
||||
Factory::NewWeakFixedArrayWithMap<DescriptorArray>(RootIndex, int,
|
||||
PretenureFlag);
|
||||
|
||||
Handle<FixedArray> Factory::NewFixedArray(int length, PretenureFlag pretenure) {
|
||||
DCHECK_LE(0, length);
|
||||
if (length == 0) return empty_fixed_array();
|
||||
return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
|
||||
return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
|
||||
*undefined_value(), pretenure);
|
||||
}
|
||||
|
||||
@ -356,7 +356,7 @@ Handle<WeakFixedArray> Factory::NewWeakFixedArray(int length,
|
||||
if (length == 0) return empty_weak_fixed_array();
|
||||
HeapObject* result =
|
||||
AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
|
||||
DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakFixedArrayMapRootIndex));
|
||||
DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kWeakFixedArrayMap));
|
||||
result->set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);
|
||||
Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
|
||||
array->set_length(length);
|
||||
@ -392,7 +392,7 @@ Handle<FixedArray> Factory::NewFixedArrayWithHoles(int length,
|
||||
PretenureFlag pretenure) {
|
||||
DCHECK_LE(0, length);
|
||||
if (length == 0) return empty_fixed_array();
|
||||
return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
|
||||
return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
|
||||
*the_hole_value(), pretenure);
|
||||
}
|
||||
|
||||
@ -404,7 +404,7 @@ Handle<FixedArray> Factory::NewUninitializedFixedArray(
|
||||
// TODO(ulan): As an experiment this temporarily returns an initialized fixed
|
||||
// array. After getting canary/performance coverage, either remove the
|
||||
// function or revert to returning uninitilized array.
|
||||
return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
|
||||
return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
|
||||
*undefined_value(), pretenure);
|
||||
}
|
||||
|
||||
@ -453,7 +453,7 @@ Handle<ObjectBoilerplateDescription> Factory::NewObjectBoilerplateDescription(
|
||||
|
||||
Handle<ObjectBoilerplateDescription> description =
|
||||
Handle<ObjectBoilerplateDescription>::cast(NewFixedArrayWithMap(
|
||||
Heap::kObjectBoilerplateDescriptionMapRootIndex, size, TENURED));
|
||||
RootIndex::kObjectBoilerplateDescriptionMap, size, TENURED));
|
||||
|
||||
if (has_different_size_backing_store) {
|
||||
DCHECK_IMPLIES((boilerplate == (all_properties - index_keys)),
|
||||
@ -774,7 +774,7 @@ Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
|
||||
// The canonical empty_string is the only zero-length string we allow.
|
||||
DCHECK_IMPLIES(
|
||||
length == 0,
|
||||
isolate()->heap()->roots_[Heap::kempty_stringRootIndex] == nullptr);
|
||||
isolate()->heap()->roots_[RootIndex::kempty_string] == nullptr);
|
||||
|
||||
Map* map = *one_byte_internalized_string_map();
|
||||
int size = SeqOneByteString::SizeFor(length);
|
||||
@ -1354,7 +1354,7 @@ Handle<Symbol> Factory::NewPrivateFieldSymbol() {
|
||||
|
||||
Handle<NativeContext> Factory::NewNativeContext() {
|
||||
Handle<NativeContext> context = NewFixedArrayWithMap<NativeContext>(
|
||||
Heap::kNativeContextMapRootIndex, Context::NATIVE_CONTEXT_SLOTS, TENURED);
|
||||
RootIndex::kNativeContextMap, Context::NATIVE_CONTEXT_SLOTS, TENURED);
|
||||
context->set_native_context(*context);
|
||||
context->set_errors_thrown(Smi::kZero);
|
||||
context->set_math_random_index(Smi::kZero);
|
||||
@ -1366,7 +1366,7 @@ Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
|
||||
Handle<ScopeInfo> scope_info) {
|
||||
DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
|
||||
Handle<Context> context = NewFixedArrayWithMap<Context>(
|
||||
Heap::kScriptContextMapRootIndex, scope_info->ContextLength(), TENURED);
|
||||
      RootIndex::kScriptContextMap, scope_info->ContextLength(), TENURED);
  context->set_scope_info(*scope_info);
  context->set_previous(*outer);
  context->set_extension(*the_hole_value());
@@ -1378,8 +1378,7 @@ Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,

Handle<ScriptContextTable> Factory::NewScriptContextTable() {
  Handle<ScriptContextTable> context_table =
      NewFixedArrayWithMap<ScriptContextTable>(
          Heap::kScriptContextTableMapRootIndex,
          ScriptContextTable::kMinLength);
          RootIndex::kScriptContextTableMap, ScriptContextTable::kMinLength);
  context_table->set_used(0);
  return context_table;
}

@@ -1389,7 +1388,7 @@ Handle<Context> Factory::NewModuleContext(Handle<Module> module,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kModuleContextMapRootIndex, scope_info->ContextLength(), TENURED);
      RootIndex::kModuleContextMap, scope_info->ContextLength(), TENURED);
  context->set_scope_info(*scope_info);
  context->set_previous(*outer);
  context->set_extension(*module);

@@ -1402,13 +1401,13 @@ Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  int length = scope_info->ContextLength();
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, length);
  Heap::RootListIndex mapRootIndex;
  RootIndex mapRootIndex;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      mapRootIndex = Heap::kEvalContextMapRootIndex;
      mapRootIndex = RootIndex::kEvalContextMap;
      break;
    case FUNCTION_SCOPE:
      mapRootIndex = Heap::kFunctionContextMapRootIndex;
      mapRootIndex = RootIndex::kFunctionContextMap;
      break;
    default:
      UNREACHABLE();

@@ -1426,7 +1425,7 @@ Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<Object> thrown_object) {
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kCatchContextMapRootIndex, Context::MIN_CONTEXT_SLOTS + 1);
      RootIndex::kCatchContextMap, Context::MIN_CONTEXT_SLOTS + 1);
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*the_hole_value());

@@ -1446,7 +1445,7 @@ Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
          ? Handle<HeapObject>::cast(the_hole_value())
          : Handle<HeapObject>::cast(extension);
  Handle<Context> c = NewFixedArrayWithMap<Context>(
      Heap::kDebugEvaluateContextMapRootIndex, Context::MIN_CONTEXT_SLOTS + 2);
      RootIndex::kDebugEvaluateContextMap, Context::MIN_CONTEXT_SLOTS + 2);
  c->set_scope_info(*scope_info);
  c->set_previous(*previous);
  c->set_native_context(previous->native_context());

@@ -1460,7 +1459,7 @@ Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kWithContextMapRootIndex, Context::MIN_CONTEXT_SLOTS);
      RootIndex::kWithContextMap, Context::MIN_CONTEXT_SLOTS);
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*extension);

@@ -1472,7 +1471,7 @@ Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), BLOCK_SCOPE);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kBlockContextMapRootIndex, scope_info->ContextLength());
      RootIndex::kBlockContextMap, scope_info->ContextLength());
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*the_hole_value());

@@ -1484,7 +1483,7 @@ Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
                                           int length) {
  DCHECK_GE(length, Context::MIN_CONTEXT_SLOTS);
  Handle<Context> context =
      NewFixedArrayWithMap<Context>(Heap::kFunctionContextMapRootIndex, length);
      NewFixedArrayWithMap<Context>(RootIndex::kFunctionContextMap, length);
  context->set_scope_info(ReadOnlyRoots(isolate()).empty_scope_info());
  context->set_extension(*the_hole_value());
  context->set_native_context(*native_context);

@@ -1787,7 +1786,7 @@ Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
                                                    int slack) {
  int capacity = TransitionArray::LengthFor(number_of_transitions + slack);
  Handle<TransitionArray> array = NewWeakFixedArrayWithMap<TransitionArray>(
      Heap::kTransitionArrayMapRootIndex, capacity, TENURED);
      RootIndex::kTransitionArrayMap, capacity, TENURED);
  // Transition arrays are tenured. When black allocation is on we have to
  // add the transition array to the list of encountered_transition_arrays.
  Heap* heap = isolate()->heap();

@@ -2489,12 +2488,12 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
}

Handle<ScopeInfo> Factory::NewScopeInfo(int length) {
  return NewFixedArrayWithMap<ScopeInfo>(Heap::kScopeInfoMapRootIndex, length,
  return NewFixedArrayWithMap<ScopeInfo>(RootIndex::kScopeInfoMap, length,
                                         TENURED);
}

Handle<ModuleInfo> Factory::NewModuleInfo() {
  return NewFixedArrayWithMap<ModuleInfo>(Heap::kModuleInfoMapRootIndex,
  return NewFixedArrayWithMap<ModuleInfo>(RootIndex::kModuleInfoMap,
                                          ModuleInfo::kLength, TENURED);
}
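The call sites above now pass a scoped RootIndex where an untyped Heap enum value used to go. A standalone sketch of why the enum class is the safer parameter type; the names below are illustrative, not the V8 declarations:

  #include <cassert>

  enum class RootIndex { kFixedArrayMap, kScopeInfoMap, kRootListLength };

  // Hypothetical helper mirroring the shape of NewFixedArrayWithMap.
  int NewArrayWithMap(RootIndex map_root_index, int length) {
    return static_cast<int>(map_root_index) * 100 + length;
  }

  int main() {
    assert(NewArrayWithMap(RootIndex::kScopeInfoMap, 16) == 116);
    // NewArrayWithMap(1, 16);  // no longer compiles: int is not a RootIndex
    return 0;
  }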
@@ -107,14 +107,13 @@ class V8_EXPORT_PRIVATE Factory {
  // Allocates a fixed array-like object with given map and initialized with
  // undefined values.
  template <typename T = FixedArray>
  Handle<T> NewFixedArrayWithMap(Heap::RootListIndex map_root_index, int length,
  Handle<T> NewFixedArrayWithMap(RootIndex map_root_index, int length,
                                 PretenureFlag pretenure = NOT_TENURED);

  // Allocates a weak fixed array-like object with given map and initialized
  // with undefined values.
  template <typename T = WeakFixedArray>
  Handle<T> NewWeakFixedArrayWithMap(Heap::RootListIndex map_root_index,
                                     int length,
  Handle<T> NewWeakFixedArrayWithMap(RootIndex map_root_index, int length,
                                     PretenureFlag pretenure = NOT_TENURED);

  // Allocates a fixed array initialized with undefined values.

@@ -976,7 +975,7 @@ class V8_EXPORT_PRIVATE Factory {
  HeapObject* AllocateRawArray(int size, PretenureFlag pretenure);
  HeapObject* AllocateRawFixedArray(int length, PretenureFlag pretenure);
  HeapObject* AllocateRawWeakArrayList(int length, PretenureFlag pretenure);
  Handle<FixedArray> NewFixedArrayWithFiller(Heap::RootListIndex map_root_index,
  Handle<FixedArray> NewFixedArrayWithFiller(RootIndex map_root_index,
                                             int length, Object* filler,
                                             PretenureFlag pretenure);

@@ -56,32 +56,34 @@ HeapObject* AllocationResult::ToObjectChecked() {
}

#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
  type* Heap::name() { return type::cast(roots_[RootIndex::k##camel_name]); }
MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define DATA_HANDLER_MAP_ACCESSOR(NAME, Name, Size, name)  \
  Map* Heap::name##_map() {                                \
    return Map::cast(roots_[k##Name##Size##MapRootIndex]); \
#define DATA_HANDLER_MAP_ACCESSOR(NAME, Name, Size, name)    \
  Map* Heap::name##_map() {                                  \
    return Map::cast(roots_[RootIndex::k##Name##Size##Map]); \
  }
DATA_HANDLER_LIST(DATA_HANDLER_MAP_ACCESSOR)
#undef DATA_HANDLER_MAP_ACCESSOR

#define ACCESSOR_INFO_ACCESSOR(accessor_name, AccessorName, ...)          \
  AccessorInfo* Heap::accessor_name##_accessor() {                        \
    return AccessorInfo::cast(roots_[k##AccessorName##AccessorRootIndex]); \
#define ACCESSOR_INFO_ACCESSOR(accessor_name, AccessorName, ...)             \
  AccessorInfo* Heap::accessor_name##_accessor() {                           \
    return AccessorInfo::cast(roots_[RootIndex::k##AccessorName##Accessor]); \
  }
ACCESSOR_INFO_LIST(ACCESSOR_INFO_ACCESSOR)
#undef ACCESSOR_INFO_ACCESSOR

#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted. */     \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value;                                 \
#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted. */     \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(RootIndex::k##camel_name));    \
    DCHECK_IMPLIES(                                                           \
        static_cast<int>(RootIndex::k##camel_name) < kOldSpaceRoots,          \
        !InNewSpace(value));                                                  \
    roots_[RootIndex::k##camel_name] = value;                                 \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
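For the accessors above to keep writing roots_[RootIndex::k...], the new RootsTable has to overload operator[] for the enum class. A minimal standalone sketch of that indexing; the member names are assumptions, not the exact V8 declaration:

  #include <cstddef>

  enum class RootIndex : size_t { kFreeSpaceMap, kStringTable, kRootListLength };

  class Object;

  struct RootsTable {
    Object* roots_[static_cast<size_t>(RootIndex::kRootListLength)];
    Object*& operator[](RootIndex index) {
      return roots_[static_cast<size_t>(index)];
    }
  };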
@@ -234,7 +234,6 @@ Heap::Heap()
  // Ensure old_generation_size_ is a multiple of kPageSize.
  DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));

  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
  set_native_contexts_list(nullptr);
  set_allocation_sites_list(Smi::kZero);
  // Put a dummy entry in the remembered pages so we can find the list the

@@ -701,7 +700,7 @@ const char* Heap::GetSpaceName(int idx) {
}

void Heap::SetRootCodeStubs(SimpleNumberDictionary* value) {
  roots_[kCodeStubsRootIndex] = value;
  roots_[RootIndex::kCodeStubs] = value;
}

void Heap::RepairFreeListsAfterDeserialization() {

@@ -2475,29 +2474,29 @@ void Heap::CreateFixedStubs() {
  Heap::CreateJSRunMicrotasksEntryStub();
}

bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
bool Heap::RootCanBeWrittenAfterInitialization(RootIndex root_index) {
  switch (root_index) {
    case kNumberStringCacheRootIndex:
    case kCodeStubsRootIndex:
    case kScriptListRootIndex:
    case kMaterializedObjectsRootIndex:
    case kDetachedContextsRootIndex:
    case kRetainedMapsRootIndex:
    case kRetainingPathTargetsRootIndex:
    case kFeedbackVectorsForProfilingToolsRootIndex:
    case kNoScriptSharedFunctionInfosRootIndex:
    case kSerializedObjectsRootIndex:
    case kSerializedGlobalProxySizesRootIndex:
    case kPublicSymbolTableRootIndex:
    case kApiSymbolTableRootIndex:
    case kApiPrivateSymbolTableRootIndex:
    case kMessageListenersRootIndex:
    case RootIndex::kNumberStringCache:
    case RootIndex::kCodeStubs:
    case RootIndex::kScriptList:
    case RootIndex::kMaterializedObjects:
    case RootIndex::kDetachedContexts:
    case RootIndex::kRetainedMaps:
    case RootIndex::kRetainingPathTargets:
    case RootIndex::kFeedbackVectorsForProfilingTools:
    case RootIndex::kNoScriptSharedFunctionInfos:
    case RootIndex::kSerializedObjects:
    case RootIndex::kSerializedGlobalProxySizes:
    case RootIndex::kPublicSymbolTable:
    case RootIndex::kApiSymbolTable:
    case RootIndex::kApiPrivateSymbolTable:
    case RootIndex::kMessageListeners:
    // Smi values
#define SMI_ENTRY(type, name, Name) case k##Name##RootIndex:
#define SMI_ENTRY(type, name, Name) case RootIndex::k##Name:
      SMI_ROOT_LIST(SMI_ENTRY)
#undef SMI_ENTRY
    // String table
    case kStringTableRootIndex:
    case RootIndex::kStringTable:
      return true;

    default:

@@ -2505,7 +2504,7 @@ bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
  }
}

bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
bool Heap::RootCanBeTreatedAsConstant(RootIndex root_index) {
  bool can_be = !RootCanBeWrittenAfterInitialization(root_index) &&
                !InNewSpace(root(root_index));
  DCHECK_IMPLIES(can_be, IsImmovable(HeapObject::cast(root(root_index))));

@@ -2523,11 +2522,11 @@ void Heap::FlushNumberStringCache() {

namespace {

Heap::RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type) {
RootIndex RootIndexForFixedTypedArray(ExternalArrayType array_type) {
  switch (array_type) {
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype) \
  case kExternal##Type##Array:                            \
    return Heap::kFixed##Type##ArrayMapRootIndex;
    return RootIndex::kFixed##Type##ArrayMap;

    TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX)
#undef ARRAY_TYPE_TO_ROOT_INDEX

@@ -2535,11 +2534,11 @@ Heap::RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type) {
  UNREACHABLE();
}

Heap::RootListIndex RootIndexForFixedTypedArray(ElementsKind elements_kind) {
RootIndex RootIndexForFixedTypedArray(ElementsKind elements_kind) {
  switch (elements_kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                           \
    return Heap::kFixed##Type##ArrayMapRootIndex;
    return RootIndex::kFixed##Type##ArrayMap;
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
    default:
      UNREACHABLE();

@@ -2547,12 +2546,11 @@ Heap::RootListIndex RootIndexForFixedTypedArray(ElementsKind elements_kind) {
  }
}

Heap::RootListIndex RootIndexForEmptyFixedTypedArray(
    ElementsKind elements_kind) {
RootIndex RootIndexForEmptyFixedTypedArray(ElementsKind elements_kind) {
  switch (elements_kind) {
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                                     \
    return Heap::kEmptyFixed##Type##ArrayRootIndex;
    return RootIndex::kEmptyFixed##Type##Array;

    TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX)
#undef ELEMENT_KIND_TO_ROOT_INDEX

@@ -2583,11 +2581,11 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
  HeapObject* filler = HeapObject::FromAddress(addr);
  if (size == kPointerSize) {
    filler->set_map_after_allocation(
        reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex)),
        reinterpret_cast<Map*>(root(RootIndex::kOnePointerFillerMap)),
        SKIP_WRITE_BARRIER);
  } else if (size == 2 * kPointerSize) {
    filler->set_map_after_allocation(
        reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)),
        reinterpret_cast<Map*>(root(RootIndex::kTwoPointerFillerMap)),
        SKIP_WRITE_BARRIER);
    if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
      Memory<Address>(addr + kPointerSize) =

@@ -2596,7 +2594,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
  } else {
    DCHECK_GT(size, 2 * kPointerSize);
    filler->set_map_after_allocation(
        reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)),
        reinterpret_cast<Map*>(root(RootIndex::kFreeSpaceMap)),
        SKIP_WRITE_BARRIER);
    FreeSpace::cast(filler)->relaxed_write_size(size);
    if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {

@@ -3624,16 +3622,15 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
  }
}


bool Heap::RootIsImmortalImmovable(int root_index) {
bool Heap::RootIsImmortalImmovable(RootIndex root_index) {
  switch (root_index) {
#define IMMORTAL_IMMOVABLE_ROOT(name) case Heap::k##name##RootIndex:
#define IMMORTAL_IMMOVABLE_ROOT(name) case RootIndex::k##name:
    IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) case Heap::k##name##RootIndex:
#define INTERNALIZED_STRING(name, value) case RootIndex::k##name:
    INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) case Heap::k##Name##MapRootIndex:
#define STRING_TYPE(NAME, size, name, Name) case RootIndex::k##Name##Map:
    STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
    return true;

@@ -3869,7 +3866,7 @@ void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
                         mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
  v->VisitRootPointer(
      Root::kStringTable, nullptr,
      reinterpret_cast<Object**>(&roots_[kStringTableRootIndex]));
      reinterpret_cast<Object**>(&roots_[RootIndex::kStringTable]));
  v->Synchronize(VisitorSynchronization::kStringTable);
  if (!isMinorGC && mode != VISIT_ALL_IN_SWEEP_NEWSPACE &&
      mode != VISIT_FOR_SERIALIZATION) {

@@ -3884,8 +3881,8 @@ void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
void Heap::IterateSmiRoots(RootVisitor* v) {
  // Acquire execution access since we are going to read stack limit values.
  ExecutionAccess access(isolate());
  v->VisitRootPointers(Root::kSmiRootList, nullptr, &roots_[kSmiRootsStart],
                       &roots_[kRootListLength]);
  v->VisitRootPointers(Root::kSmiRootList, nullptr, roots_.smi_roots_begin(),
                       roots_.smi_roots_end());
  v->Synchronize(VisitorSynchronization::kSmiRootList);
}

@@ -3942,8 +3939,9 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
  const bool isMinorGC = mode == VISIT_ALL_IN_SCAVENGE ||
                         mode == VISIT_ALL_IN_MINOR_MC_MARK ||
                         mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
  v->VisitRootPointers(Root::kStrongRootList, nullptr, &roots_[0],
                       &roots_[kStrongRootListLength]);
  v->VisitRootPointers(Root::kStrongRootList, nullptr,
                       &roots_[RootIndex::kRootsStart],
                       &roots_[RootIndex::kStrongRootListLength]);
  v->Synchronize(VisitorSynchronization::kStrongRootList);

  isolate_->bootstrapper()->Iterate(v);

@@ -4603,15 +4601,15 @@ void Heap::SetStackLimits() {

  // Set up the special root array entries containing the stack limits.
  // These are actually addresses, but the tag makes the GC ignore it.
  roots_[kStackLimitRootIndex] = reinterpret_cast<Object*>(
  roots_[RootIndex::kStackLimit] = reinterpret_cast<Object*>(
      (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
  roots_[kRealStackLimitRootIndex] = reinterpret_cast<Object*>(
  roots_[RootIndex::kRealStackLimit] = reinterpret_cast<Object*>(
      (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
}

void Heap::ClearStackLimits() {
  roots_[kStackLimitRootIndex] = Smi::kZero;
  roots_[kRealStackLimitRootIndex] = Smi::kZero;
  roots_[RootIndex::kStackLimit] = Smi::kZero;
  roots_[RootIndex::kRealStackLimit] = Smi::kZero;
}

int Heap::NextAllocationTimeout(int current_timeout) {
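A note on SetStackLimits above: the limit is an address, but masking in the smi tag makes the slot look like a small integer, so the GC skips it during root visits. A standalone model of the trick, with tag constants assumed for the sketch:

  #include <cassert>
  #include <cstdint>

  constexpr uintptr_t kSmiTagMask = 1;  // assumed 1-bit smi tag
  constexpr uintptr_t kSmiTag = 0;

  constexpr uintptr_t TagLimitAsSmi(uintptr_t limit) {
    return (limit & ~kSmiTagMask) | kSmiTag;
  }

  int main() {
    assert((TagLimitAsSmi(0x7fff1235) & kSmiTagMask) == kSmiTag);
    return 0;
  }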
107
src/heap/heap.h
@@ -309,55 +309,6 @@ struct CommentStatistic {

class Heap {
 public:
  // Declare all the root indices. This defines the root list order.
  // clang-format off
  enum RootListIndex {
#define DECL(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(DECL)
#undef DECL

#define DECL(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(DECL)
#undef DECL

#define DECL(name) k##name##RootIndex,
    PRIVATE_SYMBOL_LIST(DECL)
#undef DECL

#define DECL(name, description) k##name##RootIndex,
    PUBLIC_SYMBOL_LIST(DECL)
    WELL_KNOWN_SYMBOL_LIST(DECL)
#undef DECL

#define DECL(accessor_name, AccessorName, ...) \
  k##AccessorName##AccessorRootIndex,
    ACCESSOR_INFO_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, Size, name) k##Name##Size##MapRootIndex,
    ALLOCATION_SITE_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, Size, name) k##Name##Size##MapRootIndex,
    DATA_HANDLER_LIST(DECL)
#undef DECL

    kStringTableRootIndex,

#define DECL(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(DECL)
#undef DECL

    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };
  // clang-format on

  enum FindMementoMode { kForRuntime, kForGC };

  enum HeapState {
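The enum deleted above presumably reappears as a scoped equivalent built from the same list macros. A sketch of its likely shape, not the exact new declaration:

  enum class RootIndex : unsigned {
  #define DECL(type, name, camel_name) k##camel_name,
    STRONG_ROOT_LIST(DECL)
  #undef DECL
    // ... the string, symbol, accessor, struct-map and data-handler lists
    // expand here exactly as in the deleted enum ...
    kStringTable,
    // ... SMI_ROOT_LIST expands here ...
    kRootListLength,
    kStrongRootListLength = kStringTable,
    kSmiRootsStart = kStringTable + 1
  };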
@@ -413,13 +364,18 @@ class Heap {

  static const int kMinPromotedPercentForFastPromotionMode = 90;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
  STATIC_ASSERT(static_cast<int>(RootIndex::kUndefinedValue) ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kTheHoleValue) ==
                Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kNullValue) ==
                Internals::kNullValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kTrueValue) ==
                Internals::kTrueValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kFalseValue) ==
                Internals::kFalseValueRootIndex);
  STATIC_ASSERT(static_cast<int>(RootIndex::kempty_string) ==
                Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.

@@ -430,14 +386,14 @@ class Heap {

  void FatalProcessOutOfMemory(const char* location);

  V8_EXPORT_PRIVATE static bool RootIsImmortalImmovable(int root_index);
  V8_EXPORT_PRIVATE static bool RootIsImmortalImmovable(RootIndex root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  static bool RootCanBeWrittenAfterInitialization(RootIndex root_index);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {

@@ -825,21 +781,18 @@ class Heap {
  ACCESSOR_INFO_LIST(ACCESSOR_INFO_ACCESSOR)
#undef ACCESSOR_INFO_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
  Object* root(RootIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootIndex index) {
    return Handle<Object>(&roots_[index]);
  }

  template <typename T>
  bool IsRootHandle(Handle<T> handle, RootListIndex* index) const {
    Object** const handle_location = bit_cast<Object**>(handle.address());
    if (handle_location >= &roots_[kRootListLength]) return false;
    if (handle_location < &roots_[0]) return false;
    *index = static_cast<RootListIndex>(handle_location - &roots_[0]);
    return true;
  bool IsRootHandle(Handle<T> handle, RootIndex* index) const {
    return roots_.IsRootHandle(handle, index);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }
  Object** roots_array_start() { return roots_.roots_; }

  ExternalReferenceTable* external_reference_table() {
    DCHECK(external_reference_table_.is_initialized());

@@ -867,23 +820,23 @@ class Heap {
  void SetRootCodeStubs(SimpleNumberDictionary* value);

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
    roots_[RootIndex::kMaterializedObjects] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
    roots_[RootIndex::kScriptList] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
    roots_[RootIndex::kStringTable] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
    roots_[RootIndex::kNoScriptSharedFunctionInfos] = value;
  }

  void SetMessageListeners(TemplateList* value) {
    roots_[kMessageListenersRootIndex] = value;
    roots_[RootIndex::kMessageListeners] = value;
  }

  // Set the stack limit in the roots_ array. Some architectures generate

@@ -896,7 +849,7 @@ class Heap {
  void ClearStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);
  bool RootCanBeTreatedAsConstant(RootIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  Map* MapForFixedTypedArray(ElementsKind elements_kind);

@@ -1542,18 +1495,18 @@ class Heap {
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
    RootIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
    RootIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
    RootIndex index;
  };

  struct GCCallbackTuple {

@@ -1974,13 +1927,13 @@ class Heap {
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];
  RootsTable roots_;

  // This table is accessed from builtin code compiled into the snapshot, and
  // thus its offset from roots_ must remain static. This is verified in
  // Isolate::Init() using runtime checks.
  static constexpr int kRootsExternalReferenceTableOffset =
      kRootListLength * kPointerSize;
      static_cast<int>(RootIndex::kRootListLength) * kPointerSize;
  ExternalReferenceTable external_reference_table_;

  // As external references above, builtins are accessed through an offset from
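One consequence visible in kRootsExternalReferenceTableOffset above: a scoped enumerator no longer converts to int implicitly, even in a constant expression, so the cast is mandatory. A standalone illustration:

  enum class RootIndex { kA, kB, kRootListLength };
  constexpr int kPointerSize = 8;  // assumption for the sketch

  // constexpr int kBad = RootIndex::kRootListLength * kPointerSize;  // error
  constexpr int kRootsTableSize =
      static_cast<int>(RootIndex::kRootListLength) * kPointerSize;
  static_assert(kRootsTableSize == 16, "two roots, one pointer each");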
@@ -58,31 +58,31 @@ bool Heap::CreateHeapObjects() {

const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
  {type, size, k##camel_name##MapRootIndex},
  {type, size, RootIndex::k##camel_name##Map},
    STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
#undef STRING_TYPE_ELEMENT
};

const Heap::ConstantStringTable Heap::constant_string_table[] = {
    {"", kempty_stringRootIndex},
#define CONSTANT_STRING_ELEMENT(name, contents) {contents, k##name##RootIndex},
    {"", RootIndex::kempty_string},
#define CONSTANT_STRING_ELEMENT(name, contents) {contents, RootIndex::k##name},
    INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT)
#undef CONSTANT_STRING_ELEMENT
};

const Heap::StructTable Heap::struct_table[] = {
#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
  {NAME##_TYPE, Name::kSize, k##Name##MapRootIndex},
  {NAME##_TYPE, Name::kSize, RootIndex::k##Name##Map},
    STRUCT_LIST(STRUCT_TABLE_ELEMENT)
#undef STRUCT_TABLE_ELEMENT

#define ALLOCATION_SITE_ELEMENT(NAME, Name, Size, name) \
  {NAME##_TYPE, Name::kSize##Size, k##Name##Size##MapRootIndex},
  {NAME##_TYPE, Name::kSize##Size, RootIndex::k##Name##Size##Map},
    ALLOCATION_SITE_LIST(ALLOCATION_SITE_ELEMENT)
#undef ALLOCATION_SITE_ELEMENT

#define DATA_HANDLER_ELEMENT(NAME, Name, Size, name) \
  {NAME##_TYPE, Name::kSizeWithData##Size, k##Name##Size##MapRootIndex},
  {NAME##_TYPE, Name::kSizeWithData##Size, RootIndex::k##Name##Size##Map},
    DATA_HANDLER_LIST(DATA_HANDLER_ELEMENT)
#undef DATA_HANDLER_ELEMENT
};

@@ -120,8 +120,8 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
  if (!allocation.To(&result)) return allocation;
  // Map::cast cannot be used due to uninitialized map field.
  Map* map = reinterpret_cast<Map*>(result);
  map->set_map_after_allocation(reinterpret_cast<Map*>(root(kMetaMapRootIndex)),
                                SKIP_WRITE_BARRIER);
  map->set_map_after_allocation(
      reinterpret_cast<Map*>(root(RootIndex::kMetaMap)), SKIP_WRITE_BARRIER);
  map->set_instance_type(instance_type);
  map->set_instance_size(instance_size);
  // Initialize to only containing tagged fields.

@@ -701,7 +701,7 @@ void Heap::CreateInitialObjects() {
  {                                                                 \
    Handle<Symbol> symbol(                                          \
        isolate()->factory()->NewPrivateSymbol(TENURED_READ_ONLY)); \
    roots_[k##name##RootIndex] = *symbol;                           \
    roots_[RootIndex::k##name] = *symbol;                           \
  }
  PRIVATE_SYMBOL_LIST(SYMBOL_INIT)
#undef SYMBOL_INIT

@@ -714,7 +714,7 @@ void Heap::CreateInitialObjects() {
    Handle<String> name##d =                                                \
        factory->NewStringFromStaticChars(#description, TENURED_READ_ONLY); \
    name->set_name(*name##d);                                               \
    roots_[k##name##RootIndex] = *name;
    roots_[RootIndex::k##name] = *name;
  PUBLIC_SYMBOL_LIST(SYMBOL_INIT)
#undef SYMBOL_INIT

@@ -724,7 +724,7 @@ void Heap::CreateInitialObjects() {
        factory->NewStringFromStaticChars(#description, TENURED_READ_ONLY); \
    name->set_is_well_known_symbol(true);                                   \
    name->set_name(*name##d);                                               \
    roots_[k##name##RootIndex] = *name;
    roots_[RootIndex::k##name] = *name;
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_INIT)
#undef SYMBOL_INIT

@@ -900,15 +900,15 @@ void Heap::CreateInternalAccessorInfoObjects() {

#define INIT_ACCESSOR_INFO(accessor_name, AccessorName, ...)   \
  acessor_info = Accessors::Make##AccessorName##Info(isolate); \
  roots_[k##AccessorName##AccessorRootIndex] = *acessor_info;
  roots_[RootIndex::k##AccessorName##Accessor] = *acessor_info;
  ACCESSOR_INFO_LIST(INIT_ACCESSOR_INFO)
#undef INIT_ACCESSOR_INFO

#define INIT_SIDE_EFFECT_FLAG(accessor_name, AccessorName, GetterType, \
                              SetterType)                               \
  AccessorInfo::cast(roots_[k##AccessorName##AccessorRootIndex])        \
  AccessorInfo::cast(roots_[RootIndex::k##AccessorName##Accessor])      \
      ->set_getter_side_effect_type(SideEffectType::GetterType);        \
  AccessorInfo::cast(roots_[k##AccessorName##AccessorRootIndex])        \
  AccessorInfo::cast(roots_[RootIndex::k##AccessorName##Accessor])      \
      ->set_setter_side_effect_type(SideEffectType::SetterType);
  ACCESSOR_INFO_LIST(INIT_SIDE_EFFECT_FLAG)
#undef INIT_SIDE_EFFECT_FLAG

@@ -2961,7 +2961,7 @@ size_t FreeListCategory::SumFreeList() {
  size_t sum = 0;
  FreeSpace* cur = top();
  while (cur != nullptr) {
    DCHECK(cur->map() == page()->heap()->root(Heap::kFreeSpaceMapRootIndex));
    DCHECK(cur->map() == page()->heap()->root(RootIndex::kFreeSpaceMap));
    sum += cur->relaxed_read_size();
    cur = cur->next();
  }
@@ -370,15 +370,15 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ PushRoot(RootIndex::kUndefinedValue);

  // call data
  __ push(call_data);

  // return value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ PushRoot(RootIndex::kUndefinedValue);
  // return value default
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ PushRoot(RootIndex::kUndefinedValue);
  // isolate
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  // holder

@@ -449,9 +449,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  __ PushRoot(RootIndex::kUndefinedValue);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false

@@ -51,7 +51,7 @@ MacroAssembler::MacroAssembler(Isolate* isolate,
#endif  // V8_EMBEDDED_BUILTINS
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
  // TODO(jgruber, v8:6666): Support loads through the root register once it
  // exists.
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {

@@ -67,22 +67,20 @@ void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Immediate(static_cast<int>(index)));
  mov(destination,
      StaticArray(destination, times_pointer_size, roots_array_start));
}

void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
void MacroAssembler::CompareRoot(Register with, Register scratch,
                                 RootIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(scratch, Immediate(static_cast<int>(index)));
  cmp(with, StaticArray(scratch, times_pointer_size, roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
void MacroAssembler::CompareRoot(Register with, RootIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {

@@ -92,7 +90,7 @@ void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  }
}

void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
void MacroAssembler::CompareRoot(Operand with, RootIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {

@@ -102,7 +100,7 @@ void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  }
}

void MacroAssembler::PushRoot(Heap::RootListIndex index) {
void MacroAssembler::PushRoot(RootIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {

@@ -116,10 +114,10 @@ void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(!is_ebx_addressable_);
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
      Heap::kBuiltinsConstantsTableRootIndex));
      RootIndex::kBuiltinsConstantsTable));
  // TODO(jgruber): LoadRoot should be a register-relative load once we have
  // the kRootRegister.
  LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
  mov(destination,
      FieldOperand(destination,
                   FixedArray::kHeaderSize + constant_index * kPointerSize));
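The ia32 LoadRoot above still computes roots_array_start[index] with a scaled index; only the immediate now needs an explicit cast. A standalone model of the addressing, with stand-in values:

  #include <cassert>

  enum class RootIndex { kException, kUndefinedValue };

  int main() {
    int exception_stand_in = 0;
    const void* roots_array_start[] = {&exception_stand_in, nullptr};
    // mov(destination, Immediate(static_cast<int>(index)));
    // mov(destination, StaticArray(destination, times_pointer_size, start));
    const void* destination =
        roots_array_start[static_cast<int>(RootIndex::kUndefinedValue)];
    assert(destination == nullptr);
    return 0;
  }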
@@ -241,7 +241,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {

  void Ret();

  void LoadRoot(Register destination, Heap::RootListIndex index) override;
  void LoadRoot(Register destination, RootIndex index) override;

  void MoveForRootRegisterRefactoring(Register dst, Register src) {
    // TODO(v8:6666): When rewriting ia32 ASM builtins to not clobber the

@@ -501,34 +501,32 @@ class MacroAssembler : public TurboAssembler {
  void Set(Operand dst, int32_t x) { mov(dst, Immediate(x)); }

  // Operations on roots in the root-array.
  void CompareRoot(Register with, Register scratch, Heap::RootListIndex index);
  void CompareRoot(Register with, Register scratch, RootIndex index);
  // These methods can only be used with constant roots (i.e. non-writable
  // and not in new space).
  void CompareRoot(Register with, Heap::RootListIndex index);
  void CompareRoot(Operand with, Heap::RootListIndex index);
  void PushRoot(Heap::RootListIndex index);
  void CompareRoot(Register with, RootIndex index);
  void CompareRoot(Operand with, RootIndex index);
  void PushRoot(RootIndex index);

  // Compare the object in a register to a value and jump if they are equal.
  void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal,
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }
  void JumpIfRoot(Operand with, Heap::RootListIndex index, Label* if_equal,
  void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }

  // Compare the object in a register to a value and jump if they are not equal.
  void JumpIfNotRoot(Register with, Heap::RootListIndex index,
                     Label* if_not_equal,
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
  }
  void JumpIfNotRoot(Operand with, Heap::RootListIndex index,
                     Label* if_not_equal,
  void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
@@ -1046,9 +1046,8 @@ void AccessorAssembler::CheckFieldType(TNode<DescriptorArray> descriptors,
    Node* value_map = LoadMap(value);
    // While supporting mutable HeapNumbers would be straightforward, such
    // objects should not end up here anyway.
    CSA_ASSERT(this,
               WordNotEqual(value_map,
                            LoadRoot(Heap::kMutableHeapNumberMapRootIndex)));
    CSA_ASSERT(this, WordNotEqual(value_map,
                                  LoadRoot(RootIndex::kMutableHeapNumberMap)));
    Branch(IsHeapNumberMap(value_map), &all_fine, bailout);
  }

@@ -2491,7 +2490,7 @@ void AccessorAssembler::LoadIC_Noninlined(const LoadICParameters* p,

  {
    // Check megamorphic case.
    GotoIfNot(WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
    GotoIfNot(WordEqual(feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
              &try_uninitialized);

    TryProbeStubCache(isolate()->load_stub_cache(), p->receiver, p->name,

@@ -2501,9 +2500,8 @@ void AccessorAssembler::LoadIC_Noninlined(const LoadICParameters* p,
  BIND(&try_uninitialized);
  {
    // Check uninitialized case.
    GotoIfNot(
        WordEqual(feedback, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
        miss);
    GotoIfNot(WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol)),
              miss);
    exit_point->ReturnCallStub(
        Builtins::CallableFor(isolate(), Builtins::kLoadIC_Uninitialized),
        p->context, p->receiver, p->name, p->slot, p->vector);

@@ -2519,7 +2517,7 @@ void AccessorAssembler::LoadIC_Uninitialized(const LoadICParameters* p) {

  // Optimistically write the state transition to the vector.
  StoreFeedbackVectorSlot(p->vector, p->slot,
                          LoadRoot(Heap::kpremonomorphic_symbolRootIndex),
                          LoadRoot(RootIndex::kpremonomorphic_symbol),
                          SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
  StoreWeakReferenceInFeedbackVector(p->vector, p->slot, receiver_map,
                                     kPointerSize, SMI_PARAMETERS);

@@ -2545,7 +2543,7 @@ void AccessorAssembler::LoadIC_Uninitialized(const LoadICParameters* p) {
  {
    // Undo the optimistic state transition.
    StoreFeedbackVectorSlot(p->vector, p->slot,
                            LoadRoot(Heap::kuninitialized_symbolRootIndex),
                            LoadRoot(RootIndex::kuninitialized_symbol),
                            SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);

    TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,

@@ -2627,8 +2625,7 @@ void AccessorAssembler::LoadGlobalIC_TryHandlerCase(
  TNode<MaybeObject> feedback_element =
      LoadFeedbackVectorSlot(vector, slot, kPointerSize, slot_mode);
  TNode<Object> handler = CAST(feedback_element);
  GotoIf(WordEqual(handler, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
         miss);
  GotoIf(WordEqual(handler, LoadRoot(RootIndex::kuninitialized_symbol)), miss);

  OnNonExistent on_nonexistent = typeof_mode == NOT_INSIDE_TYPEOF
                                     ? OnNonExistent::kThrowReferenceError

@@ -2684,9 +2681,9 @@ void AccessorAssembler::KeyedLoadIC(const LoadICParameters* p) {
  {
    // Check megamorphic case.
    Comment("KeyedLoadIC_try_megamorphic");
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
              &try_polymorphic_name);
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
        &try_polymorphic_name);
    // TODO(jkummerow): Inline this? Or some of it?
    TailCallBuiltin(Builtins::kKeyedLoadIC_Megamorphic, p->context, p->receiver,
                    p->name, p->slot, p->vector);

@@ -2905,9 +2902,9 @@ void AccessorAssembler::StoreIC(const StoreICParameters* p) {
  BIND(&try_megamorphic);
  {
    // Check megamorphic case.
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
              &try_uninitialized);
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
        &try_uninitialized);

    TryProbeStubCache(isolate()->store_stub_cache(), p->receiver, p->name,
                      &if_handler, &var_handler, &miss);

@@ -2915,9 +2912,9 @@ void AccessorAssembler::StoreIC(const StoreICParameters* p) {
  BIND(&try_uninitialized);
  {
    // Check uninitialized case.
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kuninitialized_symbolRootIndex)),
              &miss);
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kuninitialized_symbol)),
        &miss);
    TailCallBuiltin(Builtins::kStoreIC_Uninitialized, p->context, p->receiver,
                    p->name, p->value, p->slot, p->vector);
  }

@@ -2951,7 +2948,7 @@ void AccessorAssembler::StoreGlobalIC(const StoreICParameters* pp) {
  TNode<MaybeObject> handler = LoadFeedbackVectorSlot(
      pp->vector, pp->slot, kPointerSize, SMI_PARAMETERS);

  GotoIf(WordEqual(handler, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
  GotoIf(WordEqual(handler, LoadRoot(RootIndex::kuninitialized_symbol)),
         &miss);

  StoreICParameters p = *pp;

@@ -3088,9 +3085,9 @@ void AccessorAssembler::KeyedStoreIC(const StoreICParameters* p) {
  {
    // Check megamorphic case.
    Comment("KeyedStoreIC_try_megamorphic");
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
              &try_polymorphic_name);
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
        &try_polymorphic_name);
    TailCallBuiltin(Builtins::kKeyedStoreIC_Megamorphic, p->context,
                    p->receiver, p->name, p->value, p->slot, p->vector);
  }

@@ -3170,15 +3167,14 @@ void AccessorAssembler::StoreInArrayLiteralIC(const StoreICParameters* p) {
  BIND(&try_megamorphic);
  {
    Comment("StoreInArrayLiteralIC_try_megamorphic");
    CSA_ASSERT(
        this,
        Word32Or(WordEqual(strong_feedback,
                           LoadRoot(Heap::kuninitialized_symbolRootIndex)),
                 WordEqual(strong_feedback,
                           LoadRoot(Heap::kmegamorphic_symbolRootIndex))));
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
              &miss);
    CSA_ASSERT(this,
               Word32Or(WordEqual(strong_feedback,
                                  LoadRoot(RootIndex::kuninitialized_symbol)),
                        WordEqual(strong_feedback,
                                  LoadRoot(RootIndex::kmegamorphic_symbol))));
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
        &miss);
    TailCallRuntime(Runtime::kStoreInArrayLiteralIC_Slow, p->context,
                    p->value, p->receiver, p->name);
  }

@@ -3659,15 +3655,14 @@ void AccessorAssembler::GenerateCloneObjectIC() {
  BIND(&try_megamorphic);
  {
    Comment("CloneObjectIC_try_megamorphic");
    CSA_ASSERT(
        this,
        Word32Or(WordEqual(strong_feedback,
                           LoadRoot(Heap::kuninitialized_symbolRootIndex)),
                 WordEqual(strong_feedback,
                           LoadRoot(Heap::kmegamorphic_symbolRootIndex))));
    GotoIfNot(WordEqual(strong_feedback,
                        LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
              &miss);
    CSA_ASSERT(this,
               Word32Or(WordEqual(strong_feedback,
                                  LoadRoot(RootIndex::kuninitialized_symbol)),
                        WordEqual(strong_feedback,
                                  LoadRoot(RootIndex::kmegamorphic_symbol))));
    GotoIfNot(
        WordEqual(strong_feedback, LoadRoot(RootIndex::kmegamorphic_symbol)),
        &miss);
    TailCallBuiltin(Builtins::kCloneObjectIC_Slow, context, source, flags, slot,
                    vector);
  }
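The IC code above repeatedly compares a feedback word against the uninitialized and megamorphic sentinel roots to decide the state transition. A standalone sketch of that state check; the sentinel values here are stand-ins, not the real heap objects:

  #include <cassert>

  enum class Sentinel { kUninitialized, kPremonomorphic, kMegamorphic };

  // Stand-in for the WordEqual(feedback, LoadRoot(...)) checks above.
  bool IsMegamorphic(Sentinel feedback) {
    return feedback == Sentinel::kMegamorphic;
  }

  int main() {
    assert(!IsMegamorphic(Sentinel::kUninitialized));
    assert(IsMegamorphic(Sentinel::kMegamorphic));
    return 0;
  }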
@@ -324,7 +324,7 @@ void KeyedStoreGenericAssembler::StoreElementWithCapacity(

  Label check_double_elements(this), check_cow_elements(this);
  Node* elements_map = LoadMap(elements);
  GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
  GotoIf(WordNotEqual(elements_map, LoadRoot(RootIndex::kFixedArrayMap)),
         &check_double_elements);

  // FixedArray backing store -> Smi or object elements.

@@ -385,7 +385,7 @@ void KeyedStoreGenericAssembler::StoreElementWithCapacity(
    {
      Label transition_to_double(this), transition_to_object(this);
      Node* native_context = LoadNativeContext(context);
      Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
      Branch(WordEqual(LoadMap(value), LoadRoot(RootIndex::kHeapNumberMap)),
             &transition_to_double, &transition_to_object);
      BIND(&transition_to_double);
      {

@@ -428,7 +428,7 @@ void KeyedStoreGenericAssembler::StoreElementWithCapacity(
  }

  BIND(&check_double_elements);
  Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
  Node* fixed_double_array_map = LoadRoot(RootIndex::kFixedDoubleArrayMap);
  GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
         &check_cow_elements);
  // FixedDoubleArray backing store -> double elements.

@@ -1050,7 +1050,7 @@ void KeyedStoreGenericAssembler::StoreIC_Uninitialized() {

  // Optimistically write the state transition to the vector.
  StoreFeedbackVectorSlot(vector, slot,
                          LoadRoot(Heap::kpremonomorphic_symbolRootIndex),
                          LoadRoot(RootIndex::kpremonomorphic_symbol),
                          SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);

  StoreICParameters p(context, receiver, name, value, slot, vector);

@@ -1060,7 +1060,7 @@ void KeyedStoreGenericAssembler::StoreIC_Uninitialized() {
  {
    // Undo the optimistic state transition.
    StoreFeedbackVectorSlot(vector, slot,
                            LoadRoot(Heap::kuninitialized_symbolRootIndex),
                            LoadRoot(RootIndex::kuninitialized_symbol),
                            SKIP_WRITE_BARRIER, 0, SMI_PARAMETERS);
    TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot, vector,
                    receiver, name);
@@ -803,7 +803,7 @@ void InterpreterAssembler::CollectCallableFeedback(Node* target, Node* context,
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),

@@ -976,7 +976,7 @@ Node* InterpreterAssembler::Construct(Node* target, Node* context,
      // Check if it is uninitialized.
      Comment("check if uninitialized");
      Node* is_uninitialized =
          WordEqual(feedback, LoadRoot(Heap::kuninitialized_symbolRootIndex));
          WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
      Branch(is_uninitialized, &initialize, &mark_megamorphic);
    }

@@ -1054,7 +1054,7 @@ Node* InterpreterAssembler::Construct(Node* target, Node* context,
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),

@@ -1139,7 +1139,7 @@ Node* InterpreterAssembler::ConstructWithSpread(Node* target, Node* context,
      // Check if it is uninitialized.
      Comment("check if uninitialized");
      Node* is_uninitialized =
          WordEqual(feedback, LoadRoot(Heap::kuninitialized_symbolRootIndex));
          WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
      Branch(is_uninitialized, &initialize, &mark_megamorphic);
    }

@@ -1195,7 +1195,7 @@ Node* InterpreterAssembler::ConstructWithSpread(Node* target, Node* context,
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),

@@ -1705,7 +1705,7 @@ Node* InterpreterAssembler::ImportRegisterFile(
    StoreRegister(value, reg_index);

    StoreFixedArrayElement(array, array_index,
                           LoadRoot(Heap::kStaleRegisterRootIndex));
                           LoadRoot(RootIndex::kStaleRegister));

    var_index = IntPtrAdd(index, IntPtrConstant(1));
    Goto(&loop);
@@ -3637,7 +3637,7 @@ ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

Handle<Symbol> Isolate::SymbolFor(Heap::RootListIndex dictionary_index,
Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
                                  Handle<String> name, bool private_symbol) {
  Handle<String> key = factory()->InternalizeString(name);
  Handle<NameDictionary> dictionary =

@@ -3651,14 +3651,14 @@ Handle<Symbol> Isolate::SymbolFor(Heap::RootListIndex dictionary_index,
    dictionary = NameDictionary::Add(this, dictionary, key, symbol,
                                     PropertyDetails::Empty(), &entry);
    switch (dictionary_index) {
      case Heap::kPublicSymbolTableRootIndex:
      case RootIndex::kPublicSymbolTable:
        symbol->set_is_public(true);
        heap()->set_public_symbol_table(*dictionary);
        break;
      case Heap::kApiSymbolTableRootIndex:
      case RootIndex::kApiSymbolTable:
        heap()->set_api_symbol_table(*dictionary);
        break;
      case Heap::kApiPrivateSymbolTableRootIndex:
      case RootIndex::kApiPrivateSymbolTable:
        heap()->set_api_private_symbol_table(*dictionary);
        break;
      default:
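With the scoped enum, every case label in the SymbolFor switch above must be qualified, which also rules out accidental integer cases. A standalone sketch of the same pattern:

  enum class RootIndex {
    kPublicSymbolTable,
    kApiSymbolTable,
    kApiPrivateSymbolTable
  };

  const char* SymbolTableName(RootIndex dictionary_index) {
    switch (dictionary_index) {
      case RootIndex::kPublicSymbolTable: return "public_symbol_table";
      case RootIndex::kApiSymbolTable: return "api_symbol_table";
      case RootIndex::kApiPrivateSymbolTable: return "api_private_symbol_table";
    }
    return "unreachable";
  }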
@@ -1369,8 +1369,8 @@ class Isolate : private HiddenFactory {
  void RunMicrotasks();
  bool IsRunningMicrotasks() const { return is_running_microtasks_; }

  Handle<Symbol> SymbolFor(Heap::RootListIndex dictionary_index,
                           Handle<String> name, bool private_symbol);
  Handle<Symbol> SymbolFor(RootIndex dictionary_index, Handle<String> name,
                           bool private_symbol);

  void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
  void CountUsage(v8::Isolate::UseCounterFeature feature);
@@ -119,7 +119,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
  __ li(t0, ExternalReference::Create(
                IsolateAddressId::kPendingExceptionAddress, isolate));
  __ sw(v0, MemOperand(t0));  // We come back from 'invoke'. result is in v0.
  __ LoadRoot(v0, Heap::kExceptionRootIndex);
  __ LoadRoot(v0, RootIndex::kException);
  __ b(&exit);  // b exposes branch delay slot.
  __ nop();     // Branch delay slot nop.

@@ -415,7 +415,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
         stack_space_offset != kInvalidStackOffset);

  // Check if the function scheduled an exception.
  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(t0, RootIndex::kTheHoleValue);
  __ li(kScratchReg, ExternalReference::scheduled_exception_address(isolate));
  __ lw(t1, MemOperand(kScratchReg));
  __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));

@@ -466,13 +466,13 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ PushRoot(RootIndex::kUndefinedValue);

  // call data.
  __ Push(call_data);

  Register scratch = call_data;
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // Push return value and default return value.
  __ Push(scratch, scratch);
  __ li(scratch, ExternalReference::isolate_address(masm->isolate()));

@@ -543,7 +543,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
  __ sw(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ sw(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ sw(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ sw(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                    kPointerSize));
@@ -127,11 +127,11 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
  return bytes;
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
  lw(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                              Condition cond, Register src1,
                              const Operand& src2) {
  Branch(2, NegateCondition(cond), src1, src2);

@@ -2811,7 +2811,7 @@ void TurboAssembler::Branch(Label* L, Condition cond, Register rs,
}

void TurboAssembler::Branch(Label* L, Condition cond, Register rs,
                            Heap::RootListIndex index, BranchDelaySlot bdslot) {
                            RootIndex index, BranchDelaySlot bdslot) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  LoadRoot(scratch, index);

@@ -3620,8 +3620,8 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
      Heap::kBuiltinsConstantsTableRootIndex));
  LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
      RootIndex::kBuiltinsConstantsTable));
  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
  lw(destination,
     FieldMemOperand(destination,
                     FixedArray::kHeaderSize + constant_index * kPointerSize));

@@ -4323,7 +4323,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    LoadRoot(a3, RootIndex::kUndefinedValue);
  }

  Label done;

@@ -5029,7 +5029,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    LoadRoot(scratch, RootIndex::kUndefinedValue);
    Branch(&done_checking, eq, object, Operand(scratch));
    GetObjectType(object, scratch, scratch);
    Assert(eq, AbortReason::kExpectedUndefinedOrCell, scratch,
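RootRegisterOffset, used by the MIPS LoadRoot above, is presumably a small helper that turns the enum value into a byte offset from kRootRegister; the real definition may also fold in a register bias. A standalone sketch:

  #include <cstdint>

  enum class RootIndex { kException, kTheHoleValue };
  constexpr int kPointerSizeLog2 = 2;  // assumption: 32-bit target

  constexpr int32_t RootRegisterOffset(RootIndex index) {
    return static_cast<int32_t>(index) << kPointerSizeLog2;
  }

  static_assert(RootRegisterOffset(RootIndex::kTheHoleValue) == 4,
                "second root is one pointer in");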
@@ -226,7 +226,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
  void BranchMSA(Label* target, MSABranchDF df, MSABranchCondition cond,
                 MSARegister wt, BranchDelaySlot bd = PROTECT);

  void Branch(Label* L, Condition cond, Register rs, Heap::RootListIndex index,
  void Branch(Label* L, Condition cond, Register rs, RootIndex index,
              BranchDelaySlot bdslot = PROTECT);

  // Load int32 in the rd register.

@@ -796,8 +796,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
                 Func GetLabelFunction);

  // Load an object from the root table.
  void LoadRoot(Register destination, Heap::RootListIndex index) override;
  void LoadRoot(Register destination, Heap::RootListIndex index, Condition cond,
  void LoadRoot(Register destination, RootIndex index) override;
  void LoadRoot(Register destination, RootIndex index, Condition cond,
                Register src1, const Operand& src2);

  // If the value is a NaN, canonicalize the value else, do nothing.

@@ -919,7 +919,7 @@ class MacroAssembler : public TurboAssembler {
  // less efficient form using xor instead of mov is emitted.
  void Swap(Register reg1, Register reg2, Register scratch = no_reg);

  void PushRoot(Heap::RootListIndex index) {
  void PushRoot(RootIndex index) {
    UseScratchRegisterScope temps(this);
    Register scratch = temps.Acquire();
    LoadRoot(scratch, index);

@@ -927,7 +927,7 @@ class MacroAssembler : public TurboAssembler {
  }

  // Compare the object in a register to a value and jump if they are equal.
  void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
    UseScratchRegisterScope temps(this);
    Register scratch = temps.Acquire();
    LoadRoot(scratch, index);

@@ -935,8 +935,7 @@ class MacroAssembler : public TurboAssembler {
  }

  // Compare the object in a register to a value and jump if they are not equal.
  void JumpIfNotRoot(Register with, Heap::RootListIndex index,
                     Label* if_not_equal) {
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
    UseScratchRegisterScope temps(this);
    Register scratch = temps.Acquire();
    LoadRoot(scratch, index);
@ -118,7 +118,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
|
||||
__ li(a4, ExternalReference::Create(
|
||||
IsolateAddressId::kPendingExceptionAddress, isolate));
|
||||
__ Sd(v0, MemOperand(a4)); // We come back from 'invoke'. result is in v0.
|
||||
__ LoadRoot(v0, Heap::kExceptionRootIndex);
|
||||
__ LoadRoot(v0, RootIndex::kException);
|
||||
__ b(&exit); // b exposes branch delay slot.
|
||||
__ nop(); // Branch delay slot nop.
|
||||
|
||||
@ -418,7 +418,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
|
||||
stack_space_offset != kInvalidStackOffset);
|
||||
|
||||
// Check if the function scheduled an exception.
|
||||
__ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
|
||||
__ LoadRoot(a4, RootIndex::kTheHoleValue);
|
||||
__ li(kScratchReg, ExternalReference::scheduled_exception_address(isolate));
|
||||
__ Ld(a5, MemOperand(kScratchReg));
|
||||
__ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));
|
||||
@ -469,13 +469,13 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
|
||||
STATIC_ASSERT(FCA::kHolderIndex == 0);
|
||||
|
||||
// new target
|
||||
__ PushRoot(Heap::kUndefinedValueRootIndex);
|
||||
__ PushRoot(RootIndex::kUndefinedValue);
|
||||
|
||||
// call data.
|
||||
__ Push(call_data);
|
||||
|
||||
Register scratch = call_data;
|
||||
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
|
||||
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
|
||||
// Push return value and default return value.
|
||||
__ Push(scratch, scratch);
|
||||
__ li(scratch, ExternalReference::isolate_address(masm->isolate()));
|
||||
@ -548,7 +548,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
|
||||
__ Sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
|
||||
__ Ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
|
||||
__ Sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
|
||||
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
|
||||
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
|
||||
__ Sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
|
||||
__ Sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
|
||||
kPointerSize));
|
||||
|
@ -127,11 +127,11 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
|
||||
return bytes;
|
||||
}
|
||||
|
||||
void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
|
||||
void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
|
||||
Ld(destination, MemOperand(s6, RootRegisterOffset(index)));
|
||||
}
|
||||
|
||||
void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
|
||||
void TurboAssembler::LoadRoot(Register destination, RootIndex index,
|
||||
Condition cond, Register src1,
|
||||
const Operand& src2) {
|
||||
Branch(2, NegateCondition(cond), src1, src2);
|
||||
@ -3308,7 +3308,7 @@ void TurboAssembler::Branch(Label* L, Condition cond, Register rs,
|
||||
}
|
||||
|
||||
void TurboAssembler::Branch(Label* L, Condition cond, Register rs,
|
||||
Heap::RootListIndex index, BranchDelaySlot bdslot) {
|
||||
RootIndex index, BranchDelaySlot bdslot) {
|
||||
UseScratchRegisterScope temps(this);
|
||||
Register scratch = temps.Acquire();
|
||||
LoadRoot(scratch, index);
|
||||
@ -4121,8 +4121,8 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
|
||||
void TurboAssembler::LoadFromConstantsTable(Register destination,
|
||||
int constant_index) {
|
||||
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
|
||||
Heap::kBuiltinsConstantsTableRootIndex));
|
||||
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
|
||||
RootIndex::kBuiltinsConstantsTable));
|
||||
LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
|
||||
Ld(destination,
|
||||
FieldMemOperand(destination,
|
||||
FixedArray::kHeaderSize + constant_index * kPointerSize));
|
||||
@ -4705,7 +4705,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
|
||||
|
||||
// Clear the new.target register if not given.
|
||||
if (!new_target.is_valid()) {
|
||||
LoadRoot(a3, Heap::kUndefinedValueRootIndex);
|
||||
LoadRoot(a3, RootIndex::kUndefinedValue);
|
||||
}
|
||||
|
||||
Label done;
|
||||
@ -5431,7 +5431,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
|
||||
if (emit_debug_code()) {
|
||||
Label done_checking;
|
||||
AssertNotSmi(object);
|
||||
LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
|
||||
LoadRoot(scratch, RootIndex::kUndefinedValue);
|
||||
Branch(&done_checking, eq, object, Operand(scratch));
|
||||
GetObjectType(object, scratch, scratch);
|
||||
Assert(eq, AbortReason::kExpectedUndefinedOrCell, scratch,
|
||||
|
@ -243,7 +243,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
|
||||
void BranchMSA(Label* target, MSABranchDF df, MSABranchCondition cond,
|
||||
MSARegister wt, BranchDelaySlot bd = PROTECT);
|
||||
|
||||
void Branch(Label* L, Condition cond, Register rs, Heap::RootListIndex index,
|
||||
void Branch(Label* L, Condition cond, Register rs, RootIndex index,
|
||||
BranchDelaySlot bdslot = PROTECT);
|
||||
|
||||
static int InstrCountForLi64Bit(int64_t value);
|
||||
@ -766,8 +766,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
|
||||
Func GetLabelFunction);
|
||||
|
||||
// Load an object from the root table.
|
||||
void LoadRoot(Register destination, Heap::RootListIndex index) override;
|
||||
void LoadRoot(Register destination, Heap::RootListIndex index, Condition cond,
|
||||
void LoadRoot(Register destination, RootIndex index) override;
|
||||
void LoadRoot(Register destination, RootIndex index, Condition cond,
|
||||
Register src1, const Operand& src2);
|
||||
|
||||
// If the value is a NaN, canonicalize the value else, do nothing.
|
||||
@ -934,7 +934,7 @@ class MacroAssembler : public TurboAssembler {
|
||||
// less efficient form using xor instead of mov is emitted.
|
||||
void Swap(Register reg1, Register reg2, Register scratch = no_reg);
|
||||
|
||||
void PushRoot(Heap::RootListIndex index) {
|
||||
void PushRoot(RootIndex index) {
|
||||
UseScratchRegisterScope temps(this);
|
||||
Register scratch = temps.Acquire();
|
||||
LoadRoot(scratch, index);
|
||||
@ -942,7 +942,7 @@ class MacroAssembler : public TurboAssembler {
|
||||
}
|
||||
|
||||
// Compare the object in a register to a value and jump if they are equal.
|
||||
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
|
||||
void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
|
||||
UseScratchRegisterScope temps(this);
|
||||
Register scratch = temps.Acquire();
|
||||
LoadRoot(scratch, index);
|
||||
@ -950,8 +950,7 @@ class MacroAssembler : public TurboAssembler {
|
||||
}
|
||||
|
||||
// Compare the object in a register to a value and jump if they are not equal.
|
||||
void JumpIfNotRoot(Register with, Heap::RootListIndex index,
|
||||
Label* if_not_equal) {
|
||||
void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
|
||||
UseScratchRegisterScope temps(this);
|
||||
Register scratch = temps.Acquire();
|
||||
LoadRoot(scratch, index);
|
||||
|
@ -2160,8 +2160,9 @@ bool CanLeak(Object* obj, Heap* heap) {
|
||||
if (obj->IsContext()) return true;
|
||||
if (obj->IsMap()) {
|
||||
Map* map = Map::cast(obj);
|
||||
for (int i = 0; i < Heap::kStrongRootListLength; i++) {
|
||||
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
|
||||
for (int i = 0; i < static_cast<int>(RootIndex::kStrongRootListLength);
|
||||
i++) {
|
||||
RootIndex root_index = static_cast<RootIndex>(i);
|
||||
if (map == heap->root(root_index)) return false;
|
||||
}
|
||||
return true;
|
||||
|
@ -1857,7 +1857,7 @@ uint32_t StringTableShape::HashForObject(Isolate* isolate, Object* object) {
|
||||
}
|
||||
|
||||
int StringTableShape::GetMapRootIndex() {
|
||||
return Heap::kStringTableMapRootIndex;
|
||||
return static_cast<int>(RootIndex::kStringTableMap);
|
||||
}
|
||||
|
||||
bool NumberDictionary::requires_slow_elements() {
|
||||
@ -1930,7 +1930,7 @@ int FreeSpace::Size() { return size(); }
|
||||
|
||||
FreeSpace* FreeSpace::next() {
|
||||
DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
|
||||
Heap::kFreeSpaceMapRootIndex) ||
|
||||
RootIndex::kFreeSpaceMap) ||
|
||||
(!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
|
||||
map() == nullptr));
|
||||
DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
|
||||
@ -1940,7 +1940,7 @@ FreeSpace* FreeSpace::next() {
|
||||
|
||||
void FreeSpace::set_next(FreeSpace* next) {
|
||||
DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
|
||||
Heap::kFreeSpaceMapRootIndex) ||
|
||||
RootIndex::kFreeSpaceMap) ||
|
||||
(!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
|
||||
map() == nullptr));
|
||||
DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
|
||||
@ -2757,13 +2757,13 @@ Object* GlobalDictionaryShape::Unwrap(Object* object) {
|
||||
}
|
||||
|
||||
int GlobalDictionaryShape::GetMapRootIndex() {
|
||||
return Heap::kGlobalDictionaryMapRootIndex;
|
||||
return static_cast<int>(RootIndex::kGlobalDictionaryMap);
|
||||
}
|
||||
|
||||
Name* NameDictionary::NameAt(int entry) { return Name::cast(KeyAt(entry)); }
|
||||
|
||||
int NameDictionaryShape::GetMapRootIndex() {
|
||||
return Heap::kNameDictionaryMapRootIndex;
|
||||
return static_cast<int>(RootIndex::kNameDictionaryMap);
|
||||
}
|
||||
|
||||
PropertyCell* GlobalDictionary::CellAt(int entry) {
|
||||
@ -2816,11 +2816,11 @@ Handle<Object> NumberDictionaryBaseShape::AsHandle(Isolate* isolate,
|
||||
}
|
||||
|
||||
int NumberDictionaryShape::GetMapRootIndex() {
|
||||
return Heap::kNumberDictionaryMapRootIndex;
|
||||
return static_cast<int>(RootIndex::kNumberDictionaryMap);
|
||||
}
|
||||
|
||||
int SimpleNumberDictionaryShape::GetMapRootIndex() {
|
||||
return Heap::kSimpleNumberDictionaryMapRootIndex;
|
||||
return static_cast<int>(RootIndex::kSimpleNumberDictionaryMap);
|
||||
}
|
||||
|
||||
bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
|
||||
|
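Note the recurring cast pattern above: the Shape classes keep returning int from GetMapRootIndex(), and callers such as HashTable::NewInternal (below) recover the typed value with static_cast<RootIndex>(...), because an enum class converts implicitly in neither direction. A minimal, self-contained sketch of that round trip; the toy enum and shape here are illustrative stand-ins, not the real V8 declarations:

#include <cstdint>

// Toy stand-ins that mirror the int-based Shape interface kept by this CL.
enum class RootIndex : uint16_t { kHashTableMap, kNameDictionaryMap };

struct ExampleShape {
  // Shape classes still traffic in int for the map root index...
  static int GetMapRootIndex() {
    return static_cast<int>(RootIndex::kNameDictionaryMap);
  }
};

// ...so typed callers cast back explicitly to get a RootIndex.
RootIndex MapRootIndexOf() {
  return static_cast<RootIndex>(ExampleShape::GetMapRootIndex());
}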
@@ -10571,7 +10571,7 @@ Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
// Allocate the array of keys.
Handle<WeakFixedArray> result =
factory->NewWeakFixedArrayWithMap<DescriptorArray>(
Heap::kDescriptorArrayMapRootIndex, LengthFor(size), pretenure);
RootIndex::kDescriptorArrayMap, LengthFor(size), pretenure);
result->Set(kDescriptorLengthIndex,
MaybeObject::FromObject(Smi::FromInt(number_of_descriptors)));
result->Set(kEnumCacheIndex, MaybeObject::FromObject(
@@ -16628,8 +16628,7 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
Isolate* isolate, int capacity, PretenureFlag pretenure) {
Factory* factory = isolate->factory();
int length = EntryToIndex(capacity);
Heap::RootListIndex map_root_index =
static_cast<Heap::RootListIndex>(Shape::GetMapRootIndex());
RootIndex map_root_index = static_cast<RootIndex>(Shape::GetMapRootIndex());
Handle<FixedArray> array =
factory->NewFixedArrayWithMap(map_root_index, length, pretenure);
Handle<Derived> table = Handle<Derived>::cast(array);

@@ -59,11 +59,11 @@ void HashTableBase::SetNumberOfDeletedElements(int nod) {

template <typename Key>
int BaseShape<Key>::GetMapRootIndex() {
return Heap::kHashTableMapRootIndex;
return static_cast<int>(RootIndex::kHashTableMap);
}

int EphemeronHashTableShape::GetMapRootIndex() {
return Heap::kEphemeronHashTableMapRootIndex;
return static_cast<int>(RootIndex::kEphemeronHashTableMap);
}

template <typename Derived, typename Shape>

@@ -14,19 +14,19 @@ namespace v8 {
namespace internal {

int OrderedHashSet::GetMapRootIndex() {
return Heap::kOrderedHashSetMapRootIndex;
return static_cast<int>(RootIndex::kOrderedHashSetMap);
}

int OrderedHashMap::GetMapRootIndex() {
return Heap::kOrderedHashMapMapRootIndex;
return static_cast<int>(RootIndex::kOrderedHashMapMap);
}

int SmallOrderedHashMap::GetMapRootIndex() {
return Heap::kSmallOrderedHashMapMapRootIndex;
return static_cast<int>(RootIndex::kSmallOrderedHashMapMap);
}

int SmallOrderedHashSet::GetMapRootIndex() {
return Heap::kSmallOrderedHashSetMapRootIndex;
return static_cast<int>(RootIndex::kSmallOrderedHashSetMap);
}

inline Object* OrderedHashMap::ValueAt(int entry) {

@@ -26,7 +26,7 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Allocate(
}
int num_buckets = capacity / kLoadFactor;
Handle<FixedArray> backing_store = isolate->factory()->NewFixedArrayWithMap(
static_cast<Heap::RootListIndex>(Derived::GetMapRootIndex()),
static_cast<RootIndex>(Derived::GetMapRootIndex()),
kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
Handle<Derived> table = Handle<Derived>::cast(backing_store);
for (int i = 0; i < num_buckets; ++i) {

@@ -115,7 +115,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
IsolateAddressId::kPendingExceptionAddress, isolate())));

__ StoreP(r3, MemOperand(ip));
__ LoadRoot(r3, Heap::kExceptionRootIndex);
__ LoadRoot(r3, RootIndex::kException);
__ b(&exit);

// Invoke: Link this frame into the handler chain.
@@ -439,7 +439,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ LeaveExitFrame(false, r14, stack_space_operand != nullptr);

// Check if the function scheduled an exception.
__ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
__ LoadRoot(r14, RootIndex::kTheHoleValue);
__ Move(r15, ExternalReference::scheduled_exception_address(isolate));
__ LoadP(r15, MemOperand(r15));
__ cmp(r14, r15);
@@ -490,13 +490,13 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0);

// new target
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);

// call data
__ push(call_data);

Register scratch = call_data;
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
// return value
__ push(scratch);
// return value default
@@ -577,7 +577,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
// Push data from AccessorInfo.
__ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ push(scratch);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ Push(scratch, scratch);
__ Move(scratch, ExternalReference::isolate_address(isolate()));
__ Push(scratch, holder);

@@ -128,14 +128,14 @@ void TurboAssembler::Jump(Register target) {
void TurboAssembler::LoadFromConstantsTable(Register destination,
int constant_index) {
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
RootIndex::kBuiltinsConstantsTable));

const uint32_t offset =
FixedArray::kHeaderSize + constant_index * kPointerSize - kHeapObjectTag;

CHECK(is_uint19(offset));
DCHECK_NE(destination, r0);
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
LoadP(destination, MemOperand(destination, offset), r0);
}

@@ -395,7 +395,7 @@ void TurboAssembler::MultiPopDoubles(RegList dregs, Register location) {
addi(location, location, Operand(stack_offset));
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
void TurboAssembler::LoadRoot(Register destination, RootIndex index,
Condition cond) {
DCHECK(cond == al);
LoadP(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), r0);
@@ -1319,7 +1319,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

// Clear the new.target register if not given.
if (!new_target.is_valid()) {
LoadRoot(r6, Heap::kUndefinedValueRootIndex);
LoadRoot(r6, RootIndex::kUndefinedValue);
}

Label done;
@@ -1443,8 +1443,7 @@ void MacroAssembler::CompareInstanceType(Register map, Register type_reg,
cmpi(type_reg, Operand(type));
}


void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) {
void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
DCHECK(obj != r0);
LoadRoot(r0, index);
cmp(obj, r0);
@@ -1894,7 +1893,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
if (emit_debug_code()) {
Label done_checking;
AssertNotSmi(object);
CompareRoot(object, Heap::kUndefinedValueRootIndex);
CompareRoot(object, RootIndex::kUndefinedValue);
beq(&done_checking);
LoadP(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);

@@ -324,11 +324,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
Register exclusion3 = no_reg);

// Load an object from the root table.
void LoadRoot(Register destination, Heap::RootListIndex index) override {
void LoadRoot(Register destination, RootIndex index) override {
LoadRoot(destination, index, al);
}
void LoadRoot(Register destination, Heap::RootListIndex index,
Condition cond);
void LoadRoot(Register destination, RootIndex index, Condition cond);

void SwapP(Register src, Register dst, Register scratch);
void SwapP(Register src, MemOperand dst, Register scratch);
@@ -853,21 +852,20 @@ class MacroAssembler : public TurboAssembler {

// Compare the object in a register to a value from the root list.
// Uses the ip register as scratch.
void CompareRoot(Register obj, Heap::RootListIndex index);
void PushRoot(Heap::RootListIndex index) {
void CompareRoot(Register obj, RootIndex index);
void PushRoot(RootIndex index) {
LoadRoot(r0, index);
Push(r0);
}

// Compare the object in a register to a value and jump if they are equal.
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
CompareRoot(with, index);
beq(if_equal);
}

// Compare the object in a register to a value and jump if they are not equal.
void JumpIfNotRoot(Register with, Heap::RootListIndex index,
Label* if_not_equal) {
void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
CompareRoot(with, index);
bne(if_not_equal);
}

@@ -16,69 +16,69 @@ namespace internal {

ReadOnlyRoots::ReadOnlyRoots(Isolate* isolate) : heap_(isolate->heap()) {}

#define ROOT_ACCESSOR(type, name, camel_name) \
type* ReadOnlyRoots::name() { \
return type::cast(heap_->roots_[Heap::k##camel_name##RootIndex]); \
} \
Handle<type> ReadOnlyRoots::name##_handle() { \
return Handle<type>( \
bit_cast<type**>(&heap_->roots_[Heap::k##camel_name##RootIndex])); \
#define ROOT_ACCESSOR(type, name, camel_name) \
type* ReadOnlyRoots::name() { \
return type::cast(heap_->roots_[RootIndex::k##camel_name]); \
} \
Handle<type> ReadOnlyRoots::name##_handle() { \
return Handle<type>( \
bit_cast<type**>(&heap_->roots_[RootIndex::k##camel_name])); \
}
STRONG_READ_ONLY_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRING_ACCESSOR(name, str) \
String* ReadOnlyRoots::name() { \
return String::cast(heap_->roots_[Heap::k##name##RootIndex]); \
} \
Handle<String> ReadOnlyRoots::name##_handle() { \
return Handle<String>( \
bit_cast<String**>(&heap_->roots_[Heap::k##name##RootIndex])); \
#define STRING_ACCESSOR(name, str) \
String* ReadOnlyRoots::name() { \
return String::cast(heap_->roots_[RootIndex::k##name]); \
} \
Handle<String> ReadOnlyRoots::name##_handle() { \
return Handle<String>( \
bit_cast<String**>(&heap_->roots_[RootIndex::k##name])); \
}
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
Symbol* ReadOnlyRoots::name() { \
return Symbol::cast(heap_->roots_[Heap::k##name##RootIndex]); \
} \
Handle<Symbol> ReadOnlyRoots::name##_handle() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&heap_->roots_[Heap::k##name##RootIndex])); \
#define SYMBOL_ACCESSOR(name) \
Symbol* ReadOnlyRoots::name() { \
return Symbol::cast(heap_->roots_[RootIndex::k##name]); \
} \
Handle<Symbol> ReadOnlyRoots::name##_handle() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&heap_->roots_[RootIndex::k##name])); \
}
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) \
Symbol* ReadOnlyRoots::name() { \
return Symbol::cast(heap_->roots_[Heap::k##name##RootIndex]); \
} \
Handle<Symbol> ReadOnlyRoots::name##_handle() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&heap_->roots_[Heap::k##name##RootIndex])); \
#define SYMBOL_ACCESSOR(name, description) \
Symbol* ReadOnlyRoots::name() { \
return Symbol::cast(heap_->roots_[RootIndex::k##name]); \
} \
Handle<Symbol> ReadOnlyRoots::name##_handle() { \
return Handle<Symbol>( \
bit_cast<Symbol**>(&heap_->roots_[RootIndex::k##name])); \
}
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
Map* ReadOnlyRoots::name##_map() { \
return Map::cast(heap_->roots_[Heap::k##Name##MapRootIndex]); \
} \
Handle<Map> ReadOnlyRoots::name##_map_handle() { \
return Handle<Map>( \
bit_cast<Map**>(&heap_->roots_[Heap::k##Name##MapRootIndex])); \
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
Map* ReadOnlyRoots::name##_map() { \
return Map::cast(heap_->roots_[RootIndex::k##Name##Map]); \
} \
Handle<Map> ReadOnlyRoots::name##_map_handle() { \
return Handle<Map>( \
bit_cast<Map**>(&heap_->roots_[RootIndex::k##Name##Map])); \
}
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define ALLOCATION_SITE_MAP_ACCESSOR(NAME, Name, Size, name) \
Map* ReadOnlyRoots::name##_map() { \
return Map::cast(heap_->roots_[Heap::k##Name##Size##MapRootIndex]); \
} \
Handle<Map> ReadOnlyRoots::name##_map_handle() { \
return Handle<Map>( \
bit_cast<Map**>(&heap_->roots_[Heap::k##Name##Size##MapRootIndex])); \
#define ALLOCATION_SITE_MAP_ACCESSOR(NAME, Name, Size, name) \
Map* ReadOnlyRoots::name##_map() { \
return Map::cast(heap_->roots_[RootIndex::k##Name##Size##Map]); \
} \
Handle<Map> ReadOnlyRoots::name##_map_handle() { \
return Handle<Map>( \
bit_cast<Map**>(&heap_->roots_[RootIndex::k##Name##Size##Map])); \
}
ALLOCATION_SITE_LIST(ALLOCATION_SITE_MAP_ACCESSOR)
#undef ALLOCATION_SITE_MAP_ACCESSOR
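For a single root, the rewritten accessor macros expand roughly as below. The FreeSpaceMap root is used purely as an illustration; indexing now goes through the RootsTable operator[] (defined in src/roots.h, next), which accepts a RootIndex directly:

// Approximate expansion of ROOT_ACCESSOR(Map, free_space_map, FreeSpaceMap):
Map* ReadOnlyRoots::free_space_map() {
  return Map::cast(heap_->roots_[RootIndex::kFreeSpaceMap]);
}
Handle<Map> ReadOnlyRoots::free_space_map_handle() {
  return Handle<Map>(
      bit_cast<Map**>(&heap_->roots_[RootIndex::kFreeSpaceMap]));
}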
95
src/roots.h
@@ -5,6 +5,7 @@
#ifndef V8_ROOTS_H_
#define V8_ROOTS_H_

#include "src/accessors.h"
#include "src/handles.h"
#include "src/heap-symbols.h"
#include "src/objects-definitions.h"
@@ -286,6 +287,100 @@ namespace internal {
MUTABLE_ROOT_LIST(V) \
STRONG_READ_ONLY_ROOT_LIST(V)

// Declare all the root indices. This defines the root list order.
// clang-format off
enum class RootIndex {
#define DECL(type, name, camel_name) k##camel_name,
STRONG_ROOT_LIST(DECL)
#undef DECL

#define DECL(name, str) k##name,
INTERNALIZED_STRING_LIST(DECL)
#undef DECL

#define DECL(name) k##name,
PRIVATE_SYMBOL_LIST(DECL)
#undef DECL

#define DECL(name, description) k##name,
PUBLIC_SYMBOL_LIST(DECL)
WELL_KNOWN_SYMBOL_LIST(DECL)
#undef DECL

#define DECL(accessor_name, AccessorName, ...) k##AccessorName##Accessor,
ACCESSOR_INFO_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, name) k##Name##Map,
STRUCT_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, Size, name) k##Name##Size##Map,
ALLOCATION_SITE_LIST(DECL)
#undef DECL

#define DECL(NAME, Name, Size, name) k##Name##Size##Map,
DATA_HANDLER_LIST(DECL)
#undef DECL

kStringTable,

#define DECL(type, name, camel_name) k##camel_name,
SMI_ROOT_LIST(DECL)
#undef DECL

kRootListLength,

// Helper aliases.
kRootsStart = 0,
kStrongRootListLength = kStringTable,
kSmiRootsStart = kStringTable + 1
};
// clang-format on
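Because kStrongRootListLength is now an enumerator rather than an integral constant, loops over the strong roots need casts in both directions. This is the iteration idiom used at the call sites earlier in this change (root-index-map.cc and CanLeak); the visit callback here is a placeholder for whatever per-root work the caller does:

void VisitStrongRoots(void (*visit)(RootIndex)) {
  // RootIndex values are contiguous from kRootsStart, so walking the
  // underlying integers and casting back touches every strong root once.
  for (int i = 0; i < static_cast<int>(RootIndex::kStrongRootListLength);
       i++) {
    visit(static_cast<RootIndex>(i));
  }
}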
// Represents a storage of V8 heap roots.
class RootsTable {
public:
static constexpr size_t kEntriesCount =
static_cast<size_t>(RootIndex::kRootListLength);

static constexpr size_t kSmiRootsStart =
static_cast<size_t>(RootIndex::kSmiRootsStart);

RootsTable() : roots_{} {}

template <typename T>
bool IsRootHandle(Handle<T> handle, RootIndex* index) const {
Object** const handle_location = bit_cast<Object**>(handle.address());
if (handle_location >= &roots_[kEntriesCount]) return false;
if (handle_location < &roots_[0]) return false;
*index = static_cast<RootIndex>(handle_location - &roots_[0]);
return true;
}

Object* const& operator[](RootIndex root_index) const {
size_t index = static_cast<size_t>(root_index);
DCHECK_LT(index, kEntriesCount);
return roots_[index];
}

private:
Object** smi_roots_begin() { return &roots_[kSmiRootsStart]; }
Object** smi_roots_end() { return &roots_[kEntriesCount]; }

Object*& operator[](RootIndex root_index) {
size_t index = static_cast<size_t>(root_index);
DCHECK_LT(index, kEntriesCount);
return roots_[index];
}

Object* roots_[kEntriesCount];

friend class Heap;
friend class Factory;
friend class ReadOnlyRoots;
};

class FixedTypedArrayBase;
class Heap;
class Isolate;
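IsRootHandle recovers a RootIndex from a handle by pointer arithmetic against roots_, which is what lets TurboAssemblerBase::IndirectLoadConstant (later in this diff) turn a constant handle into a cheap root-register load. A sketch of the shape of that caller; the method names follow the diff, while the surrounding isolate, object, and destination are illustrative:

RootIndex root_index;
if (isolate->heap()->IsRootHandle(object, &root_index)) {
  // The handle's slot lies inside the roots table, so the object can be
  // reloaded relative to the root register instead of being embedded.
  LoadRoot(destination, root_index);
}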
@@ -134,7 +134,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
IsolateAddressId::kPendingExceptionAddress, isolate())));

__ StoreP(r2, MemOperand(ip));
__ LoadRoot(r2, Heap::kExceptionRootIndex);
__ LoadRoot(r2, RootIndex::kException);
__ b(&exit, Label::kNear);

// Invoke: Link this frame into the handler chain.
@@ -475,7 +475,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
// Check if the function scheduled an exception.
__ Move(r7, ExternalReference::scheduled_exception_address(isolate));
__ LoadP(r7, MemOperand(r7));
__ CompareRoot(r7, Heap::kTheHoleValueRootIndex);
__ CompareRoot(r7, RootIndex::kTheHoleValue);
__ bne(&promote_scheduled_exception, Label::kNear);

__ b(r14);
@@ -523,13 +523,13 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0);

// new target
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);

// call data
__ push(call_data);

Register scratch = call_data;
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
// return value
__ push(scratch);
// return value default
@@ -609,7 +609,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
// Push data from AccessorInfo.
__ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ push(scratch);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ Push(scratch, scratch);
__ Move(scratch, ExternalReference::isolate_address(isolate()));
__ Push(scratch, holder);

@@ -123,14 +123,14 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
void TurboAssembler::LoadFromConstantsTable(Register destination,
int constant_index) {
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
RootIndex::kBuiltinsConstantsTable));

const uint32_t offset =
FixedArray::kHeaderSize + constant_index * kPointerSize - kHeapObjectTag;

CHECK(is_uint19(offset));
DCHECK_NE(destination, r0);
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
LoadP(destination, MemOperand(destination, offset), r1);
}

@@ -429,7 +429,7 @@ void TurboAssembler::MultiPopDoubles(RegList dregs, Register location) {
AddP(location, location, Operand(stack_offset));
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index,
void TurboAssembler::LoadRoot(Register destination, RootIndex index,
Condition) {
LoadP(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), r0);
}
@@ -1385,7 +1385,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

// Clear the new.target register if not given.
if (!new_target.is_valid()) {
LoadRoot(r5, Heap::kUndefinedValueRootIndex);
LoadRoot(r5, RootIndex::kUndefinedValue);
}

Label done;
@@ -1512,7 +1512,7 @@ void MacroAssembler::CompareInstanceType(Register map, Register type_reg,
CmpP(type_reg, Operand(type));
}

void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) {
void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
CmpP(obj, MemOperand(kRootRegister, RootRegisterOffset(index)));
}

@@ -1836,7 +1836,7 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
if (emit_debug_code()) {
Label done_checking;
AssertNotSmi(object);
CompareRoot(object, Heap::kUndefinedValueRootIndex);
CompareRoot(object, RootIndex::kUndefinedValue);
beq(&done_checking, Label::kNear);
LoadP(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);

@@ -53,7 +53,7 @@ inline MemOperand FieldMemOperand(Register object, Register index, int offset) {
}

// Generate a MemOperand for loading a field from Root register
inline MemOperand RootMemOperand(Heap::RootListIndex index) {
inline MemOperand RootMemOperand(RootIndex index) {
return MemOperand(kRootRegister, index << kPointerSizeLog2);
}

@@ -258,11 +258,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
Register exclusion3 = no_reg);

// Load an object from the root table.
void LoadRoot(Register destination, Heap::RootListIndex index) override {
void LoadRoot(Register destination, RootIndex index) override {
LoadRoot(destination, index, al);
}
void LoadRoot(Register destination, Heap::RootListIndex index,
Condition cond);
void LoadRoot(Register destination, RootIndex index, Condition cond);
//--------------------------------------------------------------------------
// S390 Macro Assemblers for Instructions
//--------------------------------------------------------------------------
@@ -1089,8 +1088,8 @@ class MacroAssembler : public TurboAssembler {

// Compare the object in a register to a value from the root list.
// Uses the ip register as scratch.
void CompareRoot(Register obj, Heap::RootListIndex index);
void PushRoot(Heap::RootListIndex index) {
void CompareRoot(Register obj, RootIndex index);
void PushRoot(RootIndex index) {
LoadRoot(r0, index);
Push(r0);
}
@@ -1103,14 +1102,13 @@ class MacroAssembler : public TurboAssembler {
void JumpToInstructionStream(Address entry);

// Compare the object in a register to a value and jump if they are equal.
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
void JumpIfRoot(Register with, RootIndex index, Label* if_equal) {
CompareRoot(with, index);
beq(if_equal);
}

// Compare the object in a register to a value and jump if they are not equal.
void JumpIfNotRoot(Register with, Heap::RootListIndex index,
Label* if_not_equal) {
void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal) {
CompareRoot(with, index);
bne(if_not_equal);
}

@@ -672,7 +672,7 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
SIXTEEN_CASES(kRootArrayConstants)
SIXTEEN_CASES(kRootArrayConstants + 16) {
int id = data & kRootArrayConstantsMask;
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
RootIndex root_index = static_cast<RootIndex>(id);
MaybeObject* object =
MaybeObject::FromObject(isolate->heap()->root(root_index));
DCHECK(!Heap::InNewSpace(object));
@@ -806,7 +806,7 @@ MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
new_object = GetBackReferencedObject(data & kSpaceMask);
} else if (where == kRootArray) {
int id = source_.GetInt();
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
RootIndex root_index = static_cast<RootIndex>(id);
new_object = isolate->heap()->root(root_index);
emit_write_barrier = Heap::InNewSpace(new_object);
hot_objects_.Add(HeapObject::cast(new_object));

@@ -248,7 +248,7 @@ void Serializer<AllocatorT>::PutRoot(

// Assert that the first 32 root array items are a conscious choice. They are
// chosen so that the most common ones can be encoded more efficiently.
STATIC_ASSERT(Heap::kArgumentsMarkerRootIndex ==
STATIC_ASSERT(static_cast<int>(RootIndex::kArgumentsMarker) ==
kNumberOfRootArrayConstants - 1);

if (how_to_code == kPlain && where_to_point == kStartOfObject &&
@@ -740,7 +740,7 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(
// Repeats are not subject to the write barrier so we can only use
// immortal immovable root members. They are never in new space.
if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
Heap::RootIsImmortalImmovable(root_index) &&
Heap::RootIsImmortalImmovable(static_cast<RootIndex>(root_index)) &&
*current == current[-1]) {
DCHECK_EQ(reference_type, HeapObjectReferenceType::STRONG);
DCHECK(!Heap::InNewSpace(current_contents));

@@ -152,9 +152,12 @@ void StartupSerializer::CheckRehashability(HeapObject* obj) {
}

bool StartupSerializer::MustBeDeferred(HeapObject* object) {
if (root_has_been_serialized_.test(Heap::kFreeSpaceMapRootIndex) &&
root_has_been_serialized_.test(Heap::kOnePointerFillerMapRootIndex) &&
root_has_been_serialized_.test(Heap::kTwoPointerFillerMapRootIndex)) {
if (root_has_been_serialized_.test(
static_cast<size_t>(RootIndex::kFreeSpaceMap)) &&
root_has_been_serialized_.test(
static_cast<size_t>(RootIndex::kOnePointerFillerMap)) &&
root_has_been_serialized_.test(
static_cast<size_t>(RootIndex::kTwoPointerFillerMap))) {
// All required root objects are serialized, so any aligned objects can
// be saved without problems.
return false;

@@ -69,7 +69,8 @@ class StartupSerializer : public Serializer<> {

void CheckRehashability(HeapObject* obj);

std::bitset<Heap::kStrongRootListLength> root_has_been_serialized_;
std::bitset<static_cast<size_t>(RootIndex::kStrongRootListLength)>
root_has_been_serialized_;
PartialCacheIndexMap partial_cache_index_map_;
std::vector<AccessorInfo*> accessor_infos_;
std::vector<CallHandlerInfo*> call_handler_infos_;
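The verbosity in MustBeDeferred comes from std::bitset::test(), which takes a size_t and therefore forces an explicit cast of each enum class index. A self-contained sketch of the same pattern with a toy two-entry root list (names mirror the diff, sizes are illustrative):

#include <bitset>
#include <cstddef>

enum class RootIndex : size_t { kFreeSpaceMap, kStrongRootListLength };

bool FreeSpaceMapSerialized(
    const std::bitset<static_cast<size_t>(RootIndex::kStrongRootListLength)>&
        root_has_been_serialized) {
  // bitset::test() wants size_t, so the enum class index is cast explicitly.
  return root_has_been_serialized.test(
      static_cast<size_t>(RootIndex::kFreeSpaceMap));
}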
@@ -32,7 +32,7 @@ void TurboAssemblerBase::IndirectLoadConstant(Register destination,
// check if any of the fast paths can be applied.

int builtin_index;
Heap::RootListIndex root_index;
RootIndex root_index;
if (isolate()->heap()->IsRootHandle(object, &root_index)) {
// Roots are loaded relative to the root register.
LoadRoot(destination, root_index);
@@ -84,8 +84,9 @@ void TurboAssemblerBase::IndirectLoadExternalReference(
}

// static
int32_t TurboAssemblerBase::RootRegisterOffset(Heap::RootListIndex root_index) {
return (root_index << kPointerSizeLog2) - kRootRegisterBias;
int32_t TurboAssemblerBase::RootRegisterOffset(RootIndex root_index) {
return (static_cast<int32_t>(root_index) << kPointerSizeLog2) -
kRootRegisterBias;
}

// static
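The offset arithmetic itself is unchanged; only the explicit int32_t cast is new. A worked example, assuming 64-bit pointers (kPointerSizeLog2 == 3) and an illustrative kRootRegisterBias of 128; both constants are assumptions for this sketch rather than values quoted from the diff:

#include <cstdint>

constexpr int kPointerSizeLog2 = 3;         // assumption: 64-bit build
constexpr int32_t kRootRegisterBias = 128;  // assumption: illustrative bias

enum class RootIndex : uint16_t {};

constexpr int32_t RootRegisterOffset(RootIndex root_index) {
  return (static_cast<int32_t>(root_index) << kPointerSizeLog2) -
         kRootRegisterBias;
}

// Root 17 lives at 17 * 8 - 128 = 8 bytes past the biased root register.
static_assert(RootRegisterOffset(static_cast<RootIndex>(17)) == 8,
              "biased root-relative offset");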
@@ -52,9 +52,9 @@ class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
intptr_t offset) = 0;
virtual void LoadRootRelative(Register destination, int32_t offset) = 0;

virtual void LoadRoot(Register destination, Heap::RootListIndex index) = 0;
virtual void LoadRoot(Register destination, RootIndex index) = 0;

static int32_t RootRegisterOffset(Heap::RootListIndex root_index);
static int32_t RootRegisterOffset(RootIndex root_index);
static int32_t RootRegisterOffsetForExternalReferenceIndex(
int reference_index);

@@ -107,7 +107,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
ExternalReference pending_exception = ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate());
__ Store(pending_exception, rax);
__ LoadRoot(rax, Heap::kExceptionRootIndex);
__ LoadRoot(rax, RootIndex::kException);
__ jmp(&exit);

// Invoke: Link this frame into the handler chain.
@@ -352,19 +352,19 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
__ j(above_equal, &ok, Label::kNear);

__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ CompareRoot(map, RootIndex::kHeapNumberMap);
__ j(equal, &ok, Label::kNear);

__ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
__ CompareRoot(return_value, RootIndex::kUndefinedValue);
__ j(equal, &ok, Label::kNear);

__ CompareRoot(return_value, Heap::kTrueValueRootIndex);
__ CompareRoot(return_value, RootIndex::kTrueValue);
__ j(equal, &ok, Label::kNear);

__ CompareRoot(return_value, Heap::kFalseValueRootIndex);
__ CompareRoot(return_value, RootIndex::kFalseValue);
__ j(equal, &ok, Label::kNear);

__ CompareRoot(return_value, Heap::kNullValueRootIndex);
__ CompareRoot(return_value, RootIndex::kNullValue);
__ j(equal, &ok, Label::kNear);

__ Abort(AbortReason::kAPICallReturnedInvalidObject);
@@ -428,15 +428,15 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
__ PopReturnAddressTo(return_address);

// new target
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);

// call data
__ Push(call_data);

// return value
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
// return value default
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushRoot(RootIndex::kUndefinedValue);
// isolate
Register scratch = call_data;
__ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
@@ -526,7 +526,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ PopReturnAddressTo(scratch);
__ Push(receiver);
__ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
__ Push(kScratchRegister);  // return value
__ Push(kScratchRegister);  // return value default
__ PushAddress(ExternalReference::isolate_address(isolate()));

@@ -136,8 +136,8 @@ void MacroAssembler::Store(ExternalReference destination, Register source) {
void TurboAssembler::LoadFromConstantsTable(Register destination,
int constant_index) {
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
RootIndex::kBuiltinsConstantsTable));
LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
movp(destination,
FieldOperand(destination,
FixedArray::kHeaderSize + constant_index * kPointerSize));
@@ -193,22 +193,22 @@ void MacroAssembler::PushAddress(ExternalReference source) {
Push(kScratchRegister);
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
DCHECK(root_array_available_);
movp(destination, Operand(kRootRegister, RootRegisterOffset(index)));
}

void MacroAssembler::PushRoot(Heap::RootListIndex index) {
void MacroAssembler::PushRoot(RootIndex index) {
DCHECK(root_array_available_);
Push(Operand(kRootRegister, RootRegisterOffset(index)));
}

void TurboAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
void TurboAssembler::CompareRoot(Register with, RootIndex index) {
DCHECK(root_array_available_);
cmpp(with, Operand(kRootRegister, RootRegisterOffset(index)));
}

void TurboAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
DCHECK(root_array_available_);
DCHECK(!with.AddressUsesRegister(kScratchRegister));
LoadRoot(kScratchRegister, index);
@@ -2176,7 +2176,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,

// Clear the new.target register if not given.
if (!new_target.is_valid()) {
LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
LoadRoot(rdx, RootIndex::kUndefinedValue);
}

Label done;

@@ -220,8 +220,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
void Set(Operand dst, intptr_t x);

// Operations on roots in the root-array.
void LoadRoot(Register destination, Heap::RootListIndex index) override;
void LoadRoot(Operand destination, Heap::RootListIndex index) {
void LoadRoot(Register destination, RootIndex index) override;
void LoadRoot(Operand destination, RootIndex index) {
LoadRoot(kScratchRegister, index);
movp(destination, kScratchRegister);
}
@@ -409,8 +409,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
void Pinsrd(XMMRegister dst, Register src, int8_t imm8);
void Pinsrd(XMMRegister dst, Operand src, int8_t imm8);

void CompareRoot(Register with, Heap::RootListIndex index);
void CompareRoot(Operand with, Heap::RootListIndex index);
void CompareRoot(Register with, RootIndex index);
void CompareRoot(Operand with, RootIndex index);

// Generates function and stub prologue code.
void StubPrologue(StackFrame::Type type);
@@ -552,29 +552,27 @@ class MacroAssembler : public TurboAssembler {
// Load a root value where the index (or part of it) is variable.
// The variable_offset register is added to the fixed_offset value
// to get the index into the root-array.
void PushRoot(Heap::RootListIndex index);
void PushRoot(RootIndex index);

// Compare the object in a register to a value and jump if they are equal.
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal,
void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
Label::Distance if_equal_distance = Label::kFar) {
CompareRoot(with, index);
j(equal, if_equal, if_equal_distance);
}
void JumpIfRoot(Operand with, Heap::RootListIndex index, Label* if_equal,
void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
Label::Distance if_equal_distance = Label::kFar) {
CompareRoot(with, index);
j(equal, if_equal, if_equal_distance);
}

// Compare the object in a register to a value and jump if they are not equal.
void JumpIfNotRoot(Register with, Heap::RootListIndex index,
Label* if_not_equal,
void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
Label::Distance if_not_equal_distance = Label::kFar) {
CompareRoot(with, index);
j(not_equal, if_not_equal, if_not_equal_distance);
}
void JumpIfNotRoot(Operand with, Heap::RootListIndex index,
Label* if_not_equal,
void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
Label::Distance if_not_equal_distance = Label::kFar) {
CompareRoot(with, index);
j(not_equal, if_not_equal, if_not_equal_distance);
Some files were not shown because too many files have changed in this diff.