Use a root array register to load roots in generated ARM code.

This cuts down on code size and relocation work.
Review URL: http://codereview.chromium.org/171041

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2701 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
whesse@chromium.org 2009-08-17 14:31:49 +00:00
parent e64bf9ad6c
commit 349dc04a5d
15 changed files with 178 additions and 109 deletions

View File

@ -214,9 +214,13 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Enter an internal frame.
__ EnterInternalFrame();
// Setup the context from the function argument.
// Set up the context from the function argument.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Set up the roots register.
ExternalReference roots_address = ExternalReference::roots_address();
__ mov(r10, Operand(roots_address));
// Push the function and the receiver onto the stack.
__ push(r1);
__ push(r2);
@ -239,7 +243,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ mov(r4, Operand(Factory::undefined_value()));
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ mov(r5, Operand(r4));
__ mov(r6, Operand(r4));
__ mov(r7, Operand(r4));
@ -282,7 +286,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
{ Label done;
__ tst(r0, Operand(r0));
__ b(ne, &done);
__ mov(r2, Operand(Factory::undefined_value()));
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2);
__ add(r0, r0, Operand(1));
__ bind(&done);
@ -323,10 +327,10 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ tst(r2, Operand(kSmiTagMask));
__ b(eq, &call_to_object);
__ mov(r3, Operand(Factory::null_value()));
__ LoadRoot(r3, Heap::kNullValueRootIndex);
__ cmp(r2, r3);
__ b(eq, &use_global_receiver);
__ mov(r3, Operand(Factory::undefined_value()));
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
__ cmp(r2, r3);
__ b(eq, &use_global_receiver);
@ -492,10 +496,10 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ ldr(r0, MemOperand(fp, kRecvOffset));
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &call_to_object);
__ mov(r1, Operand(Factory::null_value()));
__ LoadRoot(r1, Heap::kNullValueRootIndex);
__ cmp(r0, r1);
__ b(eq, &use_global_receiver);
__ mov(r1, Operand(Factory::undefined_value()));
__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
__ cmp(r0, r1);
__ b(eq, &use_global_receiver);
@ -665,7 +669,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r1: function
// r2: expected number of arguments
// r3: code entry to call
__ mov(ip, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
__ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.

View File

@ -67,7 +67,7 @@ void EntryNode::Compile(MacroAssembler* masm) {
__ add(fp, sp, Operand(2 * kPointerSize));
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ mov(ip, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) {
__ push(ip);
}

View File

@ -305,7 +305,7 @@ void CodeGenerator::GenCode(FunctionLiteral* fun) {
// sp: stack pointer
// fp: frame pointer
// cp: callee's context
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
function_return_.Bind();
if (FLAG_trace) {
@ -478,11 +478,11 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
JumpTarget loaded;
JumpTarget materialize_true;
materialize_true.Branch(cc_reg_);
__ mov(r0, Operand(Factory::false_value()));
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
frame_->EmitPush(r0);
loaded.Jump();
materialize_true.Bind();
__ mov(r0, Operand(Factory::true_value()));
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
frame_->EmitPush(r0);
loaded.Bind();
cc_reg_ = al;
@ -499,7 +499,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
// Load "true" if necessary.
if (true_target.is_linked()) {
true_target.Bind();
__ mov(r0, Operand(Factory::true_value()));
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
frame_->EmitPush(r0);
}
// If both "true" and "false" need to be loaded jump across the code for
@ -510,7 +510,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
// Load "false" if necessary.
if (false_target.is_linked()) {
false_target.Bind();
__ mov(r0, Operand(Factory::false_value()));
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
frame_->EmitPush(r0);
}
// A value is loaded on all paths reaching this point.
@ -640,15 +640,18 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
// Fast case checks
// Check if the value is 'false'.
__ cmp(r0, Operand(Factory::false_value()));
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r0, ip);
false_target->Branch(eq);
// Check if the value is 'true'.
__ cmp(r0, Operand(Factory::true_value()));
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
true_target->Branch(eq);
// Check if the value is 'undefined'.
__ cmp(r0, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
false_target->Branch(eq);
// Check if the value is a smi.
@ -661,7 +664,8 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
frame_->EmitPush(r0);
frame_->CallRuntime(Runtime::kToBool, 1);
// Convert the result (r0) to a condition code.
__ cmp(r0, Operand(Factory::false_value()));
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r0, ip);
cc_reg_ = ne;
}
@ -1185,7 +1189,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
if (node->mode() == Variable::CONST) {
__ mov(r0, Operand(Factory::the_hole_value()));
__ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
frame_->EmitPush(r0);
} else if (node->fun() != NULL) {
LoadAndSpill(node->fun());
@ -1725,9 +1729,11 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
// Both SpiderMonkey and kjs ignore null and undefined in contrast
// to the specification. 12.6.4 mandates a call to ToObject.
frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
exit.Branch(eq);
__ cmp(r0, Operand(Factory::null_value()));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r0, ip);
exit.Branch(eq);
// Stack layout in body:
@ -1759,7 +1765,8 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
// Otherwise, we got a FixedArray, and we have to do a slow check.
__ mov(r2, Operand(r0));
__ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::meta_map()));
__ LoadRoot(ip, Heap::kMetaMapRootIndex);
__ cmp(r1, ip);
fixed_array.Branch(ne);
// Get enum cache
@ -1833,7 +1840,8 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
__ mov(r3, Operand(r0));
// If the property has been removed while iterating, we just skip it.
__ cmp(r3, Operand(Factory::null_value()));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r3, ip);
node->continue_target()->Branch(eq);
end_del_check.Bind();
@ -2093,7 +2101,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
// Fake a top of stack value (unneeded when FALLING) and set the
// state in r2, then jump around the unlink blocks if any.
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0);
__ mov(r2, Operand(Smi::FromInt(FALLING)));
if (nof_unlinks > 0) {
@ -2135,7 +2143,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
frame_->EmitPush(r0);
} else {
// Fake TOS for targets that shadowed breaks and continues.
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0);
}
__ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
@ -2322,8 +2330,9 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
r2,
&slow));
if (potential_slot->var()->mode() == Variable::CONST) {
__ cmp(r0, Operand(Factory::the_hole_value()));
__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
}
// There is always control flow to slow from
// ContextSlotOperandCheckExtensions so we have to jump around
@ -2360,8 +2369,9 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
// value.
Comment cmnt(masm_, "[ Unhole const");
frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::the_hole_value()));
__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
frame_->EmitPush(r0);
}
}
@ -2404,7 +2414,8 @@ void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
__ bind(&next);
// Terminate at global context.
__ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
__ cmp(tmp2, Operand(Factory::global_context_map()));
__ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
__ cmp(tmp2, ip);
__ b(eq, &fast);
// Check that extension is NULL.
__ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
@ -2501,7 +2512,8 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
__ ldr(r2, FieldMemOperand(r1, literal_offset));
JumpTarget done;
__ cmp(r2, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, ip);
done.Branch(ne);
// If the entry is undefined we call the runtime system to compute
@ -2583,7 +2595,8 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
// Check whether we need to materialize the object literal boilerplate.
// If so, jump to the deferred code.
__ cmp(r2, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, Operand(ip));
deferred->Branch(eq);
deferred->BindExit();
@ -2705,7 +2718,8 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
// Check whether we need to materialize the object literal boilerplate.
// If so, jump to the deferred code.
__ cmp(r2, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, Operand(ip));
deferred->Branch(eq);
deferred->BindExit();
@ -3036,7 +3050,7 @@ void CodeGenerator::VisitCallEval(CallEval* node) {
// Prepare stack for call to resolved function.
LoadAndSpill(function);
__ mov(r2, Operand(Factory::undefined_value()));
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r2); // Slot for receiver
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
@ -3180,7 +3194,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
// Non-JS objects have class null.
null.Bind();
__ mov(r0, Operand(Factory::null_value()));
__ LoadRoot(r0, Heap::kNullValueRootIndex);
frame_->EmitPush(r0);
// All done.
@ -3253,7 +3267,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
__ CallRuntime(Runtime::kLog, 2);
}
#endif
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0);
}
@ -3274,7 +3288,7 @@ void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
VirtualFrame::SpilledScope spilled_scope;
ASSERT(args->length() == 2);
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0);
}
@ -3494,14 +3508,14 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
} else {
// Default: Result of deleting non-global, not dynamically
// introduced variables is false.
__ mov(r0, Operand(Factory::false_value()));
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
}
} else {
// Default: Result of deleting expressions is true.
LoadAndSpill(node->expression()); // may have side-effects
frame_->Drop();
__ mov(r0, Operand(Factory::true_value()));
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
}
frame_->EmitPush(r0);
@ -3554,7 +3568,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
case Token::VOID:
// since the stack top is cached in r0, popping and then
// pushing a value can be done by just writing to r0.
__ mov(r0, Operand(Factory::undefined_value()));
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
break;
case Token::ADD: {
@ -3880,14 +3894,16 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
if (left_is_null || right_is_null) {
LoadAndSpill(left_is_null ? right : left);
frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::null_value()));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r0, ip);
// The 'null' value is only equal to 'undefined' if using non-strict
// comparisons.
if (op != Token::EQ_STRICT) {
true_target()->Branch(eq);
__ cmp(r0, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, Operand(ip));
true_target()->Branch(eq);
__ tst(r0, Operand(kSmiTagMask));
@ -3924,7 +3940,8 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
__ tst(r1, Operand(kSmiTagMask));
true_target()->Branch(eq);
__ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::heap_number_map()));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(r1, ip);
cc_reg_ = eq;
} else if (check->Equals(Heap::string_symbol())) {
@ -3944,13 +3961,16 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
cc_reg_ = lt;
} else if (check->Equals(Heap::boolean_symbol())) {
__ cmp(r1, Operand(Factory::true_value()));
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq);
__ cmp(r1, Operand(Factory::false_value()));
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
cc_reg_ = eq;
} else if (check->Equals(Heap::undefined_symbol())) {
__ cmp(r1, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq);
__ tst(r1, Operand(kSmiTagMask));
@ -3975,7 +3995,8 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
false_target()->Branch(eq);
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::null_value()));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq);
// It can be an undetectable object.
@ -4206,7 +4227,8 @@ void Reference::SetValue(InitState init_state) {
// executed, the code is identical to a normal store (see below).
Comment cmnt(masm, "[ Init const");
__ ldr(r2, cgen_->SlotOperand(slot, r2));
__ cmp(r2, Operand(Factory::the_hole_value()));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r2, ip);
exit.Branch(ne);
}
@ -4939,7 +4961,7 @@ static void AllocateHeapNumber(
// Tag and adjust back to start of new object.
__ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag));
// Get heap number map into scratch2.
__ mov(scratch2, Operand(Factory::heap_number_map()));
__ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
// Store heap number map in new object.
__ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset));
}
@ -6085,7 +6107,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&loop);
__ cmp(r2, Operand(r4));
__ b(eq, &is_instance);
__ cmp(r2, Operand(Factory::null_value()));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r2, ip);
__ b(eq, &is_not_instance);
__ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
__ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));

View File

@ -842,7 +842,7 @@ static const int kMaxRegisters = 16;
// formatting. See for example the command "objdump -d <binary file>".
static const char* reg_names[kMaxRegisters] = {
"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
"r8", "r9", "sl", "fp", "ip", "sp", "lr", "pc",
"r8", "r9", "r10", "fp", "ip", "sp", "lr", "pc",
};

View File

@ -87,7 +87,8 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
// Check that the properties array is a dictionary.
__ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
__ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
__ cmp(r3, Operand(Factory::hash_table_map()));
__ LoadRoot(ip, Heap::kHashTableMapRootIndex);
__ cmp(r3, ip);
__ b(ne, miss);
// Compute the capacity mask.
@ -254,9 +255,11 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Check for boolean.
__ bind(&non_string);
__ cmp(r1, Operand(Factory::true_value()));
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
__ b(eq, &boolean);
__ cmp(r1, Operand(Factory::false_value()));
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
__ b(ne, &miss);
__ bind(&boolean);
StubCompiler::GenerateLoadGlobalFunctionPrototype(
@ -582,7 +585,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r3, Operand(Factory::fixed_array_map()));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r3, ip);
__ b(ne, &slow);
// Check that the key (index) is within bounds.
__ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
@ -601,7 +605,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&fast);
__ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
__ cmp(r0, Operand(Factory::the_hole_value()));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
// In case the loaded value is the_hole we have to consult GetProperty
// to ensure the prototype chain is searched.
__ b(eq, &slow);
@ -661,7 +666,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ cmp(r2, Operand(Factory::fixed_array_map()));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r2, ip);
__ b(ne, &slow);
// Untag the key (for checking against untagged length in the fixed array).
__ mov(r1, Operand(r1, ASR, kSmiTagSize));
@ -710,7 +716,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&array);
__ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
__ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::fixed_array_map()));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r1, ip);
__ b(ne, &slow);
// Check the key against the length in the array, compute the

View File

@ -174,6 +174,13 @@ void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
}
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond) {
ldr(destination, MemOperand(r10, index << kPointerSizeLog2), cond);
}
// Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
@ -714,7 +721,8 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
push(holder_reg); // Temporarily save holder on the stack.
// Read the first word and compare to the global_context_map.
ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
cmp(holder_reg, Operand(Factory::global_context_map()));
LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
cmp(holder_reg, ip);
Check(eq, "JSGlobalObject::global_context should be a global context.");
pop(holder_reg); // Restore holder.
}
@ -731,11 +739,13 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// that ip is clobbered as part of cmp with an object Operand.
push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, ip); // Move ip to its holding place.
cmp(holder_reg, Operand(Factory::null_value()));
LoadRoot(ip, Heap::kNullValueRootIndex);
cmp(holder_reg, ip);
Check(ne, "JSGlobalProxy::context() should not be null.");
ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
cmp(holder_reg, Operand(Factory::global_context_map()));
LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
cmp(holder_reg, ip);
Check(eq, "JSGlobalObject::global_context should be a global context.");
// Restoring ip is not needed; ip is reloaded below.
pop(holder_reg); // Restore holder.
@ -792,7 +802,8 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// If the prototype or initial map is the hole, don't return it and
// simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system.
cmp(result, Operand(Factory::the_hole_value()));
LoadRoot(ip, Heap::kTheHoleValueRootIndex);
cmp(result, ip);
b(eq, miss);
// If the function does not have an initial map, we're done.
@ -832,7 +843,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
add(sp, sp, Operand(num_arguments * kPointerSize));
}
mov(r0, Operand(Factory::undefined_value()));
LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}

View File

@ -89,6 +89,10 @@ class MacroAssembler: public Assembler {
void Ret(Condition cond = al);
// Jumps to the label at the index given by the Smi in "index".
void SmiJumpTable(Register index, Vector<Label*> targets);
// Load an object from the root table.
void LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond = al);
// Sets the remembered set bit for [address+offset], where address is the
// address of the heap object 'object'. The address must be in the first 8K

View File

@ -395,7 +395,8 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
__ mov(scratch, Operand(Handle<Object>(cell)));
__ ldr(scratch,
FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
__ cmp(scratch, Operand(Factory::the_hole_value()));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(scratch, ip);
__ b(ne, miss);
}
object = JSObject::cast(object->GetPrototype());
@ -667,9 +668,11 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
case BOOLEAN_CHECK: {
Label fast;
// Check that the object is a boolean.
__ cmp(r1, Operand(Factory::true_value()));
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
__ b(eq, &fast);
__ cmp(r1, Operand(Factory::false_value()));
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
__ b(ne, &miss);
__ bind(&fast);
// Check that the maps starting from the prototype haven't changed.
@ -688,7 +691,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
__ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ cmp(r2, Operand(Factory::fixed_array_map()));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r2, ip);
__ b(ne, &miss);
break;
@ -1108,7 +1112,8 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
// Check for deleted property if property can actually be deleted.
if (!is_dont_delete) {
__ cmp(r0, Operand(Factory::the_hole_value()));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
__ b(eq, &miss);
}

View File

@ -139,7 +139,7 @@ void VirtualFrame::AllocateStackSlots() {
Comment cmnt(masm(), "[ Allocate space for locals");
Adjust(count);
// Initialize stack slots with 'undefined' value.
__ mov(ip, Operand(Factory::undefined_value()));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) {
__ push(ip);
}

View File

@ -563,6 +563,11 @@ ExternalReference ExternalReference::the_hole_value_location() {
}
ExternalReference ExternalReference::roots_address() {
return ExternalReference(Heap::roots_address());
}
ExternalReference ExternalReference::address_of_stack_guard_limit() {
return ExternalReference(StackGuard::address_of_jslimit());
}

View File

@ -398,6 +398,9 @@ class ExternalReference BASE_EMBEDDED {
// Static variable Factory::the_hole_value.location()
static ExternalReference the_hole_value_location();
// Static variable Heap::roots_address()
static ExternalReference roots_address();
// Static variable StackGuard::address_of_jslimit()
static ExternalReference address_of_stack_guard_limit();

View File

@ -731,6 +731,9 @@ class Heap : public AllStatic {
// Update the next script id.
static inline void SetLastScriptId(Object* last_script_id);
// Generated code can embed this address to get access to the roots.
static Object** roots_address() { return roots_; }
#ifdef DEBUG
static void Print();
static void PrintHandles();
@ -837,6 +840,26 @@ class Heap : public AllStatic {
> old_gen_allocation_limit_;
}
// Declare all the root indices.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_DECLARATION
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
};
private:
static int semispace_size_;
static int initial_semispace_size_;
@ -917,26 +940,6 @@ class Heap : public AllStatic {
// last GC.
static int old_gen_exhausted_;
// Declare all the root indices.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_DECLARATION
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
};
static Object* roots_[kRootListLength];
struct StringTypeTable {

View File

@ -672,13 +672,17 @@ void ExternalReferenceTable::PopulateTable() {
UNCLASSIFIED,
2,
"Factory::the_hole_value().location()");
Add(ExternalReference::address_of_stack_guard_limit().address(),
Add(ExternalReference::roots_address().address(),
UNCLASSIFIED,
3,
"Heap::roots_address()");
Add(ExternalReference::address_of_stack_guard_limit().address(),
UNCLASSIFIED,
4,
"StackGuard::address_of_jslimit()");
Add(ExternalReference::address_of_regexp_stack_limit().address(),
UNCLASSIFIED,
4,
5,
"RegExpStack::limit_address()");
Add(ExternalReference::new_space_start().address(),
UNCLASSIFIED,
@ -699,36 +703,36 @@ void ExternalReferenceTable::PopulateTable() {
#ifdef ENABLE_DEBUGGER_SUPPORT
Add(ExternalReference::debug_break().address(),
UNCLASSIFIED,
5,
10,
"Debug::Break()");
Add(ExternalReference::debug_step_in_fp_address().address(),
UNCLASSIFIED,
10,
11,
"Debug::step_in_fp_addr()");
#endif
Add(ExternalReference::double_fp_operation(Token::ADD).address(),
UNCLASSIFIED,
11,
12,
"add_two_doubles");
Add(ExternalReference::double_fp_operation(Token::SUB).address(),
UNCLASSIFIED,
12,
13,
"sub_two_doubles");
Add(ExternalReference::double_fp_operation(Token::MUL).address(),
UNCLASSIFIED,
13,
14,
"mul_two_doubles");
Add(ExternalReference::double_fp_operation(Token::DIV).address(),
UNCLASSIFIED,
14,
15,
"div_two_doubles");
Add(ExternalReference::double_fp_operation(Token::MOD).address(),
UNCLASSIFIED,
15,
16,
"mod_two_doubles");
Add(ExternalReference::compare_doubles().address(),
UNCLASSIFIED,
16,
17,
"compare_doubles");
}

View File

@ -123,13 +123,13 @@ TEST(Type0) {
"20354189 eorcss r4, r5, r9, lsl #3");
COMPARE(sub(r5, r6, Operand(r10, LSL, 31), LeaveCC, hs),
"20465f8a subcs r5, r6, sl, lsl #31");
"20465f8a subcs r5, r6, r10, lsl #31");
COMPARE(sub(r5, r6, Operand(r10, LSL, 30), SetCC, cc),
"30565f0a subccs r5, r6, sl, lsl #30");
"30565f0a subccs r5, r6, r10, lsl #30");
COMPARE(sub(r5, r6, Operand(r10, LSL, 24), LeaveCC, lo),
"30465c0a subcc r5, r6, sl, lsl #24");
"30465c0a subcc r5, r6, r10, lsl #24");
COMPARE(sub(r5, r6, Operand(r10, LSL, 16), SetCC, mi),
"4056580a submis r5, r6, sl, lsl #16");
"4056580a submis r5, r6, r10, lsl #16");
COMPARE(rsb(r6, r7, Operand(fp)),
"e067600b rsb r6, r7, fp");
@ -163,7 +163,7 @@ TEST(Type0) {
COMPARE(sbc(r7, r9, Operand(ip, ROR, 4)),
"e0c9726c sbc r7, r9, ip, ror #4");
COMPARE(sbc(r7, r10, Operand(ip), SetCC),
"e0da700c sbcs r7, sl, ip");
"e0da700c sbcs r7, r10, ip");
COMPARE(sbc(r7, ip, Operand(ip, ROR, 31), SetCC, hi),
"80dc7fec sbchis r7, ip, ip, ror #31");
@ -240,7 +240,7 @@ TEST(Type0) {
"51d10004 bicpls r0, r1, r4");
COMPARE(mvn(r10, Operand(r1)),
"e1e0a001 mvn sl, r1");
"e1e0a001 mvn r10, r1");
COMPARE(mvn(r9, Operand(r2)),
"e1e09002 mvn r9, r2");
COMPARE(mvn(r0, Operand(r3), SetCC),

View File

@ -125,9 +125,9 @@ TEST(ExternalReferenceEncoder) {
encoder.Encode(the_hole_value_location.address()));
ExternalReference stack_guard_limit_address =
ExternalReference::address_of_stack_guard_limit();
CHECK_EQ(make_code(UNCLASSIFIED, 3),
CHECK_EQ(make_code(UNCLASSIFIED, 4),
encoder.Encode(stack_guard_limit_address.address()));
CHECK_EQ(make_code(UNCLASSIFIED, 5),
CHECK_EQ(make_code(UNCLASSIFIED, 10),
encoder.Encode(ExternalReference::debug_break().address()));
CHECK_EQ(make_code(UNCLASSIFIED, 6),
encoder.Encode(ExternalReference::new_space_start().address()));
@ -157,9 +157,9 @@ TEST(ExternalReferenceDecoder) {
CHECK_EQ(ExternalReference::the_hole_value_location().address(),
decoder.Decode(make_code(UNCLASSIFIED, 2)));
CHECK_EQ(ExternalReference::address_of_stack_guard_limit().address(),
decoder.Decode(make_code(UNCLASSIFIED, 3)));
decoder.Decode(make_code(UNCLASSIFIED, 4)));
CHECK_EQ(ExternalReference::debug_break().address(),
decoder.Decode(make_code(UNCLASSIFIED, 5)));
decoder.Decode(make_code(UNCLASSIFIED, 10)));
CHECK_EQ(ExternalReference::new_space_start().address(),
decoder.Decode(make_code(UNCLASSIFIED, 6)));
}