Revert commit 2701 per Erik Corry's request.

Original CL:

http://codereview.chromium.org/171041
Review URL: http://codereview.chromium.org/171089

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2702 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: feng@chromium.org
Date:   2009-08-18 00:12:26 +00:00
Parent: 349dc04a5d
Commit: 33fb11c12f
15 changed files with 109 additions and 178 deletions
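
The hunks below undo one mechanical change throughout the ARM port: where commit 2701 made generated code load well-known constants through the heap's root array (a LoadRoot helper indexing off the dedicated roots register r10), the reverted code goes back to embedding each Factory handle directly at the use site, and the external-reference table and tests are renumbered accordingly. The C++ sketch below is only an illustration of that contrast under assumed stand-in names (roots, RootIndex, LoadRoot); it is not V8 code.

    // Illustration only (assumed names, not V8 internals).
    #include <cstdio>

    enum RootIndex { kUndefinedRootIndex, kNullRootIndex, kRootListLength };

    // One shared table of well-known values addressed through a single base
    // pointer -- the strategy being removed (roots register r10 + LoadRoot).
    static const char* roots[kRootListLength] = { "undefined", "null" };

    static const char* LoadRoot(const char* const* roots_base, RootIndex index) {
      return roots_base[index];  // base + (index << kPointerSizeLog2) in the real code
    }

    int main() {
      const char* via_table = LoadRoot(roots, kUndefinedRootIndex);
      const char* embedded = "undefined";  // restored style: constant baked in per site
      std::printf("%s / %s\n", via_table, embedded);
      return 0;
    }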


@@ -214,13 +214,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
 // Enter an internal frame.
 __ EnterInternalFrame();
-// Set up the context from the function argument.
+// Setup the context from the function argument.
 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
-// Set up the roots register.
-ExternalReference roots_address = ExternalReference::roots_address();
-__ mov(r10, Operand(roots_address));
 // Push the function and the receiver onto the stack.
 __ push(r1);
 __ push(r2);
@@ -243,7 +239,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
 // Initialize all JavaScript callee-saved registers, since they will be seen
 // by the garbage collector as part of handlers.
-__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+__ mov(r4, Operand(Factory::undefined_value()));
 __ mov(r5, Operand(r4));
 __ mov(r6, Operand(r4));
 __ mov(r7, Operand(r4));
@@ -286,7 +282,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 { Label done;
 __ tst(r0, Operand(r0));
 __ b(ne, &done);
-__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+__ mov(r2, Operand(Factory::undefined_value()));
 __ push(r2);
 __ add(r0, r0, Operand(1));
 __ bind(&done);
@@ -327,10 +323,10 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 __ tst(r2, Operand(kSmiTagMask));
 __ b(eq, &call_to_object);
-__ LoadRoot(r3, Heap::kNullValueRootIndex);
+__ mov(r3, Operand(Factory::null_value()));
 __ cmp(r2, r3);
 __ b(eq, &use_global_receiver);
-__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
+__ mov(r3, Operand(Factory::undefined_value()));
 __ cmp(r2, r3);
 __ b(eq, &use_global_receiver);
@@ -496,10 +492,10 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
 __ ldr(r0, MemOperand(fp, kRecvOffset));
 __ tst(r0, Operand(kSmiTagMask));
 __ b(eq, &call_to_object);
-__ LoadRoot(r1, Heap::kNullValueRootIndex);
+__ mov(r1, Operand(Factory::null_value()));
 __ cmp(r0, r1);
 __ b(eq, &use_global_receiver);
-__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+__ mov(r1, Operand(Factory::undefined_value()));
 __ cmp(r0, r1);
 __ b(eq, &use_global_receiver);
@@ -669,7 +665,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 // r1: function
 // r2: expected number of arguments
 // r3: code entry to call
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+__ mov(ip, Operand(Factory::undefined_value()));
 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
 __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.


@@ -67,7 +67,7 @@ void EntryNode::Compile(MacroAssembler* masm) {
 __ add(fp, sp, Operand(2 * kPointerSize));
 int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
 if (count > 0) {
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+__ mov(ip, Operand(Factory::undefined_value()));
 for (int i = 0; i < count; i++) {
 __ push(ip);
 }


@@ -305,7 +305,7 @@ void CodeGenerator::GenCode(FunctionLiteral* fun) {
 // sp: stack pointer
 // fp: frame pointer
 // cp: callee's context
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 function_return_.Bind();
 if (FLAG_trace) {
@@ -478,11 +478,11 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
 JumpTarget loaded;
 JumpTarget materialize_true;
 materialize_true.Branch(cc_reg_);
-__ LoadRoot(r0, Heap::kFalseValueRootIndex);
+__ mov(r0, Operand(Factory::false_value()));
 frame_->EmitPush(r0);
 loaded.Jump();
 materialize_true.Bind();
-__ LoadRoot(r0, Heap::kTrueValueRootIndex);
+__ mov(r0, Operand(Factory::true_value()));
 frame_->EmitPush(r0);
 loaded.Bind();
 cc_reg_ = al;
@@ -499,7 +499,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
 // Load "true" if necessary.
 if (true_target.is_linked()) {
 true_target.Bind();
-__ LoadRoot(r0, Heap::kTrueValueRootIndex);
+__ mov(r0, Operand(Factory::true_value()));
 frame_->EmitPush(r0);
 }
 // If both "true" and "false" need to be loaded jump across the code for
@@ -510,7 +510,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
 // Load "false" if necessary.
 if (false_target.is_linked()) {
 false_target.Bind();
-__ LoadRoot(r0, Heap::kFalseValueRootIndex);
+__ mov(r0, Operand(Factory::false_value()));
 frame_->EmitPush(r0);
 }
 // A value is loaded on all paths reaching this point.
@@ -640,18 +640,15 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
 // Fast case checks
 // Check if the value is 'false'.
-__ LoadRoot(ip, Heap::kFalseValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::false_value()));
 false_target->Branch(eq);
 // Check if the value is 'true'.
-__ LoadRoot(ip, Heap::kTrueValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::true_value()));
 true_target->Branch(eq);
 // Check if the value is 'undefined'.
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::undefined_value()));
 false_target->Branch(eq);
 // Check if the value is a smi.
@@ -664,8 +661,7 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
 frame_->EmitPush(r0);
 frame_->CallRuntime(Runtime::kToBool, 1);
 // Convert the result (r0) to a condition code.
-__ LoadRoot(ip, Heap::kFalseValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::false_value()));
 cc_reg_ = ne;
 }
@@ -1189,7 +1185,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
 // 'undefined') because we may have a (legal) redeclaration and we
 // must not destroy the current value.
 if (node->mode() == Variable::CONST) {
-__ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+__ mov(r0, Operand(Factory::the_hole_value()));
 frame_->EmitPush(r0);
 } else if (node->fun() != NULL) {
 LoadAndSpill(node->fun());
@@ -1729,11 +1725,9 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
 // Both SpiderMonkey and kjs ignore null and undefined in contrast
 // to the specification. 12.6.4 mandates a call to ToObject.
 frame_->EmitPop(r0);
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::undefined_value()));
 exit.Branch(eq);
-__ LoadRoot(ip, Heap::kNullValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::null_value()));
 exit.Branch(eq);
 // Stack layout in body:
@@ -1765,8 +1759,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
 // Otherwise, we got a FixedArray, and we have to do a slow check.
 __ mov(r2, Operand(r0));
 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kMetaMapRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::meta_map()));
 fixed_array.Branch(ne);
 // Get enum cache
@@ -1840,8 +1833,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
 __ mov(r3, Operand(r0));
 // If the property has been removed while iterating, we just skip it.
-__ LoadRoot(ip, Heap::kNullValueRootIndex);
-__ cmp(r3, ip);
+__ cmp(r3, Operand(Factory::null_value()));
 node->continue_target()->Branch(eq);
 end_del_check.Bind();
@@ -2101,7 +2093,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
 // Fake a top of stack value (unneeded when FALLING) and set the
 // state in r2, then jump around the unlink blocks if any.
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 frame_->EmitPush(r0);
 __ mov(r2, Operand(Smi::FromInt(FALLING)));
 if (nof_unlinks > 0) {
@@ -2143,7 +2135,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
 frame_->EmitPush(r0);
 } else {
 // Fake TOS for targets that shadowed breaks and continues.
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 frame_->EmitPush(r0);
 }
 __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
@@ -2330,9 +2322,8 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
 r2,
 &slow));
 if (potential_slot->var()->mode() == Variable::CONST) {
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(r0, ip);
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+__ cmp(r0, Operand(Factory::the_hole_value()));
+__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
 }
 // There is always control flow to slow from
 // ContextSlotOperandCheckExtensions so we have to jump around
@@ -2369,9 +2360,8 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
 // value.
 Comment cmnt(masm_, "[ Unhole const");
 frame_->EmitPop(r0);
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(r0, ip);
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+__ cmp(r0, Operand(Factory::the_hole_value()));
+__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
 frame_->EmitPush(r0);
 }
 }
@@ -2414,8 +2404,7 @@ void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
 __ bind(&next);
 // Terminate at global context.
 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
-__ cmp(tmp2, ip);
+__ cmp(tmp2, Operand(Factory::global_context_map()));
 __ b(eq, &fast);
 // Check that extension is NULL.
 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
@@ -2512,8 +2501,7 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
 __ ldr(r2, FieldMemOperand(r1, literal_offset));
 JumpTarget done;
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r2, ip);
+__ cmp(r2, Operand(Factory::undefined_value()));
 done.Branch(ne);
 // If the entry is undefined we call the runtime system to computed
@@ -2595,8 +2583,7 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
 // Check whether we need to materialize the object literal boilerplate.
 // If so, jump to the deferred code.
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r2, Operand(ip));
+__ cmp(r2, Operand(Factory::undefined_value()));
 deferred->Branch(eq);
 deferred->BindExit();
@@ -2718,8 +2705,7 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
 // Check whether we need to materialize the object literal boilerplate.
 // If so, jump to the deferred code.
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r2, Operand(ip));
+__ cmp(r2, Operand(Factory::undefined_value()));
 deferred->Branch(eq);
 deferred->BindExit();
@@ -3050,7 +3036,7 @@ void CodeGenerator::VisitCallEval(CallEval* node) {
 // Prepare stack for call to resolved function.
 LoadAndSpill(function);
-__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+__ mov(r2, Operand(Factory::undefined_value()));
 frame_->EmitPush(r2); // Slot for receiver
 int arg_count = args->length();
 for (int i = 0; i < arg_count; i++) {
@@ -3194,7 +3180,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
 // Non-JS objects have class null.
 null.Bind();
-__ LoadRoot(r0, Heap::kNullValueRootIndex);
+__ mov(r0, Operand(Factory::null_value()));
 frame_->EmitPush(r0);
 // All done.
@@ -3267,7 +3253,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
 __ CallRuntime(Runtime::kLog, 2);
 }
 #endif
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 frame_->EmitPush(r0);
 }
@@ -3288,7 +3274,7 @@ void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
 VirtualFrame::SpilledScope spilled_scope;
 ASSERT(args->length() == 2);
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 frame_->EmitPush(r0);
 }
@@ -3508,14 +3494,14 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 } else {
 // Default: Result of deleting non-global, not dynamically
 // introduced variables is false.
-__ LoadRoot(r0, Heap::kFalseValueRootIndex);
+__ mov(r0, Operand(Factory::false_value()));
 }
 } else {
 // Default: Result of deleting expressions is true.
 LoadAndSpill(node->expression()); // may have side-effects
 frame_->Drop();
-__ LoadRoot(r0, Heap::kTrueValueRootIndex);
+__ mov(r0, Operand(Factory::true_value()));
 }
 frame_->EmitPush(r0);
@@ -3568,7 +3554,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 case Token::VOID:
 // since the stack top is cached in r0, popping and then
 // pushing a value can be done by just writing to r0.
-__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+__ mov(r0, Operand(Factory::undefined_value()));
 break;
 case Token::ADD: {
@@ -3894,16 +3880,14 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 if (left_is_null || right_is_null) {
 LoadAndSpill(left_is_null ? right : left);
 frame_->EmitPop(r0);
-__ LoadRoot(ip, Heap::kNullValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::null_value()));
 // The 'null' value is only equal to 'undefined' if using non-strict
 // comparisons.
 if (op != Token::EQ_STRICT) {
 true_target()->Branch(eq);
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r0, Operand(ip));
+__ cmp(r0, Operand(Factory::undefined_value()));
 true_target()->Branch(eq);
 __ tst(r0, Operand(kSmiTagMask));
@@ -3940,8 +3924,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 __ tst(r1, Operand(kSmiTagMask));
 true_target()->Branch(eq);
 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::heap_number_map()));
 cc_reg_ = eq;
 } else if (check->Equals(Heap::string_symbol())) {
@@ -3961,16 +3944,13 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 cc_reg_ = lt;
 } else if (check->Equals(Heap::boolean_symbol())) {
-__ LoadRoot(ip, Heap::kTrueValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::true_value()));
 true_target()->Branch(eq);
-__ LoadRoot(ip, Heap::kFalseValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::false_value()));
 cc_reg_ = eq;
 } else if (check->Equals(Heap::undefined_symbol())) {
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::undefined_value()));
 true_target()->Branch(eq);
 __ tst(r1, Operand(kSmiTagMask));
@@ -3995,8 +3975,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
 false_target()->Branch(eq);
 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kNullValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::null_value()));
 true_target()->Branch(eq);
 // It can be an undetectable object.
@@ -4227,8 +4206,7 @@ void Reference::SetValue(InitState init_state) {
 // executed, the code is identical to a normal store (see below).
 Comment cmnt(masm, "[ Init const");
 __ ldr(r2, cgen_->SlotOperand(slot, r2));
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(r2, ip);
+__ cmp(r2, Operand(Factory::the_hole_value()));
 exit.Branch(ne);
 }
@@ -4961,7 +4939,7 @@ static void AllocateHeapNumber(
 // Tag and adjust back to start of new object.
 __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag));
 // Get heap number map into scratch2.
-__ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
+__ mov(scratch2, Operand(Factory::heap_number_map()));
 // Store heap number map in new object.
 __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset));
 }
@@ -6107,8 +6085,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
 __ bind(&loop);
 __ cmp(r2, Operand(r4));
 __ b(eq, &is_instance);
-__ LoadRoot(ip, Heap::kNullValueRootIndex);
-__ cmp(r2, ip);
+__ cmp(r2, Operand(Factory::null_value()));
 __ b(eq, &is_not_instance);
 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));


@@ -842,7 +842,7 @@ static const int kMaxRegisters = 16;
 // formatting. See for example the command "objdump -d <binary file>".
 static const char* reg_names[kMaxRegisters] = {
 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
-"r8", "r9", "r10", "fp", "ip", "sp", "lr", "pc",
+"r8", "r9", "sl", "fp", "ip", "sp", "lr", "pc",
 };


@@ -87,8 +87,7 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
 // Check that the properties array is a dictionary.
 __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
 __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kHashTableMapRootIndex);
-__ cmp(r3, ip);
+__ cmp(r3, Operand(Factory::hash_table_map()));
 __ b(ne, miss);
 // Compute the capacity mask.
@@ -255,11 +254,9 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
 // Check for boolean.
 __ bind(&non_string);
-__ LoadRoot(ip, Heap::kTrueValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::true_value()));
 __ b(eq, &boolean);
-__ LoadRoot(ip, Heap::kFalseValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::false_value()));
 __ b(ne, &miss);
 __ bind(&boolean);
 StubCompiler::GenerateLoadGlobalFunctionPrototype(
@@ -585,8 +582,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
 // Check that the object is in fast mode (not dictionary).
 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-__ cmp(r3, ip);
+__ cmp(r3, Operand(Factory::fixed_array_map()));
 __ b(ne, &slow);
 // Check that the key (index) is within bounds.
 __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
@@ -605,8 +601,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
 __ bind(&fast);
 __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::the_hole_value()));
 // In case the loaded value is the_hole we have to consult GetProperty
 // to ensure the prototype chain is searched.
 __ b(eq, &slow);
@@ -666,8 +661,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
 __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
 // Check that the object is in fast mode (not dictionary).
 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-__ cmp(r2, ip);
+__ cmp(r2, Operand(Factory::fixed_array_map()));
 __ b(ne, &slow);
 // Untag the key (for checking against untagged length in the fixed array).
 __ mov(r1, Operand(r1, ASR, kSmiTagSize));
@@ -716,8 +710,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
 __ bind(&array);
 __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::fixed_array_map()));
 __ b(ne, &slow);
 // Check the key against the length in the array, compute the


@@ -174,13 +174,6 @@ void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
 }
-void MacroAssembler::LoadRoot(Register destination,
-Heap::RootListIndex index,
-Condition cond) {
-ldr(destination, MemOperand(r10, index << kPointerSizeLog2), cond);
-}
 // Will clobber 4 registers: object, offset, scratch, ip. The
 // register 'object' contains a heap object pointer. The heap object
 // tag is shifted away.
@@ -721,8 +714,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
 push(holder_reg); // Temporarily save holder on the stack.
 // Read the first word and compare to the global_context_map.
 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
-LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
-cmp(holder_reg, ip);
+cmp(holder_reg, Operand(Factory::global_context_map()));
 Check(eq, "JSGlobalObject::global_context should be a global context.");
 pop(holder_reg); // Restore holder.
 }
@@ -739,13 +731,11 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
 // that ip is clobbered as part of cmp with an object Operand.
 push(holder_reg); // Temporarily save holder on the stack.
 mov(holder_reg, ip); // Move ip to its holding place.
-LoadRoot(ip, Heap::kNullValueRootIndex);
-cmp(holder_reg, ip);
+cmp(holder_reg, Operand(Factory::null_value()));
 Check(ne, "JSGlobalProxy::context() should not be null.");
 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
-LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
-cmp(holder_reg, ip);
+cmp(holder_reg, Operand(Factory::global_context_map()));
 Check(eq, "JSGlobalObject::global_context should be a global context.");
 // Restore ip is not needed. ip is reloaded below.
 pop(holder_reg); // Restore holder.
@@ -802,8 +792,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
 // If the prototype or initial map is the hole, don't return it and
 // simply miss the cache instead. This will allow us to allocate a
 // prototype object on-demand in the runtime system.
-LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-cmp(result, ip);
+cmp(result, Operand(Factory::the_hole_value()));
 b(eq, miss);
 // If the function does not have an initial map, we're done.
@@ -843,7 +832,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) {
 if (num_arguments > 0) {
 add(sp, sp, Operand(num_arguments * kPointerSize));
 }
-LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+mov(r0, Operand(Factory::undefined_value()));
 }


@@ -89,10 +89,6 @@ class MacroAssembler: public Assembler {
 void Ret(Condition cond = al);
 // Jumps to the label at the index given by the Smi in "index".
 void SmiJumpTable(Register index, Vector<Label*> targets);
-// Load an object from the root table.
-void LoadRoot(Register destination,
-Heap::RootListIndex index,
-Condition cond = al);
 // Sets the remembered set bit for [address+offset], where address is the
 // address of the heap object 'object'. The address must be in the first 8K


@@ -395,8 +395,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
 __ mov(scratch, Operand(Handle<Object>(cell)));
 __ ldr(scratch,
 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(scratch, ip);
+__ cmp(scratch, Operand(Factory::the_hole_value()));
 __ b(ne, miss);
 }
 object = JSObject::cast(object->GetPrototype());
@@ -668,11 +667,9 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
 case BOOLEAN_CHECK: {
 Label fast;
 // Check that the object is a boolean.
-__ LoadRoot(ip, Heap::kTrueValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::true_value()));
 __ b(eq, &fast);
-__ LoadRoot(ip, Heap::kFalseValueRootIndex);
-__ cmp(r1, ip);
+__ cmp(r1, Operand(Factory::false_value()));
 __ b(ne, &miss);
 __ bind(&fast);
 // Check that the maps starting from the prototype haven't changed.
@@ -691,8 +688,7 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
 __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
 // Check that the object is in fast mode (not dictionary).
 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
-__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-__ cmp(r2, ip);
+__ cmp(r2, Operand(Factory::fixed_array_map()));
 __ b(ne, &miss);
 break;
@@ -1112,8 +1108,7 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
 // Check for deleted property if property can actually be deleted.
 if (!is_dont_delete) {
-__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-__ cmp(r0, ip);
+__ cmp(r0, Operand(Factory::the_hole_value()));
 __ b(eq, &miss);
 }


@@ -139,7 +139,7 @@ void VirtualFrame::AllocateStackSlots() {
 Comment cmnt(masm(), "[ Allocate space for locals");
 Adjust(count);
 // Initialize stack slots with 'undefined' value.
-__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+__ mov(ip, Operand(Factory::undefined_value()));
 for (int i = 0; i < count; i++) {
 __ push(ip);
 }


@@ -563,11 +563,6 @@ ExternalReference ExternalReference::the_hole_value_location() {
 }
-ExternalReference ExternalReference::roots_address() {
-return ExternalReference(Heap::roots_address());
-}
 ExternalReference ExternalReference::address_of_stack_guard_limit() {
 return ExternalReference(StackGuard::address_of_jslimit());
 }


@@ -398,9 +398,6 @@ class ExternalReference BASE_EMBEDDED {
 // Static variable Factory::the_hole_value.location()
 static ExternalReference the_hole_value_location();
-// Static variable Heap::roots_address()
-static ExternalReference roots_address();
 // Static variable StackGuard::address_of_jslimit()
 static ExternalReference address_of_stack_guard_limit();


@@ -731,9 +731,6 @@ class Heap : public AllStatic {
 // Update the next script id.
 static inline void SetLastScriptId(Object* last_script_id);
-// Generated code can embed this address to get access to the roots.
-static Object** roots_address() { return roots_; }
 #ifdef DEBUG
 static void Print();
 static void PrintHandles();
@@ -840,26 +837,6 @@ class Heap : public AllStatic {
 > old_gen_allocation_limit_;
 }
-// Declare all the root indices.
-enum RootListIndex {
-#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
-STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
-#undef ROOT_INDEX_DECLARATION
-// Utility type maps
-#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
-STRUCT_LIST(DECLARE_STRUCT_MAP)
-#undef DECLARE_STRUCT_MAP
-#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
-SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
-#undef SYMBOL_DECLARATION
-kSymbolTableRootIndex,
-kStrongRootListLength = kSymbolTableRootIndex,
-kRootListLength
-};
 private:
 static int semispace_size_;
 static int initial_semispace_size_;
@@ -940,6 +917,26 @@ class Heap : public AllStatic {
 // last GC.
 static int old_gen_exhausted_;
+// Declare all the root indices.
+enum RootListIndex {
+#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
+STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
+#undef ROOT_INDEX_DECLARATION
+// Utility type maps
+#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
+STRUCT_LIST(DECLARE_STRUCT_MAP)
+#undef DECLARE_STRUCT_MAP
+#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
+SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
+#undef SYMBOL_DECLARATION
+kSymbolTableRootIndex,
+kStrongRootListLength = kSymbolTableRootIndex,
+kRootListLength
+};
 static Object* roots_[kRootListLength];
 struct StringTypeTable {


@@ -672,17 +672,13 @@ void ExternalReferenceTable::PopulateTable() {
 UNCLASSIFIED,
 2,
 "Factory::the_hole_value().location()");
-Add(ExternalReference::roots_address().address(),
-UNCLASSIFIED,
-3,
-"Heap::roots_address()");
 Add(ExternalReference::address_of_stack_guard_limit().address(),
 UNCLASSIFIED,
-4,
+3,
 "StackGuard::address_of_jslimit()");
 Add(ExternalReference::address_of_regexp_stack_limit().address(),
 UNCLASSIFIED,
-5,
+4,
 "RegExpStack::limit_address()");
 Add(ExternalReference::new_space_start().address(),
 UNCLASSIFIED,
@@ -703,36 +699,36 @@ void ExternalReferenceTable::PopulateTable() {
 #ifdef ENABLE_DEBUGGER_SUPPORT
 Add(ExternalReference::debug_break().address(),
 UNCLASSIFIED,
-10,
+5,
 "Debug::Break()");
 Add(ExternalReference::debug_step_in_fp_address().address(),
 UNCLASSIFIED,
-11,
+10,
 "Debug::step_in_fp_addr()");
 #endif
 Add(ExternalReference::double_fp_operation(Token::ADD).address(),
 UNCLASSIFIED,
-12,
+11,
 "add_two_doubles");
 Add(ExternalReference::double_fp_operation(Token::SUB).address(),
 UNCLASSIFIED,
-13,
+12,
 "sub_two_doubles");
 Add(ExternalReference::double_fp_operation(Token::MUL).address(),
 UNCLASSIFIED,
-14,
+13,
 "mul_two_doubles");
 Add(ExternalReference::double_fp_operation(Token::DIV).address(),
 UNCLASSIFIED,
-15,
+14,
 "div_two_doubles");
 Add(ExternalReference::double_fp_operation(Token::MOD).address(),
 UNCLASSIFIED,
-16,
+15,
 "mod_two_doubles");
 Add(ExternalReference::compare_doubles().address(),
 UNCLASSIFIED,
-17,
+16,
 "compare_doubles");
 }


@@ -123,13 +123,13 @@ TEST(Type0) {
 "20354189 eorcss r4, r5, r9, lsl #3");
 COMPARE(sub(r5, r6, Operand(r10, LSL, 31), LeaveCC, hs),
-"20465f8a subcs r5, r6, r10, lsl #31");
+"20465f8a subcs r5, r6, sl, lsl #31");
 COMPARE(sub(r5, r6, Operand(r10, LSL, 30), SetCC, cc),
-"30565f0a subccs r5, r6, r10, lsl #30");
+"30565f0a subccs r5, r6, sl, lsl #30");
 COMPARE(sub(r5, r6, Operand(r10, LSL, 24), LeaveCC, lo),
-"30465c0a subcc r5, r6, r10, lsl #24");
+"30465c0a subcc r5, r6, sl, lsl #24");
 COMPARE(sub(r5, r6, Operand(r10, LSL, 16), SetCC, mi),
-"4056580a submis r5, r6, r10, lsl #16");
+"4056580a submis r5, r6, sl, lsl #16");
 COMPARE(rsb(r6, r7, Operand(fp)),
 "e067600b rsb r6, r7, fp");
@@ -163,7 +163,7 @@ TEST(Type0) {
 COMPARE(sbc(r7, r9, Operand(ip, ROR, 4)),
 "e0c9726c sbc r7, r9, ip, ror #4");
 COMPARE(sbc(r7, r10, Operand(ip), SetCC),
-"e0da700c sbcs r7, r10, ip");
+"e0da700c sbcs r7, sl, ip");
 COMPARE(sbc(r7, ip, Operand(ip, ROR, 31), SetCC, hi),
 "80dc7fec sbchis r7, ip, ip, ror #31");
@@ -240,7 +240,7 @@ TEST(Type0) {
 "51d10004 bicpls r0, r1, r4");
 COMPARE(mvn(r10, Operand(r1)),
-"e1e0a001 mvn r10, r1");
+"e1e0a001 mvn sl, r1");
 COMPARE(mvn(r9, Operand(r2)),
 "e1e09002 mvn r9, r2");
 COMPARE(mvn(r0, Operand(r3), SetCC),


@@ -125,9 +125,9 @@ TEST(ExternalReferenceEncoder) {
 encoder.Encode(the_hole_value_location.address()));
 ExternalReference stack_guard_limit_address =
 ExternalReference::address_of_stack_guard_limit();
-CHECK_EQ(make_code(UNCLASSIFIED, 4),
+CHECK_EQ(make_code(UNCLASSIFIED, 3),
 encoder.Encode(stack_guard_limit_address.address()));
-CHECK_EQ(make_code(UNCLASSIFIED, 10),
+CHECK_EQ(make_code(UNCLASSIFIED, 5),
 encoder.Encode(ExternalReference::debug_break().address()));
 CHECK_EQ(make_code(UNCLASSIFIED, 6),
 encoder.Encode(ExternalReference::new_space_start().address()));
@@ -157,9 +157,9 @@ TEST(ExternalReferenceDecoder) {
 CHECK_EQ(ExternalReference::the_hole_value_location().address(),
 decoder.Decode(make_code(UNCLASSIFIED, 2)));
 CHECK_EQ(ExternalReference::address_of_stack_guard_limit().address(),
-decoder.Decode(make_code(UNCLASSIFIED, 4)));
+decoder.Decode(make_code(UNCLASSIFIED, 3)));
 CHECK_EQ(ExternalReference::debug_break().address(),
-decoder.Decode(make_code(UNCLASSIFIED, 10)));
+decoder.Decode(make_code(UNCLASSIFIED, 5)));
 CHECK_EQ(ExternalReference::new_space_start().address(),
 decoder.Decode(make_code(UNCLASSIFIED, 6)));
 }