Revert 8122 (stub call asserts) while test failures are investigated.
Review URL: http://codereview.chromium.org/7050039
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8125 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
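This revert undoes the switch from explicit EnterInternalFrame()/LeaveInternalFrame() pairs to scoped FrameScope objects (and the related AllowExternalCallThatCantCauseGC scopes) that r8122 introduced across the ARM and IA32 code generators. A minimal sketch of the two styles, using simplified stand-in types rather than the real MacroAssembler API:

  // Illustration only (not part of the original commit): simplified stand-in
  // types, not the real v8::internal::MacroAssembler API.
  #include <cstdio>

  class MacroAssembler {
   public:
    void EnterInternalFrame() { std::puts("enter INTERNAL frame"); }
    void LeaveInternalFrame() { std::puts("leave INTERNAL frame"); }
  };

  // RAII wrapper in the spirit of the FrameScope added by r8122: the frame is
  // entered in the constructor and left in the destructor.
  class FrameScope {
   public:
    explicit FrameScope(MacroAssembler* masm) : masm_(masm) {
      masm_->EnterInternalFrame();
    }
    ~FrameScope() { masm_->LeaveInternalFrame(); }
   private:
    MacroAssembler* masm_;
  };

  int main() {
    MacroAssembler masm;

    // Style removed by this revert: frame lifetime tied to a C++ scope.
    {
      FrameScope scope(&masm);
      std::puts("  ...emit code that may trigger a GC...");
    }

    // Style restored by this revert: explicit, manually paired calls.
    masm.EnterInternalFrame();
    std::puts("  ...emit code that may trigger a GC...");
    masm.LeaveInternalFrame();
    return 0;
  }

The diffs below restore the explicit enter/leave form in each affected stub and builtin.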
parent 6453056bb6
commit 7a1a72c701
include/v8.h | 10
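The "stub call asserts" named in the title are the frame-aware checks that guarded CallStub and friends: r8122 asserted AllowThisStubCall(stub), which refuses a stub that may set up a frame when the caller has none, while this revert goes back to the coarse allow_stub_calls() flag (see the AllowsStubCalls() and CompilingCallsToThisStubIsGCSafe() predicates in the code-stubs.h hunks below). A small runnable sketch of that predicate, using simplified stand-ins rather than the real classes:

  // Illustration only (not part of the original commit): simplified stand-ins
  // for the CodeStub and MacroAssembler state involved in the assert.
  #include <cassert>

  struct CodeStub {
    bool SometimesSetsUpAFrame() const { return sets_up_a_frame; }
    bool CompilingCallsToThisStubIsGCSafe() const { return gc_safe; }
    bool sets_up_a_frame = true;
    bool gc_safe = false;
  };

  struct MacroAssemblerState {
    bool has_frame = false;
    bool allow_stub_calls = true;

    // Predicate added by r8122 and removed again by this revert: a stub that
    // may build a frame must not be called unless the caller has one.
    bool AllowThisStubCall(const CodeStub& stub) const {
      if (!has_frame && stub.SometimesSetsUpAFrame()) return false;
      return stub.CompilingCallsToThisStubIsGCSafe() || allow_stub_calls;
    }
  };

  int main() {
    MacroAssemblerState masm;
    CodeStub stub;

    // Check restored by this revert: only the coarse flag is consulted.
    assert(masm.allow_stub_calls);

    // r8122-style check being reverted: passes only once a frame exists.
    masm.has_frame = true;
    assert(masm.AllowThisStubCall(stub));
    return 0;
  }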
@@ -3431,9 +3431,9 @@ class V8EXPORT Context {
 *
 * v8::Locker is a scoped lock object. While it's
 * active (i.e. between its construction and destruction) the current thread is
 * allowed to use the locked isolate. V8 guarantees that an isolate can be
 * locked by at most one thread at any time. In other words, the scope of a
 * v8::Locker is a critical section.
 * allowed to use the locked isolate. V8 guarantees that an isolate can be locked
 * by at most one thread at any time. In other words, the scope of a v8::Locker is
 * a critical section.
 *
 * Sample usage:
 * \code
@@ -3535,8 +3535,8 @@ class V8EXPORT Locker {
static void StopPreemption();

/**
* Returns whether or not the locker for a given isolate, or default isolate
* if NULL is given, is locked by the current thread.
* Returns whether or not the locker for a given isolate, or default isolate if NULL is given,
* is locked by the current thread.
*/
static bool IsLocked(Isolate* isolate = NULL);
File diff suppressed because it is too large
@@ -846,11 +846,9 @@ void FloatingPointHelper::CallCCodeForDoubleOperation(
__ vmov(d0, r0, r1);
__ vmov(d1, r2, r3);
}
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
}
// Call C routine that may not cause GC or other trouble.
__ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()),
0, 2);
// Store answer in the overwritable heap number. Double returned in
// registers r0 and r1 or in d0.
if (masm->use_eabi_hardfloat()) {
@@ -1619,8 +1617,6 @@ void CompareStub::Generate(MacroAssembler* masm) {
// This stub does not handle the inlined cases (Smis, Booleans, undefined).
// The stub returns zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
// This stub uses VFP3 instructions.
CpuFeatures::Scope scope(VFP3);

@@ -1886,13 +1882,12 @@ void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
__ jmp(&heapnumber_allocated);

__ bind(&slow_allocate_heapnumber);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(r0);
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ mov(r1, Operand(r0));
__ pop(r0);
}
__ EnterInternalFrame();
__ push(r0);
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ mov(r1, Operand(r0));
__ pop(r0);
__ LeaveInternalFrame();

__ bind(&heapnumber_allocated);
__ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
@@ -1933,14 +1928,13 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
__ jmp(&heapnumber_allocated);

__ bind(&slow_allocate_heapnumber);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(r0);  // Push the heap number, not the untagged int32.
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ mov(r2, r0);  // Move the new heap number into r2.
// Get the heap number into r0, now that the new heap number is in r2.
__ pop(r0);
}
__ EnterInternalFrame();
__ push(r0);  // Push the heap number, not the untagged int32.
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ mov(r2, r0);  // Move the new heap number into r2.
// Get the heap number into r0, now that the new heap number is in r2.
__ pop(r0);
__ LeaveInternalFrame();

// Convert the heap number in r0 to an untagged integer in r1.
// This can't go slow-case because it's the same number we already
@@ -3169,11 +3163,10 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
__ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(r0);
__ CallRuntime(RuntimeFunction(), 1);
}
__ EnterInternalFrame();
__ push(r0);
__ CallRuntime(RuntimeFunction(), 1);
__ LeaveInternalFrame();
__ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ Ret();

@@ -3186,15 +3179,14 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {

// We return the value in d2 without adding it to the cache, but
// we cause a scavenging GC so that future allocations will succeed.
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ EnterInternalFrame();

// Allocate an aligned object larger than a HeapNumber.
ASSERT(4 * kPointerSize >= HeapNumber::kSize);
__ mov(scratch0, Operand(4 * kPointerSize));
__ push(scratch0);
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
}
// Allocate an aligned object larger than a HeapNumber.
ASSERT(4 * kPointerSize >= HeapNumber::kSize);
__ mov(scratch0, Operand(4 * kPointerSize));
__ push(scratch0);
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
__ LeaveInternalFrame();
__ Ret();
}
}
@@ -3306,14 +3298,11 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ push(lr);
__ PrepareCallCFunction(1, 1, scratch);
__ SetCallCDoubleArguments(double_base, exponent);
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
ExternalReference::power_double_int_function(masm->isolate()),
1, 1);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
}
__ CallCFunction(
ExternalReference::power_double_int_function(masm->isolate()),
1, 1);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
__ mov(r0, heapnumber);
@@ -3339,14 +3328,11 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ push(lr);
__ PrepareCallCFunction(0, 2, scratch);
__ SetCallCDoubleArguments(double_base, double_exponent);
{
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
ExternalReference::power_double_double_function(masm->isolate()),
0, 2);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
}
__ CallCFunction(
ExternalReference::power_double_double_function(masm->isolate()),
0, 2);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
__ mov(r0, heapnumber);
@@ -3518,7 +3504,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ sub(r6, r6, Operand(kPointerSize));

// Enter the exit frame that transitions from JavaScript to C++.
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(save_doubles_);

// Setup argc and the builtin function in callee-saved registers.
@@ -3884,11 +3869,10 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r0, r1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
__ EnterInternalFrame();
__ Push(r0, r1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
__ LeaveInternalFrame();
__ cmp(r0, Operand(0));
__ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
__ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
@@ -6153,13 +6137,12 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r1, r0);
__ mov(ip, Operand(Smi::FromInt(op_)));
__ push(ip);
__ CallExternalReference(miss, 3);
}
__ EnterInternalFrame();
__ Push(r1, r0);
__ mov(ip, Operand(Smi::FromInt(op_)));
__ push(ip);
__ CallExternalReference(miss, 3);
__ LeaveInternalFrame();
// Compute the entry point of the rewritten stub.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore registers.
@@ -6340,8 +6323,6 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,


void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
// Registers:
// result: StringDictionary to probe
// r1: key
@@ -64,8 +64,6 @@ class ToBooleanStub: public CodeStub {

void Generate(MacroAssembler* masm);

virtual bool SometimesSetsUpAFrame() { return false; }

private:
Register tos_;
Major MajorKey() { return ToBoolean; }
@@ -639,8 +637,6 @@ class StringDictionaryLookupStub: public CodeStub {
Register r0,
Register r1);

virtual bool SometimesSetsUpAFrame() { return false; }

private:
static const int kInlinedProbes = 4;
static const int kTotalProbes = 20;
@@ -660,7 +656,7 @@ class StringDictionaryLookupStub: public CodeStub {
}
#endif

Major MajorKey() { return StringDictionaryLookup; }
Major MajorKey() { return StringDictionaryNegativeLookup; }

int MinorKey() {
return LookupModeBits::encode(mode_);
@@ -38,16 +38,12 @@ namespace internal {
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
masm->EnterFrame(StackFrame::INTERNAL);
ASSERT(!masm->has_frame());
masm->set_has_frame(true);
masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
masm->LeaveFrame(StackFrame::INTERNAL);
ASSERT(masm->has_frame());
masm->set_has_frame(false);
masm->LeaveInternalFrame();
}

@ -132,58 +132,56 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() {
|
||||
static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
|
||||
RegList object_regs,
|
||||
RegList non_object_regs) {
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as a smi causing it to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
if ((object_regs | non_object_regs) != 0) {
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
if (FLAG_debug_code) {
|
||||
__ tst(reg, Operand(0xc0000000));
|
||||
__ Assert(eq, "Unable to encode value as smi");
|
||||
}
|
||||
__ mov(reg, Operand(reg, LSL, kSmiTagSize));
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as a smi causing it to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
if ((object_regs | non_object_regs) != 0) {
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
if (FLAG_debug_code) {
|
||||
__ tst(reg, Operand(0xc0000000));
|
||||
__ Assert(eq, "Unable to encode value as smi");
|
||||
}
|
||||
__ mov(reg, Operand(reg, LSL, kSmiTagSize));
|
||||
}
|
||||
__ stm(db_w, sp, object_regs | non_object_regs);
|
||||
}
|
||||
__ stm(db_w, sp, object_regs | non_object_regs);
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
#endif
|
||||
__ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments
|
||||
__ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
|
||||
__ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments
|
||||
__ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
|
||||
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
|
||||
// Restore the register values from the expression stack.
|
||||
if ((object_regs | non_object_regs) != 0) {
|
||||
__ ldm(ia_w, sp, object_regs | non_object_regs);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ mov(reg, Operand(reg, LSR, kSmiTagSize));
|
||||
}
|
||||
if (FLAG_debug_code &&
|
||||
(((object_regs |non_object_regs) & (1 << r)) == 0)) {
|
||||
__ mov(reg, Operand(kDebugZapValue));
|
||||
}
|
||||
// Restore the register values from the expression stack.
|
||||
if ((object_regs | non_object_regs) != 0) {
|
||||
__ ldm(ia_w, sp, object_regs | non_object_regs);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ mov(reg, Operand(reg, LSR, kSmiTagSize));
|
||||
}
|
||||
if (FLAG_debug_code &&
|
||||
(((object_regs |non_object_regs) & (1 << r)) == 0)) {
|
||||
__ mov(reg, Operand(kDebugZapValue));
|
||||
}
|
||||
}
|
||||
|
||||
// Leave the internal frame.
|
||||
}
|
||||
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Now that the break point has been handled, resume normal execution by
|
||||
// jumping to the target address intended by the caller and that was
|
||||
// overwritten by the address of DebugBreakXXX.
|
||||
|
@@ -603,10 +603,7 @@ void Deoptimizer::EntryGenerator::Generate() {
__ mov(r5, Operand(ExternalReference::isolate_address()));
__ str(r5, MemOperand(sp, 1 * kPointerSize));  // Isolate.
// Call Deoptimizer::New().
{
AllowExternalCallThatCantCauseGC scope(masm());
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
}
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);

// Preserve "deoptimizer" object in register r0 and get the input
// frame descriptor pointer to r1 (deoptimizer->input_);
@@ -660,11 +657,8 @@ void Deoptimizer::EntryGenerator::Generate() {
// r0: deoptimizer object; r1: scratch.
__ PrepareCallCFunction(1, r1);
// Call Deoptimizer::ComputeOutputFrames().
{
AllowExternalCallThatCantCauseGC scope(masm());
__ CallCFunction(
ExternalReference::compute_output_frames_function(isolate), 1);
}
__ CallCFunction(
ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(r0);  // Restore deoptimizer object (class Deoptimizer).

// Replace the current (input) frame with the output frames.
@@ -153,11 +153,6 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
__ bind(&ok);
}

// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
// the frame (that is done below).
FrameScope frame_scope(masm_, StackFrame::MANUAL);

int locals_count = scope()->num_stack_slots();

__ Push(lr, fp, cp, r1);
@ -603,22 +603,21 @@ static void GenerateCallMiss(MacroAssembler* masm,
|
||||
// Get the receiver of the function from the stack.
|
||||
__ ldr(r3, MemOperand(sp, argc * kPointerSize));
|
||||
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Push the receiver and the name of the function.
|
||||
__ Push(r3, r2);
|
||||
// Push the receiver and the name of the function.
|
||||
__ Push(r3, r2);
|
||||
|
||||
// Call the entry.
|
||||
__ mov(r0, Operand(2));
|
||||
__ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
|
||||
// Call the entry.
|
||||
__ mov(r0, Operand(2));
|
||||
__ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
|
||||
|
||||
CEntryStub stub(1);
|
||||
__ CallStub(&stub);
|
||||
CEntryStub stub(1);
|
||||
__ CallStub(&stub);
|
||||
|
||||
// Move result to r1 and leave the internal frame.
|
||||
__ mov(r1, Operand(r0));
|
||||
}
|
||||
// Move result to r1 and leave the internal frame.
|
||||
__ mov(r1, Operand(r0));
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check if the receiver is a global object of some sort.
|
||||
// This can happen only for regular CallIC but not KeyedCallIC.
|
||||
@ -751,13 +750,12 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
// This branch is taken when calling KeyedCallIC_Miss is neither required
|
||||
// nor beneficial.
|
||||
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(r2); // save the key
|
||||
__ Push(r1, r2); // pass the receiver and the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(r2); // restore the key
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(r2); // save the key
|
||||
__ Push(r1, r2); // pass the receiver and the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(r2); // restore the key
|
||||
__ LeaveInternalFrame();
|
||||
__ mov(r1, r0);
|
||||
__ jmp(&do_call);
|
||||
|
||||
|
@ -82,12 +82,6 @@ bool LCodeGen::GenerateCode() {
|
||||
status_ = GENERATING;
|
||||
CpuFeatures::Scope scope1(VFP3);
|
||||
CpuFeatures::Scope scope2(ARMv7);
|
||||
|
||||
// Open a frame scope to indicate that there is a frame on the stack. The
|
||||
// NONE indicates that the scope shouldn't actually generate code to set up
|
||||
// the frame (that is done in GeneatePrologue).
|
||||
FrameScope frame_scope(masm_, StackFrame::NONE);
|
||||
|
||||
return GeneratePrologue() &&
|
||||
GenerateBody() &&
|
||||
GenerateDeferredCode() &&
|
||||
|
@@ -42,8 +42,7 @@ namespace internal {
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
: Assembler(arg_isolate, buffer, size),
generating_stub_(false),
allow_stub_calls_(true),
has_frame_(false) {
allow_stub_calls_(true) {
if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
isolate());
@ -977,9 +976,6 @@ void MacroAssembler::InvokeCode(Register code,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
|
||||
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
|
||||
@ -1007,9 +1003,6 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
RelocInfo::Mode rmode,
|
||||
InvokeFlag flag,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
|
||||
InvokePrologue(expected, actual, code, no_reg, &done, flag,
|
||||
@ -1033,9 +1026,6 @@ void MacroAssembler::InvokeFunction(Register fun,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
// Contract with called JS functions requires that function is passed in r1.
|
||||
ASSERT(fun.is(r1));
|
||||
|
||||
@ -1060,9 +1050,6 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
ASSERT(function->is_compiled());
|
||||
|
||||
// Get the function and setup the context.
|
||||
@ -1118,10 +1105,10 @@ void MacroAssembler::IsObjectJSStringType(Register object,
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
void MacroAssembler::DebugBreak() {
|
||||
ASSERT(allow_stub_calls());
|
||||
mov(r0, Operand(0, RelocInfo::NONE));
|
||||
mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
|
||||
CEntryStub ces(1);
|
||||
ASSERT(AllowThisStubCall(&ces));
|
||||
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
|
||||
}
|
||||
#endif
|
||||
@@ -1785,13 +1772,13 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,


void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
ASSERT(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond) {
ASSERT(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -1802,12 +1789,13 @@ MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond) {


void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}


MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -1910,12 +1898,6 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
}


void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
add(sp, sp, Operand(num_arguments * kPointerSize));
@ -2385,9 +2367,6 @@ MaybeObject* MacroAssembler::TryJumpToExternalReference(
|
||||
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
// You can't call a builtin without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
GetBuiltinEntry(r2, id);
|
||||
if (flag == CALL_FUNCTION) {
|
||||
call_wrapper.BeforeCall(CallSize(r2));
|
||||
@@ -2516,20 +2495,14 @@ void MacroAssembler::Abort(const char* msg) {
RecordComment(msg);
}
#endif
// Disable stub call restrictions to always allow calls to abort.
AllowStubCallsScope allow_scope(this, true);

mov(r0, Operand(p0));
push(r0);
mov(r0, Operand(Smi::FromInt(p1 - p0)));
push(r0);
// Disable stub call restrictions to always allow calls to abort.
if (!has_frame_) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(this, StackFrame::NONE);
CallRuntime(Runtime::kAbort, 2);
} else {
CallRuntime(Runtime::kAbort, 2);
}
CallRuntime(Runtime::kAbort, 2);
// will not return here
if (is_const_pool_blocked()) {
// If the calling code cares about the exact number of
@ -3042,7 +3015,6 @@ void MacroAssembler::CallCFunctionHelper(Register function,
|
||||
Register scratch,
|
||||
int num_reg_arguments,
|
||||
int num_double_arguments) {
|
||||
ASSERT(has_frame());
|
||||
// Make sure that the stack is aligned before calling a C function unless
|
||||
// running in the simulator. The simulator has its own alignment check which
|
||||
// provides more information.
|
||||
|
@ -29,7 +29,6 @@
|
||||
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
|
||||
|
||||
#include "assembler.h"
|
||||
#include "frames.h"
|
||||
#include "v8globals.h"
|
||||
|
||||
namespace v8 {
|
||||
@@ -312,6 +311,16 @@ class MacroAssembler: public Assembler {
const Register fpscr_flags,
const Condition cond = al);


// ---------------------------------------------------------------------------
// Activation frames

void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }

void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }

// Enter exit frame.
// stack_space - extra stack space, used for alignment before call to C.
void EnterExitFrame(bool save_doubles, int stack_space = 0);
@@ -862,9 +871,6 @@ class MacroAssembler: public Assembler {
bool generating_stub() { return generating_stub_; }
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; }
void set_has_frame(bool value) { has_frame_ = value; }
bool has_frame() { return has_frame_; }
inline bool AllowThisStubCall(CodeStub* stub);

// EABI variant for double arguments in use.
bool use_eabi_hardfloat() {
@ -1011,10 +1017,6 @@ class MacroAssembler: public Assembler {
|
||||
|
||||
void LoadInstanceDescriptors(Register map, Register descriptors);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
private:
|
||||
void CallCFunctionHelper(Register function,
|
||||
ExternalReference function_reference,
|
||||
@ -1038,6 +1040,10 @@ class MacroAssembler: public Assembler {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
void InitializeNewString(Register string,
|
||||
Register length,
|
||||
Heap::RootListIndex map_index,
|
||||
@ -1051,7 +1057,6 @@ class MacroAssembler: public Assembler {
|
||||
|
||||
bool generating_stub_;
|
||||
bool allow_stub_calls_;
|
||||
bool has_frame_;
|
||||
// This handle will be patched with the code object on installation.
|
||||
Handle<Object> code_object_;
|
||||
|
||||
|
@ -371,12 +371,9 @@ void RegExpMacroAssemblerARM::CheckNotBackReferenceIgnoreCase(
|
||||
// Isolate.
|
||||
__ mov(r3, Operand(ExternalReference::isolate_address()));
|
||||
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm_);
|
||||
ExternalReference function =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
|
||||
__ CallCFunction(function, argument_count);
|
||||
}
|
||||
ExternalReference function =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
|
||||
__ CallCFunction(function, argument_count);
|
||||
|
||||
// Check if function returned non-zero for success or zero for failure.
|
||||
__ cmp(r0, Operand(0, RelocInfo::NONE));
|
||||
@ -614,12 +611,6 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
|
||||
|
||||
// Entry code:
|
||||
__ bind(&entry_label_);
|
||||
|
||||
// Tell the system that we have a stack frame. Because the type is MANUAL, no
|
||||
// is generated.
|
||||
FrameScope scope(masm_, StackFrame::MANUAL);
|
||||
|
||||
// Actually emit code to start a new stack frame.
|
||||
// Push arguments
|
||||
// Save callee-save registers.
|
||||
// Start new stack frame.
|
||||
|
@ -800,7 +800,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
miss_label);
|
||||
|
||||
// Call a runtime function to load the interceptor property.
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
// Save the name_ register across the call.
|
||||
__ push(name_);
|
||||
|
||||
@ -817,8 +817,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
|
||||
// Restore the name_ register.
|
||||
__ pop(name_);
|
||||
|
||||
// Leave the internal frame.
|
||||
__ LeaveInternalFrame();
|
||||
}
|
||||
|
||||
void LoadWithInterceptor(MacroAssembler* masm,
|
||||
@ -827,19 +826,18 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
JSObject* holder_obj,
|
||||
Register scratch,
|
||||
Label* interceptor_succeeded) {
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ Push(holder, name_);
|
||||
__ EnterInternalFrame();
|
||||
__ Push(holder, name_);
|
||||
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
}
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// If interceptor returns no-result sentinel, call the constant function.
|
||||
__ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
@ -1300,44 +1298,42 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
|
||||
|
||||
// Save necessary data before invoking an interceptor.
|
||||
// Requires a frame to make GC aware of pushed pointers.
|
||||
{
|
||||
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ Push(receiver, holder_reg, name_reg);
|
||||
} else {
|
||||
__ Push(holder_reg, name_reg);
|
||||
}
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
__ cmp(r0, scratch1);
|
||||
__ b(eq, &interceptor_failed);
|
||||
frame_scope.GenerateLeaveFrame();
|
||||
__ Ret();
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
// Leave the internal frame.
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ Push(receiver, holder_reg, name_reg);
|
||||
} else {
|
||||
__ Push(holder_reg, name_reg);
|
||||
}
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
__ cmp(r0, scratch1);
|
||||
__ b(eq, &interceptor_failed);
|
||||
__ LeaveInternalFrame();
|
||||
__ Ret();
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check that the maps from interceptor's holder to lookup's holder
|
||||
// haven't changed. And load lookup's holder into |holder| register.
|
||||
if (interceptor_holder != lookup->holder()) {
|
||||
|
@@ -1616,7 +1616,6 @@ void Builtins::Setup(bool create_heap_objects) {
// We pass all arguments to the generator, but it may not use all of
// them. This works because the first arguments are on top of the
// stack.
ASSERT(!masm.has_frame());
g(&masm, functions[i].name, functions[i].extra_args);
// Move the code into the object heap.
CodeDesc desc;
@@ -52,12 +52,11 @@ void CodeStub::GenerateCode(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
masm->isolate()->counters()->code_stubs()->Increment();

// Nested stubs are not allowed for leaves.
AllowStubCallsScope allow_scope(masm, false);
// Nested stubs are not allowed for leafs.
AllowStubCallsScope allow_scope(masm, AllowsStubCalls());

// Generate the code for the stub.
masm->set_generating_stub(true);
NoCurrentFrameScope scope(masm);
Generate(masm);
}

@@ -75,7 +75,7 @@ namespace internal {
V(KeyedLoadExternalArray) \
V(KeyedStoreExternalArray) \
V(DebuggerStatement) \
V(StringDictionaryLookup)
V(StringDictionaryNegativeLookup)

// List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM
@@ -144,13 +144,6 @@ class CodeStub BASE_EMBEDDED {

virtual ~CodeStub() {}

// See comment above, where Instanceof is defined.
virtual bool CompilingCallsToThisStubIsGCSafe() {
return MajorKey() <= Instanceof;
}

virtual bool SometimesSetsUpAFrame() { return true; }

protected:
static const int kMajorBits = 6;
static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
@@ -207,6 +200,9 @@ class CodeStub BASE_EMBEDDED {
MajorKeyBits::encode(MajorKey());
}

// See comment above, where Instanceof is defined.
bool AllowsStubCalls() { return MajorKey() <= Instanceof; }

class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};

@@ -579,11 +575,6 @@ class CEntryStub : public CodeStub {
void Generate(MacroAssembler* masm);
void SaveDoubles() { save_doubles_ = true; }

// The version of this stub that doesn't save doubles is generated ahead of
// time, so it's OK to call it from other stubs that can't cope with GC during
// their code generation.
virtual bool CompilingCallsToThisStubIsGCSafe() { return !save_doubles_; }

private:
void GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
@@ -58,9 +58,6 @@ DeoptimizerData::~DeoptimizerData() {
}
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
BailoutType type,
unsigned bailout_id,
@@ -354,8 +351,6 @@ int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
}


// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
if (bailout_type_ == OSR) {
DoComputeOsrOutputFrame();
@@ -138,10 +138,7 @@ class StackFrame BASE_EMBEDDED {
enum Type {
NONE = 0,
STACK_FRAME_TYPE_LIST(DECLARE_TYPE)
NUMBER_OF_TYPES,
// Used by FrameScope to indicate that the stack frame is constructed
// manually and the FrameScope does not need to emit code.
MANUAL
NUMBER_OF_TYPES
};
#undef DECLARE_TYPE

File diff suppressed because it is too large
@@ -242,8 +242,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {

// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
Label false_result, true_result, not_string;
__ mov(eax, Operand(esp, 1 * kPointerSize));
Factory* factory = masm->isolate()->factory();
@ -723,12 +721,11 @@ void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
|
||||
__ jmp(&heapnumber_allocated);
|
||||
|
||||
__ bind(&slow_allocate_heapnumber);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(edx);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
__ pop(edx);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(edx);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
__ pop(edx);
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
__ bind(&heapnumber_allocated);
|
||||
// eax: allocated 'empty' number
|
||||
@ -771,16 +768,15 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
|
||||
__ jmp(&heapnumber_allocated);
|
||||
|
||||
__ bind(&slow_allocate_heapnumber);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
// Push the original HeapNumber on the stack. The integer value can't
|
||||
// be stored since it's untagged and not in the smi range (so we can't
|
||||
// smi-tag it). We'll recalculate the value after the GC instead.
|
||||
__ push(ebx);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
// New HeapNumber is in eax.
|
||||
__ pop(edx);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
// Push the original HeapNumber on the stack. The integer value can't
|
||||
// be stored since it's untagged and not in the smi range (so we can't
|
||||
// smi-tag it). We'll recalculate the value after the GC instead.
|
||||
__ push(ebx);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
// New HeapNumber is in eax.
|
||||
__ pop(edx);
|
||||
__ LeaveInternalFrame();
|
||||
// IntegerConvert uses ebx and edi as scratch registers.
|
||||
// This conversion won't go slow-case.
|
||||
IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
|
||||
@ -2292,12 +2288,11 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
|
||||
__ add(Operand(esp), Immediate(kDoubleSize));
|
||||
// We return the value in xmm1 without adding it to the cache, but
|
||||
// we cause a scavenging GC so that future allocations will succeed.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
// Allocate an unused object bigger than a HeapNumber.
|
||||
__ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
|
||||
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
// Allocate an unused object bigger than a HeapNumber.
|
||||
__ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
|
||||
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
|
||||
__ LeaveInternalFrame();
|
||||
__ Ret();
|
||||
}
|
||||
|
||||
@ -2314,11 +2309,10 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
|
||||
__ bind(&runtime_call);
|
||||
__ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
|
||||
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(eax);
|
||||
__ CallRuntime(RuntimeFunction(), 1);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(eax);
|
||||
__ CallRuntime(RuntimeFunction(), 1);
|
||||
__ LeaveInternalFrame();
|
||||
__ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
|
||||
__ Ret();
|
||||
}
|
||||
@ -4531,12 +4525,11 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
||||
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
|
||||
} else {
|
||||
// Call the builtin and convert 0/1 to true/false.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(object);
|
||||
__ push(function);
|
||||
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(object);
|
||||
__ push(function);
|
||||
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
|
||||
__ LeaveInternalFrame();
|
||||
Label true_value, done;
|
||||
__ test(eax, Operand(eax));
|
||||
__ j(zero, &true_value, Label::kNear);
|
||||
@ -5966,16 +5959,15 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
|
||||
__ push(eax);
|
||||
__ push(ecx);
|
||||
|
||||
{
|
||||
// Call the runtime system in a fresh internal frame.
|
||||
ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
|
||||
masm->isolate());
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(edx);
|
||||
__ push(eax);
|
||||
__ push(Immediate(Smi::FromInt(op_)));
|
||||
__ CallExternalReference(miss, 3);
|
||||
}
|
||||
// Call the runtime system in a fresh internal frame.
|
||||
ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
|
||||
masm->isolate());
|
||||
__ EnterInternalFrame();
|
||||
__ push(edx);
|
||||
__ push(eax);
|
||||
__ push(Immediate(Smi::FromInt(op_)));
|
||||
__ CallExternalReference(miss, 3);
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Compute the entry point of the rewritten stub.
|
||||
__ lea(edi, FieldOperand(eax, Code::kHeaderSize));
|
||||
@ -6116,8 +6108,6 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
|
||||
|
||||
|
||||
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
|
||||
// This stub overrides SometimesSetsUpAFrame() to return false. That means
|
||||
// we cannot call anything that could cause a GC from this stub.
|
||||
// Stack frame on entry:
|
||||
// esp[0 * kPointerSize]: return address.
|
||||
// esp[1 * kPointerSize]: key's hash.
|
||||
|
@ -66,8 +66,6 @@ class ToBooleanStub: public CodeStub {
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
virtual bool SometimesSetsUpAFrame() { return false; }
|
||||
|
||||
private:
|
||||
Major MajorKey() { return ToBoolean; }
|
||||
int MinorKey() { return 0; }
|
||||
@ -474,8 +472,6 @@ class StringDictionaryLookupStub: public CodeStub {
|
||||
Register r0,
|
||||
Register r1);
|
||||
|
||||
virtual bool SometimesSetsUpAFrame() { return false; }
|
||||
|
||||
private:
|
||||
static const int kInlinedProbes = 4;
|
||||
static const int kTotalProbes = 20;
|
||||
@ -495,7 +491,7 @@ class StringDictionaryLookupStub: public CodeStub {
|
||||
}
|
||||
#endif
|
||||
|
||||
Major MajorKey() { return StringDictionaryLookup; }
|
||||
Major MajorKey() { return StringDictionaryNegativeLookup; }
|
||||
|
||||
int MinorKey() {
|
||||
return DictionaryBits::encode(dictionary_.code()) |
|
||||
|
@ -39,16 +39,12 @@ namespace internal {
|
||||
// Platform-specific RuntimeCallHelper functions.
|
||||
|
||||
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
|
||||
masm->EnterFrame(StackFrame::INTERNAL);
|
||||
ASSERT(!masm->has_frame());
|
||||
masm->set_has_frame(true);
|
||||
masm->EnterInternalFrame();
|
||||
}
|
||||
|
||||
|
||||
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
|
||||
masm->LeaveFrame(StackFrame::INTERNAL);
|
||||
ASSERT(masm->has_frame());
|
||||
masm->set_has_frame(false);
|
||||
masm->LeaveInternalFrame();
|
||||
}
|
||||
|
||||
|
||||
|
@ -100,60 +100,59 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
|
||||
RegList non_object_regs,
|
||||
bool convert_call_to_jmp) {
|
||||
// Enter an internal frame.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as a smi causing it to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ push(reg);
|
||||
}
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
if (FLAG_debug_code) {
|
||||
__ test(reg, Immediate(0xc0000000));
|
||||
__ Assert(zero, "Unable to encode value as smi");
|
||||
}
|
||||
__ SmiTag(reg);
|
||||
__ push(reg);
|
||||
}
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as a smi causing it to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ push(reg);
|
||||
}
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
if (FLAG_debug_code) {
|
||||
__ test(reg, Immediate(0xc0000000));
|
||||
__ Assert(zero, "Unable to encode value as smi");
|
||||
}
|
||||
__ SmiTag(reg);
|
||||
__ push(reg);
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
#endif
|
||||
__ Set(eax, Immediate(0)); // No arguments.
|
||||
__ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
|
||||
__ Set(eax, Immediate(0)); // No arguments.
|
||||
__ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
|
||||
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
|
||||
// Restore the register values containing object pointers from the
|
||||
// expression stack.
|
||||
for (int i = kNumJSCallerSaved; --i >= 0;) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if (FLAG_debug_code) {
|
||||
__ Set(reg, Immediate(kDebugZapValue));
|
||||
}
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
}
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
__ SmiUntag(reg);
|
||||
}
|
||||
// Restore the register values containing object pointers from the expression
|
||||
// stack.
|
||||
for (int i = kNumJSCallerSaved; --i >= 0;) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if (FLAG_debug_code) {
|
||||
__ Set(reg, Immediate(kDebugZapValue));
|
||||
}
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
}
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
__ SmiUntag(reg);
|
||||
}
|
||||
|
||||
// Get rid of the internal frame.
|
||||
}
|
||||
|
||||
// Get rid of the internal frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// If this call did not replace a call but patched other code then there will
|
||||
// be an unwanted return address left on the stack. Here we get rid of that.
|
||||
if (convert_call_to_jmp) {
|
||||
|
@ -650,10 +650,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
__ mov(Operand(esp, 4 * kPointerSize), edx); // Fp-to-sp delta.
|
||||
__ mov(Operand(esp, 5 * kPointerSize),
|
||||
Immediate(ExternalReference::isolate_address()));
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm());
|
||||
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
|
||||
}
|
||||
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
|
||||
|
||||
// Preserve deoptimizer object in register eax and get the input
|
||||
// frame descriptor pointer.
|
||||
@ -701,11 +698,8 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
__ push(eax);
|
||||
__ PrepareCallCFunction(1, ebx);
|
||||
__ mov(Operand(esp, 0 * kPointerSize), eax);
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm());
|
||||
__ CallCFunction(
|
||||
ExternalReference::compute_output_frames_function(isolate), 1);
|
||||
}
|
||||
__ CallCFunction(
|
||||
ExternalReference::compute_output_frames_function(isolate), 1);
|
||||
__ pop(eax);
|
||||
|
||||
// Replace the current frame with the output frames.
|
||||
|
@ -146,11 +146,6 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
|
||||
__ bind(&ok);
|
||||
}
|
||||
|
||||
// Open a frame scope to indicate that there is a frame on the stack. The
|
||||
// MANUAL indicates that the scope shouldn't actually generate code to set up
|
||||
// the frame (that is done below).
|
||||
FrameScope frame_scope(masm_, StackFrame::MANUAL);
|
||||
|
||||
__ push(ebp); // Caller's frame pointer.
|
||||
__ mov(ebp, esp);
|
||||
__ push(esi); // Callee's context.
|
||||
|
@ -928,22 +928,22 @@ static void GenerateCallMiss(MacroAssembler* masm,
|
||||
// Get the receiver of the function from the stack; 1 ~ return address.
|
||||
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
|
||||
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
// Enter an internal frame.
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Push the receiver and the name of the function.
|
||||
__ push(edx);
|
||||
__ push(ecx);
|
||||
// Push the receiver and the name of the function.
|
||||
__ push(edx);
|
||||
__ push(ecx);
|
||||
|
||||
// Call the entry.
|
||||
CEntryStub stub(1);
|
||||
__ mov(eax, Immediate(2));
|
||||
__ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
|
||||
__ CallStub(&stub);
|
||||
// Call the entry.
|
||||
CEntryStub stub(1);
|
||||
__ mov(eax, Immediate(2));
|
||||
__ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
|
||||
__ CallStub(&stub);
|
||||
|
||||
// Move result to edi and exit the internal frame.
|
||||
__ mov(edi, eax);
|
||||
}
|
||||
// Move result to edi and exit the internal frame.
|
||||
__ mov(edi, eax);
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check if the receiver is a global object of some sort.
|
||||
// This can happen only for regular CallIC but not KeyedCallIC.
|
||||
@ -1090,17 +1090,13 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
// This branch is taken when calling KeyedCallIC_Miss is neither required
|
||||
// nor beneficial.
|
||||
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
|
||||
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(ecx); // save the key
|
||||
__ push(edx); // pass the receiver
|
||||
__ push(ecx); // pass the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(ecx); // restore the key
|
||||
// Leave the internal frame.
|
||||
}
|
||||
|
||||
__ EnterInternalFrame();
|
||||
__ push(ecx); // save the key
|
||||
__ push(edx); // pass the receiver
|
||||
__ push(ecx); // pass the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(ecx); // restore the key
|
||||
__ LeaveInternalFrame();
|
||||
__ mov(edi, eax);
|
||||
__ jmp(&do_call);
|
||||
|
||||
|
@ -70,12 +70,6 @@ bool LCodeGen::GenerateCode() {
|
||||
ASSERT(is_unused());
|
||||
status_ = GENERATING;
|
||||
CpuFeatures::Scope scope(SSE2);
|
||||
|
||||
// Open a frame scope to indicate that there is a frame on the stack. The
|
||||
// MANUAL indicates that the scope shouldn't actually generate code to set up
|
||||
// the frame (that is done in GeneratePrologue).
|
||||
FrameScope frame_scope(masm_, StackFrame::MANUAL);
|
||||
|
||||
return GeneratePrologue() &&
|
||||
GenerateBody() &&
|
||||
GenerateDeferredCode() &&
|
||||
|
@@ -44,8 +44,7 @@ namespace internal {
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
: Assembler(arg_isolate, buffer, size),
generating_stub_(false),
allow_stub_calls_(true),
has_frame_(false) {
allow_stub_calls_(true) {
if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
isolate());
@ -1209,13 +1208,13 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
|
||||
|
||||
|
||||
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
|
||||
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
|
||||
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
Object* result;
|
||||
{ MaybeObject* maybe_result = stub->TryGetCode();
|
||||
if (!maybe_result->ToObject(&result)) return maybe_result;
|
||||
@ -1226,12 +1225,13 @@ MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
|
||||
|
||||
|
||||
void MacroAssembler::TailCallStub(CodeStub* stub) {
|
||||
ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
Object* result;
|
||||
{ MaybeObject* maybe_result = stub->TryGetCode();
|
||||
if (!maybe_result->ToObject(&result)) return maybe_result;
|
||||
@ -1247,12 +1247,6 @@ void MacroAssembler::StubReturn(int argc) {
|
||||
}
|
||||
|
||||
|
||||
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
|
||||
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
|
||||
return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::IllegalOperation(int num_arguments) {
|
||||
if (num_arguments > 0) {
|
||||
add(Operand(esp), Immediate(num_arguments * kPointerSize));
|
||||
@ -1633,9 +1627,6 @@ void MacroAssembler::InvokeCode(const Operand& code,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
InvokePrologue(expected, actual, Handle<Code>::null(), code,
|
||||
&done, flag, Label::kNear, call_wrapper,
|
||||
@ -1661,9 +1652,6 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
Operand dummy(eax);
|
||||
InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
|
||||
@ -1687,9 +1675,6 @@ void MacroAssembler::InvokeFunction(Register fun,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
ASSERT(fun.is(edi));
|
||||
mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
|
||||
mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
|
||||
@ -1707,9 +1692,6 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
ASSERT(function->is_compiled());
|
||||
// Get the function and setup the context.
|
||||
mov(edi, Immediate(Handle<JSFunction>(function)));
|
||||
@ -1733,8 +1715,8 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
|
||||
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
// You can't call a builtin without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
// Calls are not allowed in some stubs.
|
||||
ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
|
||||
|
||||
// Rely on the assertion to check that the number of provided
|
||||
// arguments match the expected number of arguments. Fake a
|
||||
@ -1745,7 +1727,6 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
|
||||
expected, expected, flag, call_wrapper, CALL_AS_METHOD);
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::GetBuiltinFunction(Register target,
|
||||
Builtins::JavaScript id) {
|
||||
// Load the JavaScript builtin function from the builtins object.
|
||||
@ -1755,7 +1736,6 @@ void MacroAssembler::GetBuiltinFunction(Register target,
|
||||
JSBuiltinsObject::OffsetOfFunctionWithId(id)));
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
|
||||
ASSERT(!target.is(edi));
|
||||
// Load the JavaScript builtin function from the builtins object.
|
||||
@ -2011,19 +1991,13 @@ void MacroAssembler::Abort(const char* msg) {
|
||||
RecordComment(msg);
|
||||
}
|
||||
#endif
|
||||
// Disable stub call restrictions to always allow calls to abort.
|
||||
AllowStubCallsScope allow_scope(this, true);
|
||||
|
||||
push(eax);
|
||||
push(Immediate(p0));
|
||||
push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
|
||||
// Disable stub call restrictions to always allow calls to abort.
|
||||
if (!has_frame_) {
|
||||
// We don't actually want to generate a pile of code for this, so just
|
||||
// claim there is a stack frame, without generating one.
|
||||
FrameScope scope(this, StackFrame::NONE);
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
} else {
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
}
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
// will not return here
|
||||
int3();
|
||||
}
|
||||
@ -2123,7 +2097,6 @@ void MacroAssembler::CallCFunction(ExternalReference function,
|
||||
|
||||
void MacroAssembler::CallCFunction(Register function,
|
||||
int num_arguments) {
|
||||
ASSERT(has_frame());
|
||||
// Check stack alignment.
|
||||
if (emit_debug_code()) {
|
||||
CheckStackAlignment();
|
||||
|
@ -29,7 +29,6 @@
|
||||
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_
|
||||
|
||||
#include "assembler.h"
|
||||
#include "frames.h"
|
||||
#include "v8globals.h"
|
||||
|
||||
namespace v8 {
|
||||
@ -106,6 +105,15 @@ class MacroAssembler: public Assembler {
|
||||
void DebugBreak();
|
||||
#endif
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Activation frames
|
||||
|
||||
void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
|
||||
void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
|
||||
|
||||
void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
|
||||
void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
|
||||
|
||||
// Enter specific kind of exit frame. Expects the number of
|
||||
// arguments in register eax and sets up the number of arguments in
|
||||
// register edi and the pointer to the first argument in register
|
||||
@ -616,9 +624,6 @@ class MacroAssembler: public Assembler {
|
||||
bool generating_stub() { return generating_stub_; }
|
||||
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
|
||||
bool allow_stub_calls() { return allow_stub_calls_; }
|
||||
void set_has_frame(bool value) { has_frame_ = value; }
|
||||
bool has_frame() { return has_frame_; }
|
||||
inline bool AllowThisStubCall(CodeStub* stub);
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// String utilities.
|
||||
@ -642,14 +647,9 @@ class MacroAssembler: public Assembler {
|
||||
return SafepointRegisterStackIndex(reg.code());
|
||||
}
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
private:
|
||||
bool generating_stub_;
|
||||
bool allow_stub_calls_;
|
||||
bool has_frame_;
|
||||
// This handle will be patched with the code object on installation.
|
||||
Handle<Object> code_object_;
|
||||
|
||||
@ -664,6 +664,10 @@ class MacroAssembler: public Assembler {
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
void EnterExitFramePrologue();
|
||||
void EnterExitFrameEpilogue(int argc, bool save_doubles);
|
||||
|
||||
|
@ -396,12 +396,9 @@ void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase(
|
||||
__ add(edx, Operand(esi));
|
||||
__ mov(Operand(esp, 0 * kPointerSize), edx);
|
||||
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm_);
|
||||
ExternalReference compare =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
|
||||
__ CallCFunction(compare, argument_count);
|
||||
}
|
||||
ExternalReference compare =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
|
||||
__ CallCFunction(compare, argument_count);
|
||||
// Pop original values before reacting on result value.
|
||||
__ pop(ebx);
|
||||
__ pop(backtrack_stackpointer());
|
||||
@ -671,12 +668,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {

  // Entry code:
  __ bind(&entry_label_);

  // Tell the system that we have a stack frame. Because the type is MANUAL, no
  // code is generated.
  FrameScope scope(masm_, StackFrame::MANUAL);

  // Actually emit code to start a new stack frame.
  // Start new stack frame.
  __ push(ebp);
  __ mov(ebp, esp);
  // Save callee-save registers. Order here should correspond to order of
@ -655,7 +655,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
scratch1, scratch2, scratch3, name,
|
||||
miss_label);
|
||||
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
// Save the name_ register across the call.
|
||||
__ push(name_);
|
||||
|
||||
@ -672,8 +672,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
|
||||
// Restore the name_ register.
|
||||
__ pop(name_);
|
||||
|
||||
// Leave the internal frame.
|
||||
__ LeaveInternalFrame();
|
||||
}
|
||||
|
||||
void LoadWithInterceptor(MacroAssembler* masm,
|
||||
@ -681,21 +680,19 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
Register holder,
|
||||
JSObject* holder_obj,
|
||||
Label* interceptor_succeeded) {
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(holder); // Save the holder.
|
||||
__ push(name_); // Save the name.
|
||||
__ EnterInternalFrame();
|
||||
__ push(holder); // Save the holder.
|
||||
__ push(name_); // Save the name.
|
||||
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
// Leave the internal frame.
|
||||
}
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
__ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
|
||||
__ j(not_equal, interceptor_succeeded);
|
||||
@ -1170,42 +1167,40 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
|
||||
|
||||
// Save necessary data before invoking an interceptor.
|
||||
// Requires a frame to make GC aware of pushed pointers.
|
||||
{
|
||||
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ push(receiver);
|
||||
}
|
||||
__ push(holder_reg);
|
||||
__ push(name_reg);
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ cmp(eax, factory()->no_interceptor_result_sentinel());
|
||||
__ j(equal, &interceptor_failed);
|
||||
frame_scope.GenerateLeaveFrame();
|
||||
__ ret(0);
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
// Leave the internal frame.
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ push(receiver);
|
||||
}
|
||||
__ push(holder_reg);
|
||||
__ push(name_reg);
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ cmp(eax, factory()->no_interceptor_result_sentinel());
|
||||
__ j(equal, &interceptor_failed);
|
||||
__ LeaveInternalFrame();
|
||||
__ ret(0);
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check that the maps from interceptor's holder to lookup's holder
|
||||
// haven't changed. And load lookup's holder into holder_reg.
|
||||
|
@ -93,63 +93,6 @@ const int kInvalidProtoDepth = -1;
namespace v8 {
namespace internal {

class FrameScope {
 public:
  explicit FrameScope(MacroAssembler* masm, StackFrame::Type type)
      : masm_(masm), type_(type) {
    ASSERT(!masm->has_frame());
    masm->set_has_frame(true);
    if (type != StackFrame::MANUAL && type_ != StackFrame::NONE) {
      masm->EnterFrame(type);
    }
  }

  ~FrameScope() {
    if (type_ != StackFrame::MANUAL && type_ != StackFrame::NONE) {
      masm_->LeaveFrame(type_);
    }
    masm_->set_has_frame(false);
  }

  // Normally we generate the leave-frame code when this object goes
  // out of scope. Sometimes we may need to generate the code somewhere else
  // in addition. Calling this will achieve that, but the object stays in
  // scope, the MacroAssembler is still marked as being in a frame scope, and
  // the code will be generated again when it goes out of scope.
  void GenerateLeaveFrame() {
    masm_->LeaveFrame(type_);
  }

 private:
  MacroAssembler* masm_;
  StackFrame::Type type_;
};

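A minimal usage sketch for this class, mirroring the call sites elsewhere in this diff (illustrative; `__` is the usual ACCESS_MASM(masm) shorthand):

    // Entering the scope emits EnterFrame(StackFrame::INTERNAL) and marks the
    // assembler as having a frame; leaving it emits the matching LeaveFrame.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(eax);
      __ CallRuntime(Runtime::kNumberAlloc, 0);
      __ pop(eax);
    }  // LeaveFrame(StackFrame::INTERNAL) is generated here.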
class AllowExternalCallThatCantCauseGC: public FrameScope {
 public:
  explicit AllowExternalCallThatCantCauseGC(MacroAssembler* masm)
      : FrameScope(masm, StackFrame::NONE) { }
};


class NoCurrentFrameScope {
 public:
  explicit NoCurrentFrameScope(MacroAssembler* masm)
      : masm_(masm), saved_(masm->has_frame()) {
    masm->set_has_frame(false);
  }

  ~NoCurrentFrameScope() {
    masm_->set_has_frame(saved_);
  }

 private:
  MacroAssembler* masm_;
  bool saved_;
};


// Support for "structured" code comments.
#ifdef DEBUG

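For reference, a sketch of how the two helper scopes above are typically used; the surrounding code is illustrative rather than taken verbatim from this patch:

    // AllowExternalCallThatCantCauseGC is a StackFrame::NONE scope: it asserts
    // that a C call made without a frame cannot trigger a GC.
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ CallCFunction(compare, argument_count);
    }

    // NoCurrentFrameScope temporarily clears has_frame(), e.g. around code that
    // is emitted out of line and must not assume the enclosing frame exists.
    {
      NoCurrentFrameScope scope(masm);
      // ... emit frameless code ...
    }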
File diff suppressed because it is too large
@ -232,8 +232,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
|
||||
|
||||
|
||||
void ToBooleanStub::Generate(MacroAssembler* masm) {
|
||||
// This stub overrides SometimesSetsUpAFrame() to return false. That means
|
||||
// we cannot call anything that could cause a GC from this stub.
|
||||
Label false_result, true_result, not_string;
|
||||
__ movq(rax, Operand(rsp, 1 * kPointerSize));
|
||||
|
||||
@ -566,13 +564,12 @@ void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
|
||||
__ jmp(&heapnumber_allocated);
|
||||
|
||||
__ bind(&slow_allocate_heapnumber);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(rax);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
__ movq(rcx, rax);
|
||||
__ pop(rax);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(rax);
|
||||
__ CallRuntime(Runtime::kNumberAlloc, 0);
|
||||
__ movq(rcx, rax);
|
||||
__ pop(rax);
|
||||
__ LeaveInternalFrame();
|
||||
__ bind(&heapnumber_allocated);
|
||||
// rcx: allocated 'empty' number
|
||||
|
||||
@ -1422,12 +1419,11 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
|
||||
__ addq(rsp, Immediate(kDoubleSize));
|
||||
// We return the value in xmm1 without adding it to the cache, but
|
||||
// we cause a scavenging GC so that future allocations will succeed.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
// Allocate an unused object bigger than a HeapNumber.
|
||||
__ Push(Smi::FromInt(2 * kDoubleSize));
|
||||
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
// Allocate an unused object bigger than a HeapNumber.
|
||||
__ Push(Smi::FromInt(2 * kDoubleSize));
|
||||
__ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
|
||||
__ LeaveInternalFrame();
|
||||
__ Ret();
|
||||
}
|
||||
|
||||
@ -1443,11 +1439,10 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
|
||||
__ bind(&runtime_call);
|
||||
__ AllocateHeapNumber(rax, rdi, &skip_cache);
|
||||
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(rax);
|
||||
__ CallRuntime(RuntimeFunction(), 1);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(rax);
|
||||
__ CallRuntime(RuntimeFunction(), 1);
|
||||
__ LeaveInternalFrame();
|
||||
__ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
|
||||
__ Ret();
|
||||
}
|
||||
@ -4913,13 +4908,12 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
|
||||
// Call the runtime system in a fresh internal frame.
|
||||
ExternalReference miss =
|
||||
ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(rdx);
|
||||
__ push(rax);
|
||||
__ Push(Smi::FromInt(op_));
|
||||
__ CallExternalReference(miss, 3);
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(rdx);
|
||||
__ push(rax);
|
||||
__ Push(Smi::FromInt(op_));
|
||||
__ CallExternalReference(miss, 3);
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Compute the entry point of the rewritten stub.
|
||||
__ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
|
||||
@ -5050,8 +5044,6 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
|
||||
|
||||
|
||||
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
|
||||
// This stub overrides SometimesSetsUpAFrame() to return false. That means
|
||||
// we cannot call anything that could cause a GC from this stub.
|
||||
// Stack frame on entry:
|
||||
// esp[0 * kPointerSize]: return address.
|
||||
// esp[1 * kPointerSize]: key's hash.
|
||||
|
@ -65,8 +65,6 @@ class ToBooleanStub: public CodeStub {
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
virtual bool SometimesSetsUpAFrame() { return false; }
|
||||
|
||||
private:
|
||||
Major MajorKey() { return ToBoolean; }
|
||||
int MinorKey() { return 0; }
|
||||
@ -471,8 +469,6 @@ class StringDictionaryLookupStub: public CodeStub {
|
||||
Register r0,
|
||||
Register r1);
|
||||
|
||||
virtual bool SometimesSetsUpAFrame() { return false; }
|
||||
|
||||
private:
|
||||
static const int kInlinedProbes = 4;
|
||||
static const int kTotalProbes = 20;
|
||||
@ -492,7 +488,7 @@ class StringDictionaryLookupStub: public CodeStub {
|
||||
}
|
||||
#endif
|
||||
|
||||
Major MajorKey() { return StringDictionaryLookup; }
|
||||
Major MajorKey() { return StringDictionaryNegativeLookup; }
|
||||
|
||||
int MinorKey() {
|
||||
return DictionaryBits::encode(dictionary_.code()) |
|
||||
|
@ -38,16 +38,12 @@ namespace internal {
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  ASSERT(!masm->has_frame());
  masm->set_has_frame(true);
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  ASSERT(masm->has_frame());
  masm->set_has_frame(false);
  masm->LeaveInternalFrame();
}

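These two hooks bracket runtime calls made from generated stub code. A sketch of the calling convention, with an illustrative wrapper that is not part of this patch:

    // The helper is passed to code generators that may need to call into the
    // runtime; BeforeCall sets up an internal frame, AfterCall tears it down.
    void IllustrativeCallRuntime(MacroAssembler* masm,
                                 const RuntimeCallHelper& helper) {
      helper.BeforeCall(masm);
      masm->CallRuntime(Runtime::kNumberAlloc, 0);
      helper.AfterCall(masm);
    }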
@ -100,66 +100,65 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
|
||||
RegList non_object_regs,
|
||||
bool convert_call_to_jmp) {
|
||||
// Enter an internal frame.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as two smis causing them to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
ASSERT(!reg.is(kScratchRegister));
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ push(reg);
|
||||
}
|
||||
// Store the 64-bit value as two smis.
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ movq(kScratchRegister, reg);
|
||||
__ Integer32ToSmi(reg, reg);
|
||||
__ push(reg);
|
||||
__ sar(kScratchRegister, Immediate(32));
|
||||
__ Integer32ToSmi(kScratchRegister, kScratchRegister);
|
||||
__ push(kScratchRegister);
|
||||
}
|
||||
// Store the registers containing live values on the expression stack to
|
||||
// make sure that these are correctly updated during GC. Non object values
|
||||
// are stored as two smis causing them to be untouched by GC.
|
||||
ASSERT((object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
|
||||
ASSERT((object_regs & non_object_regs) == 0);
|
||||
for (int i = 0; i < kNumJSCallerSaved; i++) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
ASSERT(!reg.is(kScratchRegister));
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ push(reg);
|
||||
}
|
||||
// Store the 64-bit value as two smis.
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ movq(kScratchRegister, reg);
|
||||
__ Integer32ToSmi(reg, reg);
|
||||
__ push(reg);
|
||||
__ sar(kScratchRegister, Immediate(32));
|
||||
__ Integer32ToSmi(kScratchRegister, kScratchRegister);
|
||||
__ push(kScratchRegister);
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
__ RecordComment("// Calling from debug break to runtime - come in - over");
|
||||
#endif
|
||||
__ Set(rax, 0); // No arguments (argc == 0).
|
||||
__ movq(rbx, ExternalReference::debug_break(masm->isolate()));
|
||||
__ Set(rax, 0); // No arguments (argc == 0).
|
||||
__ movq(rbx, ExternalReference::debug_break(masm->isolate()));
|
||||
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
CEntryStub ceb(1);
|
||||
__ CallStub(&ceb);
|
||||
|
||||
// Restore the register values from the expression stack.
|
||||
for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if (FLAG_debug_code) {
|
||||
__ Set(reg, kDebugZapValue);
|
||||
}
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
}
|
||||
// Reconstruct the 64-bit value from two smis.
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ pop(kScratchRegister);
|
||||
__ SmiToInteger32(kScratchRegister, kScratchRegister);
|
||||
__ shl(kScratchRegister, Immediate(32));
|
||||
__ pop(reg);
|
||||
__ SmiToInteger32(reg, reg);
|
||||
__ or_(reg, kScratchRegister);
|
||||
}
|
||||
// Restore the register values from the expression stack.
|
||||
for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
|
||||
int r = JSCallerSavedCode(i);
|
||||
Register reg = { r };
|
||||
if (FLAG_debug_code) {
|
||||
__ Set(reg, kDebugZapValue);
|
||||
}
|
||||
if ((object_regs & (1 << r)) != 0) {
|
||||
__ pop(reg);
|
||||
}
|
||||
// Reconstruct the 64-bit value from two smis.
|
||||
if ((non_object_regs & (1 << r)) != 0) {
|
||||
__ pop(kScratchRegister);
|
||||
__ SmiToInteger32(kScratchRegister, kScratchRegister);
|
||||
__ shl(kScratchRegister, Immediate(32));
|
||||
__ pop(reg);
|
||||
__ SmiToInteger32(reg, reg);
|
||||
__ or_(reg, kScratchRegister);
|
||||
}
|
||||
|
||||
// Get rid of the internal frame.
|
||||
}
|
||||
|
||||
// Get rid of the internal frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// If this call did not replace a call but patched other code then there will
|
||||
// be an unwanted return address left on the stack. Here we get rid of that.
|
||||
if (convert_call_to_jmp) {
|
||||
|
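The register spill above avoids untagged 64-bit values on the expression stack by splitting each raw value into two smis. The same arithmetic in plain C++, as an illustrative model rather than generated code:

    #include <stdint.h>

    // Each saved register value is split into two 32-bit halves, each of which
    // is tagged as a smi before being pushed (illustrative model only).
    static inline void SplitForSpill(uint64_t value, int32_t* low, int32_t* high) {
      *low = static_cast<int32_t>(value);         // pushed as Smi::FromInt(*low)
      *high = static_cast<int32_t>(value >> 32);  // pushed as Smi::FromInt(*high)
    }

    // Restoring reverses the split, like the pop/shl/or sequence above.
    static inline uint64_t RestoreFromSpill(int32_t low, int32_t high) {
      return (static_cast<uint64_t>(static_cast<uint32_t>(high)) << 32) |
             static_cast<uint32_t>(low);
    }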
@ -681,10 +681,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
|
||||
Isolate* isolate = masm()->isolate();
|
||||
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm());
|
||||
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
|
||||
}
|
||||
__ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
|
||||
// Preserve deoptimizer object in register rax and get the input
|
||||
// frame descriptor pointer.
|
||||
__ movq(rbx, Operand(rax, Deoptimizer::input_offset()));
|
||||
@ -730,11 +727,8 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
__ PrepareCallCFunction(2);
|
||||
__ movq(arg1, rax);
|
||||
__ LoadAddress(arg2, ExternalReference::isolate_address());
|
||||
{
|
||||
AllowExternalCallThatCantCauseGC scope(masm());
|
||||
__ CallCFunction(
|
||||
ExternalReference::compute_output_frames_function(isolate), 2);
|
||||
}
|
||||
__ CallCFunction(
|
||||
ExternalReference::compute_output_frames_function(isolate), 2);
|
||||
__ pop(rax);
|
||||
|
||||
// Replace the current frame with the output frames.
|
||||
|
@ -146,11 +146,6 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
|
||||
__ bind(&ok);
|
||||
}
|
||||
|
||||
// Open a frame scope to indicate that there is a frame on the stack. The
|
||||
// MANUAL indicates that the scope shouldn't actually generate code to set up
|
||||
// the frame (that is done below).
|
||||
FrameScope frame_scope(masm_, StackFrame::MANUAL);
|
||||
|
||||
__ push(rbp); // Caller's frame pointer.
|
||||
__ movq(rbp, rsp);
|
||||
__ push(rsi); // Callee's context.
|
||||
|
@ -950,22 +950,21 @@ static void GenerateCallMiss(MacroAssembler* masm,
|
||||
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
|
||||
|
||||
// Enter an internal frame.
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Push the receiver and the name of the function.
|
||||
__ push(rdx);
|
||||
__ push(rcx);
|
||||
// Push the receiver and the name of the function.
|
||||
__ push(rdx);
|
||||
__ push(rcx);
|
||||
|
||||
// Call the entry.
|
||||
CEntryStub stub(1);
|
||||
__ Set(rax, 2);
|
||||
__ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
|
||||
__ CallStub(&stub);
|
||||
// Call the entry.
|
||||
CEntryStub stub(1);
|
||||
__ Set(rax, 2);
|
||||
__ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
|
||||
__ CallStub(&stub);
|
||||
|
||||
// Move result to rdi and exit the internal frame.
|
||||
__ movq(rdi, rax);
|
||||
}
|
||||
// Move result to rdi and exit the internal frame.
|
||||
__ movq(rdi, rax);
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check if the receiver is a global object of some sort.
|
||||
// This can happen only for regular CallIC but not KeyedCallIC.
|
||||
@ -1107,14 +1106,13 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
// This branch is taken when calling KeyedCallIC_Miss is neither required
|
||||
// nor beneficial.
|
||||
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(rcx); // save the key
|
||||
__ push(rdx); // pass the receiver
|
||||
__ push(rcx); // pass the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(rcx); // restore the key
|
||||
}
|
||||
__ EnterInternalFrame();
|
||||
__ push(rcx); // save the key
|
||||
__ push(rdx); // pass the receiver
|
||||
__ push(rcx); // pass the key
|
||||
__ CallRuntime(Runtime::kKeyedGetProperty, 2);
|
||||
__ pop(rcx); // restore the key
|
||||
__ LeaveInternalFrame();
|
||||
__ movq(rdi, rax);
|
||||
__ jmp(&do_call);
|
||||
|
||||
|
@ -81,12 +81,6 @@ bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
@ -44,7 +44,6 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
|
||||
: Assembler(arg_isolate, buffer, size),
|
||||
generating_stub_(false),
|
||||
allow_stub_calls_(true),
|
||||
has_frame_(false),
|
||||
root_array_available_(true) {
|
||||
if (isolate() != NULL) {
|
||||
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
|
||||
@ -398,7 +397,7 @@ void MacroAssembler::Check(Condition cc, const char* msg) {
|
||||
Label L;
|
||||
j(cc, &L, Label::kNear);
|
||||
Abort(msg);
|
||||
// Control will not return here.
|
||||
// will not return here
|
||||
bind(&L);
|
||||
}
|
||||
|
||||
@ -446,6 +445,9 @@ void MacroAssembler::Abort(const char* msg) {
|
||||
RecordComment(msg);
|
||||
}
|
||||
#endif
|
||||
// Disable stub call restrictions to always allow calls to abort.
|
||||
AllowStubCallsScope allow_scope(this, true);
|
||||
|
||||
push(rax);
|
||||
movq(kScratchRegister, p0, RelocInfo::NONE);
|
||||
push(kScratchRegister);
|
||||
@ -453,28 +455,20 @@ void MacroAssembler::Abort(const char* msg) {
|
||||
reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
|
||||
RelocInfo::NONE);
|
||||
push(kScratchRegister);
|
||||
|
||||
if (!has_frame_) {
|
||||
// We don't actually want to generate a pile of code for this, so just
|
||||
// claim there is a stack frame, without generating one.
|
||||
FrameScope scope(this, StackFrame::NONE);
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
} else {
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
}
|
||||
// Control will not return here.
|
||||
CallRuntime(Runtime::kAbort, 2);
|
||||
// will not return here
|
||||
int3();
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
|
||||
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs
|
||||
ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
|
||||
Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
|
||||
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
MaybeObject* result = stub->TryGetCode();
|
||||
if (!result->IsFailure()) {
|
||||
call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
|
||||
@ -485,12 +479,13 @@ MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
|
||||
|
||||
|
||||
void MacroAssembler::TailCallStub(CodeStub* stub) {
|
||||
ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
|
||||
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
|
||||
MaybeObject* result = stub->TryGetCode();
|
||||
if (!result->IsFailure()) {
|
||||
jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
|
||||
@ -506,12 +501,6 @@ void MacroAssembler::StubReturn(int argc) {
|
||||
}
|
||||
|
||||
|
||||
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
|
||||
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
|
||||
return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::IllegalOperation(int num_arguments) {
|
||||
if (num_arguments > 0) {
|
||||
addq(rsp, Immediate(num_arguments * kPointerSize));
|
||||
@ -803,8 +792,8 @@ MaybeObject* MacroAssembler::TryJumpToExternalReference(
|
||||
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
// You can't call a builtin without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
// Calls are not allowed in some stubs.
|
||||
ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
|
||||
|
||||
// Rely on the assertion to check that the number of provided
|
||||
// arguments match the expected number of arguments. Fake a
|
||||
@ -2782,10 +2771,10 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
void MacroAssembler::DebugBreak() {
|
||||
ASSERT(allow_stub_calls());
|
||||
Set(rax, 0); // No arguments.
|
||||
LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
|
||||
CEntryStub ces(1);
|
||||
ASSERT(AllowThisStubCall(&ces));
|
||||
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
|
||||
}
|
||||
#endif // ENABLE_DEBUGGER_SUPPORT
|
||||
@ -2811,9 +2800,6 @@ void MacroAssembler::InvokeCode(Register code,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
InvokePrologue(expected,
|
||||
actual,
|
||||
@ -2845,9 +2831,6 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
Label done;
|
||||
Register dummy = rax;
|
||||
InvokePrologue(expected,
|
||||
@ -2878,9 +2861,6 @@ void MacroAssembler::InvokeFunction(Register function,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
ASSERT(function.is(rdi));
|
||||
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
|
||||
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
|
||||
@ -2900,9 +2880,6 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// You can't call a function without a valid frame.
|
||||
ASSERT(flag == JUMP_FUNCTION || has_frame());
|
||||
|
||||
ASSERT(function->is_compiled());
|
||||
// Get the function and setup the context.
|
||||
Move(rdi, Handle<JSFunction>(function));
|
||||
@ -3731,7 +3708,6 @@ void MacroAssembler::CallCFunction(ExternalReference function,
|
||||
|
||||
|
||||
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
|
||||
ASSERT(has_frame());
|
||||
// Check stack alignment.
|
||||
if (emit_debug_code()) {
|
||||
CheckStackAlignment();
|
||||
|
@ -29,7 +29,6 @@
|
||||
#define V8_X64_MACRO_ASSEMBLER_X64_H_
|
||||
|
||||
#include "assembler.h"
|
||||
#include "frames.h"
|
||||
#include "v8globals.h"
|
||||
|
||||
namespace v8 {
|
||||
@ -73,7 +72,6 @@ struct SmiIndex {
|
||||
ScaleFactor scale;
|
||||
};
|
||||
|
||||
|
||||
// MacroAssembler implements a collection of frequently used macros.
|
||||
class MacroAssembler: public Assembler {
|
||||
public:
|
||||
@ -194,6 +192,15 @@ class MacroAssembler: public Assembler {
|
||||
void DebugBreak();
|
||||
#endif
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Activation frames
|
||||
|
||||
void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
|
||||
void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
|
||||
|
||||
void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
|
||||
void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
|
||||
|
||||
// Enter specific kind of exit frame; either in normal or
|
||||
// debug mode. Expects the number of arguments in register rax and
|
||||
// sets up the number of arguments in register rdi and the pointer
|
||||
@ -1118,18 +1125,11 @@ class MacroAssembler: public Assembler {
|
||||
bool generating_stub() { return generating_stub_; }
|
||||
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
|
||||
bool allow_stub_calls() { return allow_stub_calls_; }
|
||||
void set_has_frame(bool value) { has_frame_ = value; }
|
||||
bool has_frame() { return has_frame_; }
|
||||
inline bool AllowThisStubCall(CodeStub* stub);
|
||||
|
||||
static int SafepointRegisterStackIndex(Register reg) {
|
||||
return SafepointRegisterStackIndex(reg.code());
|
||||
}
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
private:
|
||||
// Order general registers are pushed by Pushad.
|
||||
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
|
||||
@ -1139,7 +1139,6 @@ class MacroAssembler: public Assembler {
|
||||
|
||||
bool generating_stub_;
|
||||
bool allow_stub_calls_;
|
||||
bool has_frame_;
|
||||
bool root_array_available_;
|
||||
|
||||
// Returns a register holding the smi value. The register MUST NOT be
|
||||
@ -1163,6 +1162,10 @@ class MacroAssembler: public Assembler {
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
void EnterExitFramePrologue(bool save_rax);
|
||||
|
||||
// Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
|
||||
|
@ -431,14 +431,9 @@ void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase(
|
||||
// Isolate.
|
||||
__ LoadAddress(rcx, ExternalReference::isolate_address());
|
||||
#endif
|
||||
|
||||
{ // NOLINT: Can't find a way to open this scope without confusing the
|
||||
// linter.
|
||||
AllowExternalCallThatCantCauseGC scope(&masm_);
|
||||
ExternalReference compare =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_.isolate());
|
||||
__ CallCFunction(compare, num_arguments);
|
||||
}
|
||||
ExternalReference compare =
|
||||
ExternalReference::re_case_insensitive_compare_uc16(masm_.isolate());
|
||||
__ CallCFunction(compare, num_arguments);
|
||||
|
||||
// Restore original values before reacting on result value.
|
||||
__ Move(code_object_pointer(), masm_.CodeObject());
|
||||
@ -713,12 +708,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
|
||||
// registers we need.
|
||||
// Entry code:
|
||||
__ bind(&entry_label_);
|
||||
|
||||
// Tell the system that we have a stack frame. Because the type is MANUAL, no
|
||||
// code is generated.
|
||||
FrameScope scope(&masm_, StackFrame::MANUAL);
|
||||
|
||||
// Actually emit code to start a new stack frame.
|
||||
// Start new stack frame.
|
||||
__ push(rbp);
|
||||
__ movq(rbp, rsp);
|
||||
// Save parameters and callee-save registers. Order here should correspond
|
||||
|
@ -645,7 +645,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
scratch1, scratch2, scratch3, name,
|
||||
miss_label);
|
||||
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
// Save the name_ register across the call.
|
||||
__ push(name_);
|
||||
|
||||
@ -662,8 +662,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
|
||||
// Restore the name_ register.
|
||||
__ pop(name_);
|
||||
|
||||
// Leave the internal frame.
|
||||
__ LeaveInternalFrame();
|
||||
}
|
||||
|
||||
void LoadWithInterceptor(MacroAssembler* masm,
|
||||
@ -671,21 +670,19 @@ class CallInterceptorCompiler BASE_EMBEDDED {
|
||||
Register holder,
|
||||
JSObject* holder_obj,
|
||||
Label* interceptor_succeeded) {
|
||||
{
|
||||
FrameScope scope(masm, StackFrame::INTERNAL);
|
||||
__ push(holder); // Save the holder.
|
||||
__ push(name_); // Save the name.
|
||||
__ EnterInternalFrame();
|
||||
__ push(holder); // Save the holder.
|
||||
__ push(name_); // Save the name.
|
||||
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
CompileCallLoadPropertyWithInterceptor(masm,
|
||||
receiver,
|
||||
holder,
|
||||
name_,
|
||||
holder_obj);
|
||||
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
// Leave the internal frame.
|
||||
}
|
||||
__ pop(name_); // Restore the name.
|
||||
__ pop(receiver); // Restore the holder.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
__ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
__ j(not_equal, interceptor_succeeded);
|
||||
@ -1143,42 +1140,40 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
|
||||
|
||||
// Save necessary data before invoking an interceptor.
|
||||
// Requires a frame to make GC aware of pushed pointers.
|
||||
{
|
||||
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
|
||||
__ EnterInternalFrame();
|
||||
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ push(receiver);
|
||||
}
|
||||
__ push(holder_reg);
|
||||
__ push(name_reg);
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
__ j(equal, &interceptor_failed);
|
||||
frame_scope.GenerateLeaveFrame();
|
||||
__ ret(0);
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
// Leave the internal frame.
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
// CALLBACKS case needs a receiver to be passed into C++ callback.
|
||||
__ push(receiver);
|
||||
}
|
||||
__ push(holder_reg);
|
||||
__ push(name_reg);
|
||||
|
||||
// Invoke an interceptor. Note: map checks from receiver to
|
||||
// interceptor's holder has been compiled before (see a caller
|
||||
// of this method.)
|
||||
CompileCallLoadPropertyWithInterceptor(masm(),
|
||||
receiver,
|
||||
holder_reg,
|
||||
name_reg,
|
||||
interceptor_holder);
|
||||
|
||||
// Check if interceptor provided a value for property. If it's
|
||||
// the case, return immediately.
|
||||
Label interceptor_failed;
|
||||
__ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
|
||||
__ j(equal, &interceptor_failed);
|
||||
__ LeaveInternalFrame();
|
||||
__ ret(0);
|
||||
|
||||
__ bind(&interceptor_failed);
|
||||
__ pop(name_reg);
|
||||
__ pop(holder_reg);
|
||||
if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
|
||||
__ pop(receiver);
|
||||
}
|
||||
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Check that the maps from interceptor's holder to lookup's holder
|
||||
// haven't changed. And load lookup's holder into |holder| register.
|
||||
|