2012-01-24 08:43:12 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2014-09-30 10:29:32 +00:00
|
|
|
#include "src/frames.h"
|
|
|
|
|
|
|
|
#include <sstream>
|
|
|
|
|
2015-11-26 16:22:34 +00:00
|
|
|
#include "src/ast/ast.h"
|
|
|
|
#include "src/ast/scopeinfo.h"
|
2014-08-20 12:10:41 +00:00
|
|
|
#include "src/base/bits.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/deoptimizer.h"
|
|
|
|
#include "src/frames-inl.h"
|
2015-07-24 10:11:46 +00:00
|
|
|
#include "src/full-codegen/full-codegen.h"
|
2015-10-02 16:55:12 +00:00
|
|
|
#include "src/register-configuration.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/safepoint-table.h"
|
|
|
|
#include "src/string-stream.h"
|
|
|
|
#include "src/vm-state-inl.h"
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2009-05-25 10:05:56 +00:00
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
ReturnAddressLocationResolver
|
|
|
|
StackFrame::return_address_location_resolver_ = NULL;
|
2012-02-27 15:15:53 +00:00
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// Iterator that supports traversing the stack handlers of a
|
|
|
|
// particular frame. Needs to know the top of the handler chain.
|
|
|
|
class StackHandlerIterator BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
|
|
|
|
: limit_(frame->fp()), handler_(handler) {
|
|
|
|
// Make sure the handler has already been unwound to this frame.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(frame->sp() <= handler->address());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
StackHandler* handler() const { return handler_; }
|
|
|
|
|
2009-06-10 08:01:30 +00:00
|
|
|
bool done() {
|
|
|
|
return handler_ == NULL || handler_->address() > limit_;
|
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
void Advance() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!done());
|
2008-07-03 15:10:15 +00:00
|
|
|
handler_ = handler_->next();
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
const Address limit_;
|
|
|
|
StackHandler* handler_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
#define INITIALIZE_SINGLETON(type, field) field##_(this),
|
2013-06-27 09:34:31 +00:00
|
|
|
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
|
|
|
|
bool can_access_heap_objects)
|
Simplify isolates access during stack iteration (WAS: Move SafeStackFrameIterator::active_count_...)
While trying to fix Mac and Windows versions for this change:
http://codereview.chromium.org/6771047/, I figured out, that we
already store an isolate in StackFrameIterator, so we can use it in
frame objects, instead of requiring it from caller.
I've changed iterators usage to the following scheme: whenever a
caller maintains an isolate pointer, it just passes it to stack
iterator, and no more worries about passing it to frame content
accessors. If a caller uses current isolate, it can omit passing it
to iterator, in this case, an iterator will use the current isolate,
too.
There was a special case with LiveEdit, which creates
detached copies of frame objects.
R=vitalyr@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/6794019
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7499 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2011-04-05 09:01:47 +00:00
|
|
|
: isolate_(isolate),
|
|
|
|
STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
|
|
|
|
frame_(NULL), handler_(NULL),
|
2013-06-27 09:34:31 +00:00
|
|
|
can_access_heap_objects_(can_access_heap_objects) {
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
2013-06-27 09:34:31 +00:00
|
|
|
#undef INITIALIZE_SINGLETON
|
|
|
|
|
|
|
|
|
|
|
|
// Iterates the current thread's stack. Heap access is allowed (this is not
// the profiler-safe variant).
StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
}
|
2009-03-20 14:49:12 +00:00
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
|
|
|
|
// Iterates the stack of the given (possibly archived) thread. Heap access
// is allowed.
StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
void StackFrameIterator::Advance() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!done());
|
2008-07-03 15:10:15 +00:00
|
|
|
// Compute the state of the calling frame before restoring
|
|
|
|
// callee-saved registers and unwinding handlers. This allows the
|
|
|
|
// frame code that computes the caller state to access the top
|
|
|
|
// handler and the value of any callee-saved register if needed.
|
|
|
|
StackFrame::State state;
|
|
|
|
StackFrame::Type type = frame_->GetCallerState(&state);
|
|
|
|
|
|
|
|
// Unwind handlers corresponding to the current frame.
|
|
|
|
StackHandlerIterator it(frame_, handler_);
|
|
|
|
while (!it.done()) it.Advance();
|
|
|
|
handler_ = it.handler();
|
|
|
|
|
|
|
|
// Advance to the calling frame.
|
|
|
|
frame_ = SingletonFor(type, &state);
|
|
|
|
|
|
|
|
// When we're done iterating over the stack frames, the handler
|
|
|
|
// chain must have been completely unwound.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!done() || handler_ == NULL);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
// Repositions the iterator at the innermost frame of the given thread,
// derived from the thread's saved C entry frame pointer.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type =
      ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}
|
|
|
|
|
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
// Returns the embedded singleton frame for |type| with its state set to
// |*state|, or NULL for StackFrame::NONE.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* singleton = SingletonFor(type);
  // Only the NONE type maps to a missing singleton.
  DCHECK((!singleton) == (type == StackFrame::NONE));
  if (singleton) singleton->state_ = *state;
  return singleton;
}
|
|
|
|
|
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
// Maps a frame type to the iterator's embedded singleton frame object;
// returns NULL for NONE and for unknown types.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return NULL;

#undef FRAME_TYPE_CASE
}
|
|
|
|
|
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
2016-02-23 19:38:50 +00:00
|
|
|
// Positions the iterator on the JavaScript frame with the given id.
// If no frame matches, the iterator ends up in the done() state.
JavaScriptFrameIterator::JavaScriptFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Steps the underlying iterator, skipping every non-JavaScript frame.
void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}
|
|
|
|
|
|
|
|
|
|
|
|
void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
|
|
|
|
if (!frame()->has_adapted_arguments()) return;
|
|
|
|
iterator_.Advance();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(iterator_.frame()->is_arguments_adaptor());
|
2013-06-24 08:38:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
Simplify isolates access during stack iteration (WAS: Move SafeStackFrameIterator::active_count_...)
While trying to fix Mac and Windows versions for this change:
http://codereview.chromium.org/6771047/, I figured out, that we
already store an isolate in StackFrameIterator, so we can use it in
frame objects, instead of requiring it from caller.
I've changed iterators usage to the following scheme: whenever a
caller maintains an isolate pointer, it just passes it to stack
iterator, and no more worries about passing it to frame content
accessors. If a caller uses current isolate, it can omit passing it
to iterator, in this case, an iterator will use the current isolate,
too.
There was a special case with LiveEdit, which creates
detached copies of frame objects.
R=vitalyr@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/6794019
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7499 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2011-04-05 09:01:47 +00:00
|
|
|
// Starts iteration and immediately skips to the first frame that should
// appear in a user-visible stack trace.
StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}
|
|
|
|
|
|
|
|
|
2009-03-03 11:56:44 +00:00
|
|
|
// Steps forward, skipping frames that don't belong in a stack trace.
void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// A frame is shown in stack traces if it is a JavaScript frame whose
// function comes from a real (non-native) script, or if it is a wasm frame.
bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
    if (!js_frame->function()->IsJSFunction()) return false;
    Object* script = js_frame->function()->shared()->script();
    // Don't show functions from native scripts to user.
    return (script->IsScript() &&
            Script::TYPE_NATIVE != Script::cast(script)->type());
  }
  // apart from javascript, only wasm is valid
  return frame->is_wasm();
}
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2009-03-03 11:56:44 +00:00
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
// Profiler-safe iterator: may be constructed while the VM is interrupted at
// an arbitrary instruction, so it validates every address before use and is
// created with heap access disabled.
SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    // Trust the thread-local top: start from the saved C entry frame.
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK(fp != NULL);
    // Reconstruct the frame state from the sampled fp/sp.
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    // kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame anyways will be skipped.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    // Neither the thread-local top nor the sampled fp looks valid; give up
    // and leave the iterator in the done() state.
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (frame_) Advance();
}
|
|
|
|
|
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
// Checks whether the thread-local top describes a usable exit frame to
// start iteration from.
bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}
|
|
|
|
|
|
|
|
|
2013-06-25 07:14:06 +00:00
|
|
|
void SafeStackFrameIterator::AdvanceOneFrame() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!done());
|
2013-06-27 09:34:31 +00:00
|
|
|
StackFrame* last_frame = frame_;
|
2009-03-20 14:49:12 +00:00
|
|
|
Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
|
2013-06-27 09:34:31 +00:00
|
|
|
// Before advancing to the next stack frame, perform pointer validity tests.
|
|
|
|
if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
|
|
|
|
frame_ = NULL;
|
|
|
|
return;
|
|
|
|
}
|
2009-03-20 14:49:12 +00:00
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
// Advance to the previous frame.
|
|
|
|
StackFrame::State state;
|
|
|
|
StackFrame::Type type = frame_->GetCallerState(&state);
|
|
|
|
frame_ = SingletonFor(type, &state);
|
2016-04-06 08:56:25 +00:00
|
|
|
if (!frame_) return;
|
2013-06-27 09:34:31 +00:00
|
|
|
|
|
|
|
// Check that we have actually moved to the previous frame in the stack.
|
|
|
|
if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
|
|
|
|
frame_ = NULL;
|
|
|
|
}
|
2009-03-20 14:49:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// A frame is plausible if both its sp and fp fall inside the stack bounds.
bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Validates the data the frame's GetCallerState computation will read,
// then checks that the computed caller state itself looks sane.
bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on stack as Smi. We need to check
    // that it really an Smi.
    Object* argc_slot = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!argc_slot->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != NULL;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Checks that fp plausibly points at an exit frame: fp and the derived sp
// are within stack bounds and the saved pc slot is non-null.
bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  return *state.pc_address != NULL;
}
|
|
|
|
|
|
|
|
|
2013-06-25 07:14:06 +00:00
|
|
|
void SafeStackFrameIterator::Advance() {
|
2009-03-03 11:56:44 +00:00
|
|
|
while (true) {
|
2013-06-25 07:14:06 +00:00
|
|
|
AdvanceOneFrame();
|
2016-02-04 20:00:28 +00:00
|
|
|
if (done()) break;
|
|
|
|
ExternalCallbackScope* last_callback_scope = NULL;
|
|
|
|
while (external_callback_scope_ != NULL &&
|
|
|
|
external_callback_scope_->scope_address() < frame_->fp()) {
|
|
|
|
// As long as the setup of a frame is not atomic, we may happen to be
|
|
|
|
// in an interval where an ExternalCallbackScope is already created,
|
|
|
|
// but the frame is not yet entered. So we are actually observing
|
|
|
|
// the previous frame.
|
|
|
|
// Skip all the ExternalCallbackScope's that are below the current fp.
|
|
|
|
last_callback_scope = external_callback_scope_;
|
|
|
|
external_callback_scope_ = external_callback_scope_->previous();
|
|
|
|
}
|
|
|
|
if (frame_->is_java_script()) break;
|
|
|
|
if (frame_->is_exit()) {
|
2013-07-23 15:01:38 +00:00
|
|
|
// Some of the EXIT frames may have ExternalCallbackScope allocated on
|
|
|
|
// top of them. In that case the scope corresponds to the first EXIT
|
|
|
|
// frame beneath it. There may be other EXIT frames on top of the
|
|
|
|
// ExternalCallbackScope, just skip them as we cannot collect any useful
|
|
|
|
// information about them.
|
2016-02-04 20:00:28 +00:00
|
|
|
if (last_callback_scope) {
|
2015-10-28 13:42:23 +00:00
|
|
|
frame_->state_.pc_address =
|
2016-02-04 20:00:28 +00:00
|
|
|
last_callback_scope->callback_entrypoint_address();
|
2013-07-23 15:01:38 +00:00
|
|
|
}
|
2016-02-04 20:00:28 +00:00
|
|
|
break;
|
2013-07-23 15:01:38 +00:00
|
|
|
}
|
2009-03-03 11:56:44 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-25 07:14:06 +00:00
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
Simplify isolates access during stack iteration (WAS: Move SafeStackFrameIterator::active_count_...)
While trying to fix Mac and Windows versions for this change:
http://codereview.chromium.org/6771047/, I figured out, that we
already store an isolate in StackFrameIterator, so we can use it in
frame objects, instead of requiring it from caller.
I've changed iterators usage to the following scheme: whenever a
caller maintains an isolate pointer, it just passes it to stack
iterator, and no more worries about passing it to frame content
accessors. If a caller uses current isolate, it can omit passing it
to iterator, in this case, an iterator will use the current isolate,
too.
There was a special case with LiveEdit, which creates
detached copies of frame objects.
R=vitalyr@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/6794019
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7499 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2011-04-05 09:01:47 +00:00
|
|
|
// Looks up (and caches) the safepoint entry and stack slot count for the
// code object containing |inner_pointer|; returns that code object.
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* cache_entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!cache_entry->safepoint_entry.is_valid()) {
    // First lookup for this pc: compute and cache the safepoint entry.
    cache_entry->safepoint_entry =
        cache_entry->code->GetSafepointEntry(inner_pointer);
    DCHECK(cache_entry->safepoint_entry.is_valid());
  } else {
    // Cached value must agree with a fresh computation.
    DCHECK(cache_entry->safepoint_entry.Equals(
        cache_entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = cache_entry->code;
  *safepoint_entry = cache_entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}
|
|
|
|
|
|
|
|
|
2011-09-20 13:36:52 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
static bool GcSafeCodeContains(HeapObject* object, Address addr);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
2015-06-04 14:44:00 +00:00
|
|
|
// Visits the code object holding the frame's pc. If the visitor moved the
// code object (e.g. during GC), rewrites the pc (and, when embedded constant
// pools are enabled, the constant pool pointer) to point into the new copy.
void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(GcSafeCodeContains(holder, pc));
  // Remember the pc as an offset so it can be relocated with the code.
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    // The code object moved; retarget the pc into the relocated copy.
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
    if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
      *constant_pool_address = holder->constant_pool();
    }
  }
}
|
|
|
|
|
|
|
|
|
2012-02-27 15:15:53 +00:00
|
|
|
// Installs the process-wide return-address resolver. May only be set once.
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}
|
|
|
|
|
2016-02-26 11:04:04 +00:00
|
|
|
// Returns true if |pc| lies inside either of the two interpreter builtins
// (entry trampoline or bytecode dispatch), i.e. the frame is interpreted.
static bool IsInterpreterFramePc(Isolate* isolate, Address pc) {
  Code* entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  return (pc >= entry_trampoline->instruction_start() &&
          pc < entry_trampoline->instruction_end()) ||
         (pc >= bytecode_dispatch->instruction_start() &&
          pc < bytecode_dispatch->instruction_end());
}
|
2012-02-27 15:15:53 +00:00
|
|
|
|
2013-06-27 09:34:31 +00:00
|
|
|
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
|
2013-06-25 10:09:19 +00:00
|
|
|
State* state) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(state->fp != NULL);
|
2015-09-24 12:51:30 +00:00
|
|
|
|
2016-03-08 10:02:29 +00:00
|
|
|
#if defined(USE_SIMULATOR)
|
|
|
|
MSAN_MEMORY_IS_INITIALIZED(
|
|
|
|
state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
|
|
|
|
kPointerSize);
|
|
|
|
#endif
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
Object* marker = Memory::Object_at(
|
|
|
|
state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
|
2015-09-24 12:51:30 +00:00
|
|
|
if (!iterator->can_access_heap_objects_) {
|
|
|
|
// TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
|
|
|
|
// means that we are being called from the profiler, which can interrupt
|
|
|
|
// the VM with a signal at any arbitrary instruction, with essentially
|
|
|
|
// anything on the stack. So basically none of these checks are 100%
|
|
|
|
// reliable.
|
2016-02-17 09:07:58 +00:00
|
|
|
#if defined(USE_SIMULATOR)
|
|
|
|
MSAN_MEMORY_IS_INITIALIZED(
|
2016-03-08 10:02:29 +00:00
|
|
|
state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
|
2016-02-17 09:07:58 +00:00
|
|
|
#endif
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
Object* maybe_function =
|
|
|
|
Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
|
|
|
|
if (!marker->IsSmi()) {
|
|
|
|
if (maybe_function->IsSmi()) {
|
|
|
|
return NONE;
|
|
|
|
} else if (FLAG_ignition && IsInterpreterFramePc(iterator->isolate(),
|
|
|
|
*(state->pc_address))) {
|
|
|
|
return INTERPRETED;
|
|
|
|
} else {
|
|
|
|
return JAVA_SCRIPT;
|
|
|
|
}
|
2015-09-24 12:51:30 +00:00
|
|
|
}
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
} else {
|
|
|
|
// Look up the code object to figure out the type of the stack frame.
|
|
|
|
Code* code_obj =
|
|
|
|
GetContainingCode(iterator->isolate(), *(state->pc_address));
|
|
|
|
if (code_obj != nullptr) {
|
|
|
|
if (code_obj->is_interpreter_entry_trampoline() ||
|
|
|
|
code_obj->is_interpreter_enter_bytecode_dispatch()) {
|
|
|
|
return INTERPRETED;
|
|
|
|
}
|
|
|
|
switch (code_obj->kind()) {
|
2016-03-22 13:25:05 +00:00
|
|
|
case Code::BUILTIN:
|
|
|
|
if (marker->IsSmi()) break;
|
|
|
|
// We treat frames for BUILTIN Code objects as OptimizedFrame for now
|
|
|
|
// (all the builtins with JavaScript linkage are actually generated
|
|
|
|
// with TurboFan currently, so this is sound).
|
|
|
|
return OPTIMIZED;
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
case Code::FUNCTION:
|
|
|
|
return JAVA_SCRIPT;
|
|
|
|
case Code::OPTIMIZED_FUNCTION:
|
|
|
|
return OPTIMIZED;
|
|
|
|
case Code::WASM_FUNCTION:
|
|
|
|
return WASM;
|
|
|
|
case Code::WASM_TO_JS_FUNCTION:
|
|
|
|
return WASM_TO_JS;
|
|
|
|
case Code::JS_TO_WASM_FUNCTION:
|
|
|
|
return JS_TO_WASM;
|
|
|
|
default:
|
|
|
|
// All other types should have an explicit marker
|
|
|
|
break;
|
|
|
|
}
|
2015-09-24 12:51:30 +00:00
|
|
|
} else {
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
return NONE;
|
2015-09-24 12:51:30 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
2015-09-24 12:51:30 +00:00
|
|
|
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
DCHECK(marker->IsSmi());
|
|
|
|
StackFrame::Type candidate =
|
|
|
|
static_cast<StackFrame::Type>(Smi::cast(marker)->value());
|
|
|
|
switch (candidate) {
|
|
|
|
case ENTRY:
|
|
|
|
case ENTRY_CONSTRUCT:
|
|
|
|
case EXIT:
|
|
|
|
case STUB:
|
|
|
|
case STUB_FAILURE_TRAMPOLINE:
|
|
|
|
case INTERNAL:
|
|
|
|
case CONSTRUCT:
|
|
|
|
case ARGUMENTS_ADAPTOR:
|
|
|
|
case WASM_TO_JS:
|
|
|
|
case WASM:
|
2016-04-17 16:30:32 +00:00
|
|
|
return candidate;
|
|
|
|
case JS_TO_WASM:
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
case JAVA_SCRIPT:
|
|
|
|
case OPTIMIZED:
|
|
|
|
case INTERPRETED:
|
|
|
|
default:
|
|
|
|
// Unoptimized and optimized JavaScript frames, including
|
|
|
|
// interpreted frames, should never have a StackFrame::Type
|
|
|
|
// marker. If we find one, we're likely being called from the
|
|
|
|
// profiler in a bogus stack frame.
|
|
|
|
return NONE;
|
2010-12-07 11:31:57 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-25 10:09:19 +00:00
|
|
|
#ifdef DEBUG
// Debug-only query: whether the iterator that produced this frame is allowed
// to dereference heap objects. This is false for "safe" iteration, e.g. when
// the profiler interrupts the VM at an arbitrary instruction.
bool StackFrame::can_access_heap_objects() const {
  const bool heap_access_allowed = iterator_->can_access_heap_objects_;
  return heap_access_allowed;
}
#endif
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2009-03-20 14:49:12 +00:00
|
|
|
// Fills |state| with the register state of this frame's caller and returns
// the type of the frame that state describes.
StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  StackFrame::Type caller_type = ComputeType(iterator_, state);
  return caller_type;
}
|
|
|
|
|
|
|
|
|
2012-06-12 10:22:33 +00:00
|
|
|
// The frame pointer without any architecture-specific padding; in this
// configuration no padding is applied, so it is fp() itself.
Address StackFrame::UnpaddedFP() const { return fp(); }
|
|
|
|
|
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
// Entry frames always execute the JS entry code object held by the heap, so
// no per-frame lookup is required.
Code* EntryFrame::unchecked_code() const {
  return this->isolate()->heap()->js_entry_code();
}
|
|
|
|
|
|
|
|
|
2009-03-20 14:49:12 +00:00
|
|
|
// Computes the caller's register state by delegating to GetCallerState,
// which reads the caller fp saved in this entry frame and classifies it via
// ExitFrame::GetStateForFramePointer; the returned frame type is discarded.
void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}
|
|
|
|
|
|
|
|
|
2010-04-06 17:58:28 +00:00
|
|
|
// Overwrites the caller frame pointer slot stored inside this entry frame.
void EntryFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(this->fp() + EntryFrameConstants::kCallerFPOffset) =
      caller_fp;
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// Reads the caller fp that the entry stub saved in this frame and lets
// ExitFrame classify it: a zero fp yields NONE, anything else is an exit
// frame (see ExitFrame::GetStateForFramePointer).
StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  Address caller_fp = Memory::Address_at(
      this->fp() + EntryFrameConstants::kCallerFPOffset);
  return ExitFrame::GetStateForFramePointer(caller_fp, state);
}
|
|
|
|
|
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
// Construct-entry frames always execute the JS construct entry code object
// held by the heap.
Code* EntryConstructFrame::unchecked_code() const {
  return this->isolate()->heap()->js_construct_entry_code();
}
|
|
|
|
|
|
|
|
|
2009-11-04 08:51:48 +00:00
|
|
|
// Returns a writable reference to this exit frame's code slot, so callers
// (e.g. the GC visitor in Iterate) can both read and update the stored
// Code object pointer.
Object*& ExitFrame::code_slot() const {
  return Memory::Object_at(fp() + ExitFrameConstants::kCodeOffset);
}
|
|
|
|
|
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
Code* ExitFrame::unchecked_code() const {
  // "Unchecked": the slot contents are cast without verifying that they
  // actually form a valid Code object.
  Object* raw_code = code_slot();
  return reinterpret_cast<Code*>(raw_code);
}
|
|
|
|
|
|
|
|
|
2009-03-20 14:49:12 +00:00
|
|
|
// Reconstructs the caller's register state from the values the exit
// sequence saved into this frame: sp, fp, the return-address location, and
// (when embedded constant pools are enabled) the constant pool location.
// Note: state->constant_pool_address is left untouched when the flag is off.
void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}
|
|
|
|
|
|
|
|
|
2010-04-06 17:58:28 +00:00
|
|
|
// Stores |caller_fp| into the caller-fp slot of this exit frame.
void ExitFrame::SetCallerFp(Address caller_fp) {
  const int fp_slot_offset = ExitFrameConstants::kCallerFPOffset;
  Memory::Address_at(fp() + fp_slot_offset) = caller_fp;
}
|
|
|
|
|
|
|
|
|
2010-08-30 08:54:43 +00:00
|
|
|
// Visits the heap pointers held by this exit frame: the pc location (via
// IteratePc, paired with the frame's Code object) and the code slot itself.
void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitPointer(&code_slot());
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// The caller's stack pointer sits at a fixed offset above this exit frame's
// frame pointer.
Address ExitFrame::GetCallerStackPointer() const {
  const int caller_sp_offset = ExitFrameConstants::kCallerSPOffset;
  return fp() + caller_sp_offset;
}
|
|
|
|
|
|
|
|
|
2010-09-16 08:23:34 +00:00
|
|
|
// Fills |state| for the frame at |fp| and returns its type. A zero frame
// pointer marks the end of the stack and yields NONE; any other fp reached
// this way is, by construction, an exit frame.
StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  FillState(fp, ComputeStackPointer(fp), state);
  DCHECK(*state->pc_address != NULL);
  return EXIT;
}
|
|
|
|
|
|
|
|
|
2013-06-24 13:27:48 +00:00
|
|
|
// Loads the stack pointer that the exit sequence recorded in the exit frame
// rooted at |fp|.
Address ExitFrame::ComputeStackPointer(Address fp) {
  const Address sp_slot = fp + ExitFrameConstants::kSPOffset;
  return Memory::Address_at(sp_slot);
}
|
|
|
|
|
|
|
|
|
2010-09-16 08:23:34 +00:00
|
|
|
// Populates |state| for an exit frame whose fp and sp are already known.
// The pc location is the return-address slot, one pc-sized word below sp.
void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->fp = fp;
  state->sp = sp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  // The constant pool recorded in the exit frame is not associated with the
  // pc stored here (a return address into a C entry stub), so none is
  // published; ComputeCallerState later retrieves the constant pool together
  // with the matching caller pc.
  state->constant_pool_address = NULL;
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Base implementation of frame summarization: a plain standard frame
// contributes no frame summaries; subclasses override this to append theirs.
void StandardFrame::Summarize(List<FrameSummary>* functions) const {
  // Callers are expected to pass in an empty list.
  DCHECK(functions->length() == 0);
}
|
|
|
|
|
|
|
|
// Base implementation: a plain standard frame has no associated JSFunction.
// JS and WASM frame subclasses override this.
JSFunction* StandardFrame::function() const { return nullptr; }
|
|
|
|
|
|
|
|
// Base implementation: a plain standard frame has no receiver, so report
// the undefined value.
Object* StandardFrame::receiver() const {
  return this->isolate()->heap()->undefined_value();
}
|
2010-09-16 08:23:34 +00:00
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// Address of the n-th expression stack slot. Slot 0 lives at the fixed
// expressions offset below fp; larger indices move further down the stack
// (the stack grows downwards).
Address StandardFrame::GetExpressionAddress(int n) const {
  return fp() + StandardFrameConstants::kExpressionsOffset - n * kPointerSize;
}
|
|
|
|
|
2016-02-16 15:33:54 +00:00
|
|
|
// Same as StandardFrame::GetExpressionAddress, but interpreted frames keep
// their expression stack at a different fixed offset from fp.
Address InterpretedFrame::GetExpressionAddress(int n) const {
  return fp() + InterpreterFrameConstants::kExpressionsOffset -
         n * kPointerSize;
}
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
int StandardFrame::ComputeExpressionsCount() const {
|
2016-02-16 15:33:54 +00:00
|
|
|
Address base = GetExpressionAddress(0);
|
|
|
|
Address limit = sp() - kPointerSize;
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(base >= limit); // stack grows downwards
|
2008-07-03 15:10:15 +00:00
|
|
|
// Include register-allocated locals in number of expressions.
|
2009-11-11 09:50:06 +00:00
|
|
|
return static_cast<int>((base - limit) / kPointerSize);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-20 14:49:12 +00:00
|
|
|
// Default caller-state computation for standard frames: the caller's sp and
// fp come from this frame's saved values, while the pc and constant-pool
// locations are derived from this frame's fp via the Compute* helpers.
void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}
|
|
|
|
|
|
|
|
|
2010-04-06 17:58:28 +00:00
|
|
|
// Stores |caller_fp| into the caller-fp slot of this standard frame.
void StandardFrame::SetCallerFp(Address caller_fp) {
  const int fp_slot_offset = StandardFrameConstants::kCallerFPOffset;
  Memory::Address_at(fp() + fp_slot_offset) = caller_fp;
}
|
|
|
|
|
|
|
|
|
2012-12-18 16:25:45 +00:00
|
|
|
void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
|
2010-12-07 11:31:57 +00:00
|
|
|
// Make sure that we're not doing "safe" stack frame iteration. We cannot
|
|
|
|
// possibly find pointers in optimized frames in that state.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(can_access_heap_objects());
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
// Compute the safepoint information.
|
|
|
|
unsigned stack_slots = 0;
|
2011-01-12 14:14:14 +00:00
|
|
|
SafepointEntry safepoint_entry;
|
2010-12-07 11:31:57 +00:00
|
|
|
Code* code = StackFrame::GetSafepointData(
|
Simplify isolates access during stack iteration (WAS: Move SafeStackFrameIterator::active_count_...)
While trying to fix Mac and Windows versions for this change:
http://codereview.chromium.org/6771047/, I figured out, that we
already store an isolate in StackFrameIterator, so we can use it in
frame objects, instead of requiring it from caller.
I've changed iterators usage to the following scheme: whenever a
caller maintains an isolate pointer, it just passes it to stack
iterator, and no more worries about passing it to frame content
accessors. If a caller uses current isolate, it can omit passing it
to iterator, in this case, an iterator will use the current isolate,
too.
There was a special case with LiveEdit, which creates
detached copies of frame objects.
R=vitalyr@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/6794019
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7499 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2011-04-05 09:01:47 +00:00
|
|
|
isolate(), pc(), &safepoint_entry, &stack_slots);
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
unsigned slot_space = stack_slots * kPointerSize;
|
|
|
|
|
|
|
|
// Determine the fixed header and spill slot area size.
|
|
|
|
int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
|
2016-03-09 14:36:24 +00:00
|
|
|
Object* marker =
|
|
|
|
Memory::Object_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
if (marker->IsSmi()) {
|
|
|
|
StackFrame::Type candidate =
|
|
|
|
static_cast<StackFrame::Type>(Smi::cast(marker)->value());
|
|
|
|
switch (candidate) {
|
|
|
|
case ENTRY:
|
|
|
|
case ENTRY_CONSTRUCT:
|
|
|
|
case EXIT:
|
|
|
|
case STUB_FAILURE_TRAMPOLINE:
|
|
|
|
case ARGUMENTS_ADAPTOR:
|
|
|
|
case STUB:
|
|
|
|
case INTERNAL:
|
|
|
|
case CONSTRUCT:
|
|
|
|
case JS_TO_WASM:
|
|
|
|
case WASM_TO_JS:
|
|
|
|
case WASM:
|
|
|
|
frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
|
|
|
|
break;
|
|
|
|
case JAVA_SCRIPT:
|
|
|
|
case OPTIMIZED:
|
|
|
|
case INTERPRETED:
|
|
|
|
// These frame types have a context, but they are actually stored
|
|
|
|
// in the place on the stack that one finds the frame type.
|
|
|
|
UNREACHABLE();
|
|
|
|
break;
|
|
|
|
case NONE:
|
|
|
|
case NUMBER_OF_TYPES:
|
|
|
|
case MANUAL:
|
|
|
|
UNREACHABLE();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
slot_space -=
|
|
|
|
(frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
|
|
|
|
Object** frame_header_limit = &Memory::Object_at(fp());
|
2010-12-07 11:31:57 +00:00
|
|
|
Object** parameters_base = &Memory::Object_at(sp());
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
Object** parameters_limit = frame_header_base - slot_space / kPointerSize;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2011-01-12 14:14:14 +00:00
|
|
|
// Visit the parameters that may be on top of the saved registers.
|
|
|
|
if (safepoint_entry.argument_count() > 0) {
|
|
|
|
v->VisitPointers(parameters_base,
|
|
|
|
parameters_base + safepoint_entry.argument_count());
|
|
|
|
parameters_base += safepoint_entry.argument_count();
|
|
|
|
}
|
|
|
|
|
2011-01-17 12:52:06 +00:00
|
|
|
// Skip saved double registers.
|
2011-01-14 15:08:44 +00:00
|
|
|
if (safepoint_entry.has_doubles()) {
|
2013-01-23 16:29:48 +00:00
|
|
|
// Number of doubles not known at snapshot time.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!isolate()->serializer_enabled());
|
2015-10-22 13:24:31 +00:00
|
|
|
parameters_base +=
|
|
|
|
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
|
|
|
|
->num_allocatable_double_registers() *
|
|
|
|
kDoubleSize / kPointerSize;
|
2011-01-14 15:08:44 +00:00
|
|
|
}
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Visit the registers that contain pointers if any.
|
2011-01-12 14:14:14 +00:00
|
|
|
if (safepoint_entry.HasRegisters()) {
|
2010-12-07 11:31:57 +00:00
|
|
|
for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
|
2011-01-12 14:14:14 +00:00
|
|
|
if (safepoint_entry.HasRegisterAt(i)) {
|
2010-12-07 11:31:57 +00:00
|
|
|
int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
|
|
|
|
v->VisitPointer(parameters_base + reg_stack_index);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Skip the words containing the register values.
|
|
|
|
parameters_base += kNumSafepointRegisters;
|
|
|
|
}
|
|
|
|
|
|
|
|
// We're done dealing with the register bits.
|
2011-01-12 14:14:14 +00:00
|
|
|
uint8_t* safepoint_bits = safepoint_entry.bits();
|
|
|
|
safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
// Visit the rest of the parameters.
|
2016-03-10 15:13:34 +00:00
|
|
|
if (!is_js_to_wasm() && !is_wasm()) {
|
|
|
|
// Non-WASM frames have tagged values as parameters.
|
|
|
|
v->VisitPointers(parameters_base, parameters_limit);
|
|
|
|
}
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
// Visit pointer spill slots and locals.
|
|
|
|
for (unsigned index = 0; index < stack_slots; index++) {
|
|
|
|
int byte_index = index >> kBitsPerByteLog2;
|
|
|
|
int bit_index = index & (kBitsPerByte - 1);
|
2011-01-12 14:14:14 +00:00
|
|
|
if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
|
2010-12-07 11:31:57 +00:00
|
|
|
v->VisitPointer(parameters_limit + index);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-12-18 16:25:45 +00:00
|
|
|
// Visit the return address in the callee and incoming arguments.
|
2015-06-04 14:44:00 +00:00
|
|
|
IteratePc(v, pc_address(), constant_pool_address(), code);
|
2013-02-26 14:24:16 +00:00
|
|
|
|
2016-03-04 04:45:22 +00:00
|
|
|
if (!is_wasm() && !is_wasm_to_js()) {
|
|
|
|
// Visit the context in stub frame and JavaScript frame.
|
|
|
|
// Visit the function in JavaScript frame.
|
[runtime] Unify and simplify how frames are marked
Before this CL, various code stubs used different techniques
for marking their frames to enable stack-crawling and other
access to data in the frame. All of them were based on a abuse
of the "standard" frame representation, e.g. storing the a
context pointer immediately below the frame's fp, and a
function pointer after that. Although functional, this approach
tends to make stubs and builtins do an awkward, unnecessary
dance to appear like standard frames, even if they have
nothing to do with JavaScript execution.
This CL attempts to improve this by:
* Ensuring that there are only two fundamentally different
types of frames, a "standard" frame and a "typed" frame.
Standard frames, as before, contain both a context and
function pointer. Typed frames contain only a minimum
of a smi marker in the position immediately below the fp
where the context is in standard frames.
* Only interpreted, full codegen, and optimized Crankshaft and
TurboFan JavaScript frames use the "standard" format. All
other frames use the type frame format with an explicit
marker.
* Typed frames can contain one or more values below the
type marker. There is new magic macro machinery in
frames.h that simplifies defining the offsets of these fields
in typed frames.
* A new flag in the CallDescriptor enables specifying whether
a frame is a standard frame or a typed frame. Secondary
register location spilling is now only enabled for standard
frames.
* A zillion places in the code have been updated to deal with
the fact that most code stubs and internal frames use the
typed frame format. This includes changes in the
deoptimizer, debugger, and liveedit.
* StandardFrameConstants::kMarkerOffset is deprecated,
(CommonFrameConstants::kContextOrFrameTypeOffset
and StandardFrameConstants::kFrameOffset are now used
in its stead).
LOG=N
Review URL: https://codereview.chromium.org/1696043002
Cr-Commit-Position: refs/heads/master@{#34571}
2016-03-08 08:35:44 +00:00
|
|
|
v->VisitPointers(frame_header_base, frame_header_limit);
|
2016-03-04 04:45:22 +00:00
|
|
|
}
|
2012-12-18 16:25:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Visits all heap pointers reachable from a stub frame by delegating to the
// generic compiled-frame iteration (spill slots, safepoint registers, pc).
void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns the Code object containing this frame's pc. Stubs are not tied to
// a JSFunction, so the code object is recovered by a pc-based lookup.
Code* StubFrame::unchecked_code() const {
  Object* code_object = isolate()->FindCodeObject(pc());
  return static_cast<Code*>(code_object);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Computes the caller's stack pointer from this frame's fp using the exit
// frame layout constant for the caller SP.
Address StubFrame::GetCallerStackPointer() const {
  const int caller_sp_offset = ExitFrameConstants::kCallerSPOffset;
  return fp() + caller_sp_offset;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Stub frames carry no incoming JavaScript arguments.
int StubFrame::GetNumberOfIncomingArguments() const { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
// Visits all heap pointers in an optimized frame via the shared
// compiled-frame iteration (safepoint-described slots and registers).
void OptimizedFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}
|
|
|
|
|
|
|
|
|
2012-12-11 23:27:38 +00:00
|
|
|
void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
|
|
|
|
Memory::Object_at(GetParameterSlot(index)) = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
bool JavaScriptFrame::IsConstructor() const {
|
2008-10-10 09:09:38 +00:00
|
|
|
Address fp = caller_fp();
|
|
|
|
if (has_adapted_arguments()) {
|
|
|
|
// Skip the arguments adaptor frame and look at the real caller.
|
|
|
|
fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
|
|
|
|
}
|
|
|
|
return IsConstructFrame(fp);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-11-03 16:19:58 +00:00
|
|
|
bool JavaScriptFrame::HasInlinedFrames() const {
|
2015-09-16 11:32:54 +00:00
|
|
|
List<JSFunction*> functions(1);
|
|
|
|
GetFunctions(&functions);
|
|
|
|
return functions.length() > 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-06-29 13:02:00 +00:00
|
|
|
int JavaScriptFrame::GetArgumentsLength() const {
|
|
|
|
// If there is an arguments adaptor frame get the arguments length from it.
|
|
|
|
if (has_adapted_arguments()) {
|
2016-02-16 15:33:54 +00:00
|
|
|
return ArgumentsAdaptorFrame::GetLength(caller_fp());
|
2011-06-29 13:02:00 +00:00
|
|
|
} else {
|
|
|
|
return GetNumberOfIncomingArguments();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
// Returns the code attached to this frame's function without validating
// that it actually contains the current pc.
Code* JavaScriptFrame::unchecked_code() const {
  JSFunction* fun = function();
  return fun->code();
}
|
|
|
|
|
|
|
|
|
2011-04-06 14:23:27 +00:00
|
|
|
int JavaScriptFrame::GetNumberOfIncomingArguments() const {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(can_access_heap_objects() &&
|
2011-04-06 14:23:27 +00:00
|
|
|
isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
|
|
|
|
|
2015-02-11 09:47:32 +00:00
|
|
|
return function()->shared()->internal_formal_parameter_count();
|
2011-04-06 14:23:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-08-30 08:54:43 +00:00
|
|
|
// Computes the caller's stack pointer from this frame's fp using the
// standard frame layout constant for the caller SP.
Address JavaScriptFrame::GetCallerStackPointer() const {
  const int caller_sp_offset = StandardFrameConstants::kCallerSPOffset;
  return fp() + caller_sp_offset;
}
|
|
|
|
|
|
|
|
|
2015-11-03 16:19:58 +00:00
|
|
|
// Appends the single function executing in this frame. Unoptimized frames
// never contain inlined functions, so the list gets exactly one entry.
void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  JSFunction* fun = function();
  functions->Add(fun);
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Produces a single FrameSummary describing this frame: its receiver,
// function, code object, and the current pc as an offset into that code.
void JavaScriptFrame::Summarize(List<FrameSummary>* functions) const {
  DCHECK(functions->length() == 0);
  Code* frame_code = LookupCode();
  AbstractCode* abstract = AbstractCode::cast(frame_code);
  int pc_offset = static_cast<int>(pc() - frame_code->instruction_start());
  functions->Add(FrameSummary(receiver(), function(), abstract, pc_offset,
                              IsConstructor()));
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Reads the JSFunction stored in this frame's function slot.
JSFunction* JavaScriptFrame::function() const {
  Object* raw = function_slot_object();
  return JSFunction::cast(raw);
}
|
|
|
|
|
|
|
|
// The receiver lives one slot before the first parameter (index -1).
Object* JavaScriptFrame::receiver() const {
  return GetParameter(-1);
}
|
|
|
|
|
2015-05-29 10:05:22 +00:00
|
|
|
int JavaScriptFrame::LookupExceptionHandlerInTable(
|
2016-02-05 13:51:42 +00:00
|
|
|
int* stack_depth, HandlerTable::CatchPrediction* prediction) {
|
2015-03-25 13:13:51 +00:00
|
|
|
Code* code = LookupCode();
|
|
|
|
DCHECK(!code->is_optimized_code());
|
|
|
|
HandlerTable* table = HandlerTable::cast(code->handler_table());
|
|
|
|
int pc_offset = static_cast<int>(pc() - code->entry());
|
2016-02-05 13:51:42 +00:00
|
|
|
return table->LookupRange(pc_offset, stack_depth, prediction);
|
2015-03-25 13:13:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-08 08:28:08 +00:00
|
|
|
// Prints "<name>+<offset>" for the given function/code/pc to |file|,
// prefixed with '*' for optimized code or '~' otherwise. When
// |print_line_number| is set, also appends " at <script>:<line>" resolved
// from the code's source position, or "<unknown>" parts when the script or
// its name is unavailable.
void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
                                             Address pc, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  int code_offset = static_cast<int>(pc - code->instruction_start());
  PrintF(file, "+%d", code_offset);
  if (!print_line_number) return;

  SharedFunctionInfo* shared = function->shared();
  int source_pos = code->SourcePosition(code_offset);
  Object* maybe_script = shared->script();
  if (!maybe_script->IsScript()) {
    PrintF(file, " at <unknown>:<unknown>");
    return;
  }

  Script* script = Script::cast(maybe_script);
  int line = script->GetLineNumber(source_pos) + 1;
  Object* script_name_raw = script->name();
  if (script_name_raw->IsString()) {
    String* script_name = String::cast(script->name());
    base::SmartArrayPointer<char> c_script_name =
        script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
    PrintF(file, " at %s:%d", c_script_name.get(), line);
  } else {
    PrintF(file, " at <unknown>:%d", line);
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prints a one-line description of the top-most JavaScript frame to |file|:
// optional "new " prefix for constructor calls, the function name and code
// offset, and (when |print_args| is set) the receiver and the actually
// supplied arguments.
void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    if (!it.frame()->is_java_script()) continue;

    JavaScriptFrame* frame = it.frame();
    if (frame->IsConstructor()) PrintF(file, "new ");
    PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
                           frame->pc(), file, print_line_number);
    if (print_args) {
      // function arguments
      // (we are intentionally only printing the actually
      // supplied parameters, not all parameters required)
      PrintF(file, "(this=");
      frame->receiver()->ShortPrint(file);
      const int argc = frame->ComputeParametersCount();
      for (int i = 0; i < argc; i++) {
        PrintF(file, ", ");
        frame->GetParameter(i)->ShortPrint(file);
      }
      PrintF(file, ")");
    }
    break;
  }
}
|
|
|
|
|
|
|
|
|
2015-03-25 13:13:51 +00:00
|
|
|
// Copies the frame's operand stack values into |store|, one per slot; the
// size of |store| bounds how many operands are saved (and must not exceed
// the frame's actual operand count).
void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
  int count = store->length();
  DCHECK_LE(count, ComputeOperandsCount());
  for (int slot = 0; slot < count; slot++) {
    store->set(slot, GetOperand(slot));
  }
}
|
|
|
|
|
2016-04-01 09:06:44 +00:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
|
|
|
|
return code->is_turbofanned() && function->shared()->asm_function() &&
|
|
|
|
!FLAG_turbo_asm_deoptimization;
|
|
|
|
}
|
|
|
|
|
|
|
|
} // namespace
|
|
|
|
|
2016-01-28 12:11:55 +00:00
|
|
|
// Summary of one (possibly inlined) JavaScript frame: receiver, function,
// the code object (full-codegen Code or BytecodeArray) plus an offset into
// it, and whether the frame is a constructor call.
FrameSummary::FrameSummary(Object* receiver, JSFunction* function,
                           AbstractCode* abstract_code, int code_offset,
                           bool is_constructor)
    : receiver_(receiver, function->GetIsolate()),
      function_(function),
      abstract_code_(abstract_code),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  // Optimized Code is only acceptable here for asm.js code that can never
  // deoptimize; all other summaries must reference bytecode or unoptimized
  // code.
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
         CannotDeoptFromAsmCode(Code::cast(abstract_code), function));
}
|
2015-08-31 15:04:22 +00:00
|
|
|
|
2016-04-18 13:20:45 +00:00
|
|
|
// Returns the summary of the bottom-most (first) logical frame within the
// given physical frame, accounting for possible inlining.
FrameSummary FrameSummary::GetFirst(JavaScriptFrame* frame) {
  List<FrameSummary> summaries(FLAG_max_inlining_levels + 1);
  frame->Summarize(&summaries);
  return summaries.first();
}
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
void FrameSummary::Print() {
|
|
|
|
PrintF("receiver: ");
|
|
|
|
receiver_->ShortPrint();
|
|
|
|
PrintF("\nfunction: ");
|
|
|
|
function_->shared()->DebugName()->ShortPrint();
|
|
|
|
PrintF("\ncode: ");
|
2016-01-28 12:11:55 +00:00
|
|
|
abstract_code_->ShortPrint();
|
|
|
|
if (abstract_code_->IsCode()) {
|
|
|
|
Code* code = abstract_code_->GetCode();
|
|
|
|
if (code->kind() == Code::FUNCTION) PrintF(" UNOPT ");
|
2016-04-01 09:06:44 +00:00
|
|
|
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
|
|
|
|
DCHECK(CannotDeoptFromAsmCode(code, *function()));
|
|
|
|
PrintF(" ASM ");
|
|
|
|
}
|
2016-01-28 12:11:55 +00:00
|
|
|
} else {
|
|
|
|
PrintF(" BYTECODE ");
|
|
|
|
}
|
|
|
|
PrintF("\npc: %d\n", code_offset_);
|
2010-12-07 11:31:57 +00:00
|
|
|
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Builds FrameSummary entries for every logical (possibly inlined) JS frame
// contained in this optimized frame, by walking the frame's deoptimization
// translation data.
void OptimizedFrame::Summarize(List<FrameSummary>* frames) const {
  DCHECK(frames->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::Summarize(frames);
  }

  // Raw pointers into translation/literal data are held below, so no GC may
  // run while they are in use.
  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode frame_opcode =
      static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, frame_opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  while (jsframe_count != 0) {
    frame_opcode = static_cast<Translation::Opcode>(it.Next());
    if (frame_opcode == Translation::JS_FRAME ||
        frame_opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;
      BailoutId const bailout_id = BailoutId(it.Next());
      SharedFunctionInfo* const shared_info =
          SharedFunctionInfo::cast(literal_array->get(it.Next()));
      it.Next();  // Skip height.

      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      JSFunction* function;
      if (opcode == Translation::LITERAL) {
        function = JSFunction::cast(literal_array->get(it.Next()));
      } else {
        CHECK_EQ(opcode, Translation::STACK_SLOT);
        function = JSFunction::cast(StackSlotAt(it.Next()));
      }
      DCHECK_EQ(shared_info, function->shared());

      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct receiver in the optimized frame.
      Object* receiver;
      if (opcode == Translation::LITERAL) {
        receiver = literal_array->get(it.Next());
      } else if (opcode == Translation::STACK_SLOT) {
        receiver = StackSlotAt(it.Next());
      } else {
        // The receiver is not in a stack slot nor in a literal. We give up.
        it.Skip(Translation::NumberOfOperandsFor(opcode));
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      // Resolve the code object and offset for this logical frame: for
      // full-codegen frames map the BailoutId through the deopt output
      // data; for interpreted frames the BailoutId encodes the bytecode
      // offset directly.
      AbstractCode* abstract_code;

      unsigned code_offset;
      if (frame_opcode == Translation::JS_FRAME) {
        Code* code = shared_info->code();
        DeoptimizationOutputData* const output_data =
            DeoptimizationOutputData::cast(code->deoptimization_data());
        unsigned const entry =
            Deoptimizer::GetOutputInfo(output_data, bailout_id, shared_info);
        code_offset = FullCodeGenerator::PcField::decode(entry);
        abstract_code = AbstractCode::cast(code);
      } else {
        DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
        // BailoutId points to the next bytecode in the bytecode array.
        // Subtract 1 to get the end of current bytecode.
        code_offset = bailout_id.ToInt() - 1;
        abstract_code = AbstractCode::cast(shared_info->bytecode_array());
      }
      FrameSummary summary(receiver, function, abstract_code, code_offset,
                           is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
      DCHECK(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
    }
  }
  DCHECK(!is_constructor);
}
|
|
|
|
|
|
|
|
|
2015-05-29 10:05:22 +00:00
|
|
|
int OptimizedFrame::LookupExceptionHandlerInTable(
|
|
|
|
int* stack_slots, HandlerTable::CatchPrediction* prediction) {
|
2015-03-25 13:13:51 +00:00
|
|
|
Code* code = LookupCode();
|
|
|
|
HandlerTable* table = HandlerTable::cast(code->handler_table());
|
|
|
|
int pc_offset = static_cast<int>(pc() - code->entry());
|
2016-02-05 13:51:42 +00:00
|
|
|
if (stack_slots) *stack_slots = code->stack_slots();
|
2015-05-29 10:05:22 +00:00
|
|
|
return table->LookupReturn(pc_offset, prediction);
|
2015-03-25 13:13:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Returns the deoptimization input data for this optimized frame and stores
// the safepoint's deoptimization index into |*deopt_index|.
DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) const {
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  DCHECK(code != NULL);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  // The pc identifies the safepoint, which carries the deopt index used to
  // select the right translation inside the deoptimization data.
  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  DCHECK(*deopt_index != Safepoint::kNoDeoptimizationIndex);

  return DeoptimizationInputData::cast(code->deoptimization_data());
}
|
|
|
|
|
|
|
|
|
2015-11-03 16:19:58 +00:00
|
|
|
// Collects all functions (including inlined ones) executing in this
// optimized frame by walking the deoptimization translation data.
void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  // Raw pointers into translation/literal data are held below, so no GC may
  // run while they are in use.
  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, opcode);
  it.Next();  // Skip frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    // Skip over operands to advance to the next opcode.
    it.Skip(Translation::NumberOfOperandsFor(opcode));
    if (opcode == Translation::JS_FRAME ||
        opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;

      // The translation commands are ordered and the function is always at the
      // first position.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      Object* function;
      if (opcode == Translation::LITERAL) {
        function = literal_array->get(it.Next());
      } else {
        CHECK_EQ(Translation::STACK_SLOT, opcode);
        function = StackSlotAt(it.Next());
      }
      functions->Add(JSFunction::cast(function));
    }
  }
}
|
|
|
|
|
|
|
|
|
[turbofan] Unify referencing of stack slots
Previously, it was not possible to specify StackSlotOperands for all
slots in both the caller and callee stacks. Specifically, the region
of the callee's stack including the saved return address, frame
pointer, function pointer and context pointer could not be addressed
by the register allocator/gap resolver.
In preparation for better tail call support, which will use the gap
resolver to reconcile outgoing parameters, this change makes it
possible to address all slots on the stack, because slots in the
previously inaccessible dead zone may become parameter slots for
outgoing tail calls. All caller stack slots are accessible as they
were before, with slot -1 corresponding to the last stack
parameter. Stack slot indices >= 0 access the callee stack, with slot
0 corresponding to the callee's saved return address, 1 corresponding
to the saved frame pointer, 2 corresponding to the current function
context, 3 corresponding to the frame marker/JSFunction, and slots 4
and above corresponding to spill slots.
The following changes were specifically needed:
* Frame has been changed to explicitly manage three areas of the
callee frame, the fixed header, the spill slot area, and the
callee-saved register area.
* Conversions from stack slot indices to fp offsets all now go through
a common bottleneck: OptimizedFrame::StackSlotOffsetRelativeToFp
* The generation of deoptimization translation tables has been changed
to support the new stack slot indexing scheme. Crankshaft, which
doesn't support the new slot numbering in its register allocator,
must adapt the indexes when creating translation tables.
* Callee-saved parameters are now kept below spill slots, not above,
to support saving only the optimal set of used registers, which is
only known after register allocation is finished and spill slots
have been allocated.
Review URL: https://codereview.chromium.org/1261923007
Cr-Commit-Position: refs/heads/master@{#30224}
2015-08-18 14:47:56 +00:00
|
|
|
int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
|
|
|
|
return StandardFrameConstants::kCallerSPOffset -
|
|
|
|
((slot_index + 1) * kPointerSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-06-16 06:04:42 +00:00
|
|
|
// Reads the tagged value stored in the given stack slot of this frame.
Object* OptimizedFrame::StackSlotAt(int index) const {
  // Resolve the slot index to an fp-relative offset, then load the value.
  const int fp_offset = StackSlotOffsetRelativeToFp(index);
  return Memory::Object_at(fp() + fp_offset);
}
|
|
|
|
|
2016-01-20 18:10:13 +00:00
|
|
|
int InterpretedFrame::LookupExceptionHandlerInTable(
|
2016-02-05 13:51:42 +00:00
|
|
|
int* context_register, HandlerTable::CatchPrediction* prediction) {
|
2016-01-20 18:10:13 +00:00
|
|
|
BytecodeArray* bytecode = function()->shared()->bytecode_array();
|
|
|
|
HandlerTable* table = HandlerTable::cast(bytecode->handler_table());
|
|
|
|
int pc_offset = GetBytecodeOffset() + 1; // Point after current bytecode.
|
2016-02-05 13:51:42 +00:00
|
|
|
return table->LookupRange(pc_offset, context_register, prediction);
|
2016-01-20 18:10:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
int InterpretedFrame::GetBytecodeOffset() const {
|
|
|
|
const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
|
2016-02-16 15:33:54 +00:00
|
|
|
DCHECK_EQ(
|
|
|
|
InterpreterFrameConstants::kBytecodeOffsetFromFp,
|
|
|
|
InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
|
2016-01-20 18:10:13 +00:00
|
|
|
int raw_offset = Smi::cast(GetExpression(index))->value();
|
|
|
|
return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
|
|
|
|
}
|
|
|
|
|
|
|
|
void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
|
|
|
|
const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
|
2016-02-16 15:33:54 +00:00
|
|
|
DCHECK_EQ(
|
|
|
|
InterpreterFrameConstants::kBytecodeOffsetFromFp,
|
|
|
|
InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
|
2016-01-20 18:10:13 +00:00
|
|
|
int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
|
|
|
|
SetExpression(index, Smi::FromInt(raw_offset));
|
|
|
|
}
|
|
|
|
|
2016-04-18 13:20:45 +00:00
|
|
|
// Returns the BytecodeArray this interpreted frame is executing.
BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  // The expression-stack index must agree with the fp-relative constant the
  // interpreter uses for the same slot.
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  return BytecodeArray::cast(GetExpression(index));
}
|
|
|
|
|
2016-04-18 13:20:45 +00:00
|
|
|
// Replaces the BytecodeArray this interpreted frame executes (e.g. when the
// bytecode is swapped out from under a live frame).
void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
  const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
  // The expression-stack index must agree with the fp-relative constant the
  // interpreter uses for the same slot.
  DCHECK_EQ(
      InterpreterFrameConstants::kBytecodeArrayFromFp,
      InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
  SetExpression(index, bytecode_array);
}
|
|
|
|
|
2016-04-18 14:13:04 +00:00
|
|
|
// Reads the value of interpreter register |register_index|. Registers live
// in the frame's expression-stack area, starting at the register file slot.
Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
  const int base = InterpreterFrameConstants::kRegisterFileExpressionIndex;
  // The register-file base index must agree with the fp-relative constant
  // the interpreter uses.
  DCHECK_EQ(
      InterpreterFrameConstants::kRegisterFileFromFp,
      InterpreterFrameConstants::kExpressionsOffset - base * kPointerSize);
  return GetExpression(base + register_index);
}
|
|
|
|
|
2016-04-18 14:13:04 +00:00
|
|
|
void InterpretedFrame::WriteInterpreterRegister(int register_index,
|
|
|
|
Object* value) {
|
|
|
|
const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
|
|
|
|
DCHECK_EQ(
|
2016-04-19 12:47:25 +00:00
|
|
|
InterpreterFrameConstants::kRegisterFileFromFp,
|
2016-04-18 14:13:04 +00:00
|
|
|
InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
|
|
|
|
return SetExpression(index + register_index, value);
|
|
|
|
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Produces a single-entry summary of this interpreted frame for stack-walk
// consumers. |functions| must be empty on entry.
void InterpretedFrame::Summarize(List<FrameSummary>* functions) const {
  DCHECK(functions->length() == 0);
  AbstractCode* abstract_code =
      AbstractCode::cast(function()->shared()->bytecode_array());
  functions->Add(FrameSummary(receiver(), function(), abstract_code,
                              GetBytecodeOffset(), IsConstructor()));
}
|
2016-01-20 18:10:13 +00:00
|
|
|
|
2011-10-03 11:13:20 +00:00
|
|
|
int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
|
|
|
|
return Smi::cast(GetExpression(0))->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-08-30 08:54:43 +00:00
|
|
|
// The caller's stack pointer sits at a fixed offset from this frame's fp.
Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
|
|
|
|
|
2016-02-16 15:33:54 +00:00
|
|
|
// Static helper: reads the argument count of an adaptor frame given only
// its frame pointer (no StackFrame object needed).
int ArgumentsAdaptorFrame::GetLength(Address fp) {
  const int offset = ArgumentsAdaptorFrameConstants::kLengthOffset;
  Object* length = Memory::Object_at(fp + offset);
  return Smi::cast(length)->value();
}
|
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
// Adaptor frames always execute the arguments-adaptor trampoline builtin.
Code* ArgumentsAdaptorFrame::unchecked_code() const {
  Builtins* builtins = isolate()->builtins();
  return builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
}
|
|
|
|
|
2016-02-16 15:33:54 +00:00
|
|
|
Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments, so the caller's stack pointer is
  // always at the fixed caller-SP offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2010-08-17 11:44:01 +00:00
|
|
|
// Reads the code object slot of the internal frame without any GC-safe
// type checking (hence "unchecked").
Code* InternalFrame::unchecked_code() const {
  Object* code =
      Memory::Object_at(fp() + InternalFrameConstants::kCodeOffset);
  DCHECK(code != NULL);
  return reinterpret_cast<Code*>(code);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prints the frame's index prefix: "%5d: " for overview listings, "[%d]: "
// for detailed output.
void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  const char* format = (mode == OVERVIEW) ? "%5d: " : "[%d]: ";
  accumulator->Add(format, index);
}
|
|
|
|
|
2016-02-23 19:38:50 +00:00
|
|
|
// Minimal printer for wasm frames; mode and index are currently unused.
void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
                      int index) const {
  accumulator->Add("wasm frame");
}
|
|
|
|
|
|
|
|
// Wasm code is located by looking up the current pc in the isolate's code
// object tables.
Code* WasmFrame::unchecked_code() const {
  Object* code = isolate()->FindCodeObject(pc());
  return static_cast<Code*>(code);
}
|
|
|
|
|
2016-04-06 11:37:15 +00:00
|
|
|
// Returns a placeholder JSFunction for this wasm frame.
// TODO(clemensh): generate the right JSFunctions once per wasm function and
// cache them -- this currently allocates a fresh "<WASM>" function on every
// call.
JSFunction* WasmFrame::function() const {
  Factory* factory = isolate()->factory();
  Handle<String> name = factory->NewStringFromAsciiChecked("<WASM>");
  return *factory->NewFunction(name);
}
|
|
|
|
|
2016-04-19 11:55:23 +00:00
|
|
|
// Produces a single-entry summary of this wasm frame. |functions| must be
// empty on entry.
void WasmFrame::Summarize(List<FrameSummary>* functions) const {
  DCHECK(functions->length() == 0);
  Code* code = LookupCode();
  // Code offset of the current pc within the wasm code object.
  int offset = static_cast<int>(pc() - code->instruction_start());
  Handle<JSFunction> fun(function(), isolate());
  // Wasm frames are never constructor frames.
  FrameSummary summary(receiver(), *fun, AbstractCode::cast(code), offset,
                       false);
  functions->Add(summary);
}
|
|
|
|
|
2016-02-23 19:38:50 +00:00
|
|
|
void WasmFrame::Iterate(ObjectVisitor* v) const {
  // Wasm frames are compiled frames; reuse the shared visiting logic.
  IterateCompiledFrame(v);
}
|
|
|
|
|
|
|
|
// Wasm frames use the exit-frame layout for locating the caller's stack
// pointer.
Address WasmFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2015-06-17 11:58:17 +00:00
|
|
|
namespace {
|
|
|
|
|
|
|
|
|
|
|
|
void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
|
|
|
|
Code* code) {
|
|
|
|
if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
|
|
|
|
std::ostringstream os;
|
|
|
|
os << "--------- s o u r c e c o d e ---------\n"
|
|
|
|
<< SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
|
|
|
|
<< "\n-----------------------------------------\n";
|
|
|
|
accumulator->Add(os.str().c_str());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
} // namespace
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// Prints a human-readable description of this JavaScript frame to the
// accumulator. In OVERVIEW mode only a one-line summary (index, function,
// receiver, parameters) is emitted; in detailed mode locals, context slots,
// the expression stack, and (optionally) the function source are printed
// as well.
//
// Fix: guard the with-context unwinding loop against a NULL context. The
// context slot of a frame under construction (or a corrupted frame being
// printed for diagnostics) may not hold a Context, in which case |context|
// stays NULL and the old unconditional `context->IsWithContext()` call
// dereferenced a null pointer; the code below already handles
// context == NULL when printing heap locals.
void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      // The pc lies inside full-codegen code: report the exact source line.
      int offset = static_cast<int>(pc - code->instruction_start());
      int source_pos = code->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d", line);
    } else {
      // Fall back to the function's start position ("~" marks approximate).
      int function_start_pos = shared->start_position();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d", line);
    }

    accumulator->Add("] [pc=%p] ", pc);
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    // Locals and expression stack are not reliably recoverable from an
    // optimized frame; print source only.
    accumulator->Add(" {\n// optimized frame\n");
    PrintFunctionSource(accumulator, shared, code);
    accumulator->Add("}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add("  // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }
  // Skip any with-contexts; tolerate a missing context (the heap-locals
  // loop below reports it explicitly).
  while (context != NULL && context->IsWithContext()) {
    context = context->previous();
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add("  // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add("  // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, shared, code);

  accumulator->Add("}\n\n");
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prints a description of this adaptor frame: the actual vs. expected
// argument counts and, in detailed mode, every actual argument (marking
// those that are dropped before reaching the callee).
void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->internal_formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add("  // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add("  [%02d] : %o", i, GetParameter(i));
    // Arguments beyond the formal parameter count are dropped by the
    // adaptor and never reach the callee.
    if (expected != -1 && i >= expected) {
      accumulator->Add("  // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}
|
|
|
|
|
|
|
|
|
|
|
|
// GC visitor for entry frames: only the return-address / constant-pool
// bookkeeping needs visiting; entry frames carry no expression stack.
void EntryFrame::Iterate(ObjectVisitor* v) const {
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Visits every tagged slot on the frame's expression stack: from sp() up to
// and including the last object slot relative to fp().
void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit =
      &Memory::Object_at(fp() + StandardFrameConstants::kLastObjectOffset) + 1;
  v->VisitPointers(base, limit);
}
|
|
|
|
|
|
|
|
|
|
|
|
// GC visitor for JavaScript frames: the expression stack plus the pc /
// constant-pool slots.
void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
|
|
|
|
|
|
|
|
void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
|
|
|
|
|
|
|
|
|
2013-01-29 09:12:20 +00:00
|
|
|
// GC visitor for stub-failure trampoline frames. The frame is visited in
// two disjoint tagged regions: (1) sp() up to the bottom of the fixed
// header, and (2) the function slot through the last object slot.
void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** spill_base = &Memory::Object_at(sp());
  Object** spill_limit = &Memory::Object_at(
      fp() + StubFailureTrampolineFrameConstants::kFixedHeaderBottomOffset);
  v->VisitPointers(spill_base, spill_limit);

  Object** header_base =
      &Memory::Object_at(fp() + StandardFrameConstants::kFunctionOffset);
  Object** header_limit =
      &Memory::Object_at(fp() + StandardFrameConstants::kLastObjectOffset) + 1;
  v->VisitPointers(header_base, header_limit);

  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}
|
|
|
|
|
|
|
|
|
2013-02-05 08:09:32 +00:00
|
|
|
// The caller's stack pointer is at the standard fixed offset from fp.
Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Finds the stub-failure trampoline code object containing the current pc.
// The trampoline exists in two variants (JS-function and non-JS-function
// stub mode); whichever one covers pc() is the frame's code.
Code* StubFailureTrampolineFrame::unchecked_code() const {
  static const StubFunctionMode kModes[] = {NOT_JS_FUNCTION_STUB_MODE,
                                            JS_FUNCTION_STUB_MODE};
  for (StubFunctionMode mode : kModes) {
    Code* trampoline;
    StubFailureTrampolineStub(isolate(), mode).FindCodeInCache(&trampoline);
    if (trampoline->contains(pc())) return trampoline;
  }

  // The pc of a stub-failure frame must lie in one of the two trampolines.
  UNREACHABLE();
  return NULL;
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
|
|
// Returns the n-th (0-based) JavaScript frame found while advancing the
// locator's iterator; non-JS frames are skipped. The caller must know that
// at least n+1 JavaScript frames remain.
JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  DCHECK(n >= 0);
  for (int found = 0; found <= n; found++) {
    // Skip ahead to the next JavaScript frame.
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (found == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}
|
|
|
|
|
|
|
|
|
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
2011-09-20 13:36:52 +00:00
|
|
|
// Returns the map of |object| even while a GC is relocating objects: if the
// map word has been overwritten with a forwarding address, the map is read
// from the object's new location instead.
static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  if (map_word.IsForwardingAddress()) {
    return map_word.ToForwardingAddress()->map();
  }
  return map_word.ToMap();
}
|
|
|
|
|
|
|
|
|
2011-09-19 18:36:47 +00:00
|
|
|
// Computes the size of a code-space object using its GC-safe map, so the
// result is valid even mid-collection.
static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  Map* map = GcSafeMapOfCodeSpaceObject(object);
  return object->SizeFromMap(map);
}
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef DEBUG
// Debug-only: checks whether |addr| lies within the code object, reading
// the object's size through its GC-safe map.
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = start + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif
|
|
|
|
|
|
|
|
|
|
|
|
// Casts a heap object known to be code to Code*, asserting (in debug
// builds) that |inner_pointer| really lies inside it.
Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}
|
|
|
|
|
|
|
|
|
2011-09-20 10:08:39 +00:00
|
|
|
// Finds the Code object containing |inner_pointer| without relying on the
// heap being in a consistent state (usable from profiling signals / during
// GC). Returns nullptr if the pointer is in neither the large-object space
// nor the code space.
Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();

  // Large code objects live on their own pages; resolve those directly.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  if (!heap->code_space()->Contains(inner_pointer)) {
    return nullptr;
  }

  // Walk objects on the page until one starts after the inner pointer; the
  // previous object is then the one containing it.
  Page* page = Page::FromAddress(inner_pointer);

  DCHECK_EQ(page->owner(), heap->code_space());
  // The page must be fully swept before it can be walked object-by-object.
  heap->mark_compact_collector()->sweeper().SweepOrWaitUntilSweepingCompleted(
      page);

  // The skip list lets us start the walk close to the inner pointer instead
  // of at the page start.
  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    // Skip over the current allocation gap (top..limit) of the code space.
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) {
      return GcSafeCastToCode(obj, inner_pointer);
    }
    addr = next_addr;
  }
}
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2011-09-20 10:08:39 +00:00
|
|
|
// Returns the cache entry mapping |inner_pointer| to its containing Code
// object, recomputing the Code via GcSafeFindCodeForInnerPointer on a miss.
// The cache is a direct-mapped table whose size must be a power of two so
// that indexing can use a mask.
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer),
                                     v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    // Cache hit; in debug builds re-verify the cached Code is still correct.
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}
|
|
|
|
|
|
|
|
|
2013-05-08 08:08:23 +00:00
|
|
|
// -------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
2015-05-15 05:13:15 +00:00
|
|
|
// Counts the registers present in |reglist| (one bit per register).
int NumRegs(RegList reglist) {
  return base::bits::CountPopulation(reglist);
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Table mapping the i-th JS caller-saved register to its architecture
// register code; populated by SetUpJSCallerSavedCodeData().
struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};
|
|
|
|
|
2012-04-05 14:10:39 +00:00
|
|
|
// Process-wide instance of the table; filled in by
// SetUpJSCallerSavedCodeData() — presumably during VM initialization, before
// JSCallerSavedCode() is first queried (TODO confirm against caller).
JSCallerSavedCodeData caller_saved_code_data;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2012-04-05 14:10:39 +00:00
|
|
|
void SetUpJSCallerSavedCodeData() {
|
|
|
|
int i = 0;
|
|
|
|
for (int r = 0; r < kNumRegs; r++)
|
|
|
|
if ((kJSCallerSaved & (1 << r)) != 0)
|
|
|
|
caller_saved_code_data.reg_code[i++] = r;
|
|
|
|
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(i == kNumJSCallerSaved);
|
2012-04-05 14:10:39 +00:00
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-07-05 09:52:11 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
int JSCallerSavedCode(int n) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(0 <= n && n < kNumJSCallerSaved);
|
2012-04-05 14:10:39 +00:00
|
|
|
return caller_saved_code_data.reg_code[n];
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-04-06 17:58:28 +00:00
|
|
|
// Defines a zone-allocated wrapper class (<Field>_Wrapper) for every stack
// frame type, so that a live frame can be copied off the machine stack into
// zone memory (see AllocateFrameCopy below). The wrapper exists only to give
// the copied frame ZoneObject allocation semantics.
#define DEFINE_WRAPPER(type, field)                              \
  class field##_Wrapper : public ZoneObject {                    \
   public: /* NOLINT */                                          \
    field##_Wrapper(const field& original) : frame_(original) {  \
    }                                                            \
    field frame_;                                                \
  };
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER
|
|
|
|
|
2012-06-11 12:42:31 +00:00
|
|
|
// Copies |frame| into zone memory, dispatching on the frame's dynamic type
// to construct a wrapper of the matching concrete StackFrame subclass
// (generated above by DEFINE_WRAPPER). Returns a pointer to the zone-owned
// copy; NULL is unreachable but keeps the compiler happy after the switch.
static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}
|
|
|
|
|
2013-07-05 09:52:11 +00:00
|
|
|
|
2013-02-15 09:27:10 +00:00
|
|
|
// Snapshots the current stack of |isolate| by copying every frame into zone
// memory. The returned vector and all frame copies are owned by |zone|.
Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> frames(10, zone);
  StackFrameIterator it(isolate);
  while (!it.done()) {
    frames.Add(AllocateFrameCopy(it.frame(), zone), zone);
    it.Advance();
  }
  return frames.ToVector();
}
|
|
|
|
|
|
|
|
|
2015-06-01 22:46:54 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|