Debugger: prepare code for debugging on a per-function basis.
Prior to this patch, we entered a global debug mode whenever a break point was set. On entering this mode, all code was deoptimized, and activated frames were recompiled and redirected to newly compiled debug code.

After this patch, we only deoptimize and redirect for the functions we actually want to debug. The trigger for this is Debug::EnsureDebugInfo, and having a DebugInfo object attached to the SharedFunctionInfo (SFI) prevents optimization and inlining. The result is that optimized code for functions without break points can coexist with unoptimized code for functions that do have break points.

R=mstarzinger@chromium.org, ulan@chromium.org
BUG=v8:4132
LOG=Y

Review URL: https://codereview.chromium.org/1233073005

Cr-Commit-Position: refs/heads/master@{#29758}
commit 35c28ce0a7
parent 8019833da7
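The change described in the commit message can be summarized with a small stand-alone sketch. The sketch below is illustrative only and is not code from this patch: the names DebugState, SetBreakPoint and CanOptimize are invented for the example. It models the new per-function behaviour, where attaching debug info to one function (what Debug::EnsureDebugInfo does for a SharedFunctionInfo) makes only that function ineligible for optimization and inlining, while every other function stays optimizable.

// Illustrative sketch only -- not part of the commit. Toy model of the
// per-function debug gate introduced by this patch.
#include <iostream>
#include <string>
#include <unordered_set>

struct DebugState {
  // Stands in for "this SharedFunctionInfo has a DebugInfo attached".
  std::unordered_set<std::string> functions_with_debug_info;

  // Modeled after Debug::EnsureDebugInfo: setting a break point attaches
  // debug info to exactly one function instead of flipping a global flag.
  void SetBreakPoint(const std::string& function_name) {
    functions_with_debug_info.insert(function_name);
  }

  // Modeled after the per-function checks added by the patch: only the
  // functions that carry debug info lose optimization and inlining.
  bool CanOptimize(const std::string& function_name) const {
    return functions_with_debug_info.count(function_name) == 0;
  }
};

int main() {
  DebugState debug;
  debug.SetBreakPoint("f");
  std::cout << std::boolalpha
            << "f optimizable: " << debug.CanOptimize("f") << "\n"   // false
            << "g optimizable: " << debug.CanOptimize("g") << "\n";  // true
}

In the patch itself this gate appears as SharedFunctionInfo::HasDebugInfo() checks in Compiler::GetOptimizedCode, in OptimizedCompileJob::CreateGraph (which now aborts with kFunctionBeingDebugged instead of retrying on kDebuggerHasBreakPoints), in JSInliner::Reduce, and in HOptimizedGraphBuilder::TryInline.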
@@ -336,8 +336,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
visitor->VisitCodeAgeSequence(this);
} else if (RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence() &&
isolate->debug()->has_break_points()) {
IsPatchedDebugBreakSlotSequence()) {
visitor->VisitDebugTarget(this);
} else if (RelocInfo::IsRuntimeEntry(mode)) {
visitor->VisitRuntimeEntry(this);
@@ -360,8 +359,7 @@ void RelocInfo::Visit(Heap* heap) {
StaticVisitor::VisitInternalReference(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
StaticVisitor::VisitCodeAgeSequence(heap, this);
} else if (heap->isolate()->debug()->has_break_points() &&
RelocInfo::IsDebugBreakSlot(mode) &&
} else if (RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence()) {
StaticVisitor::VisitDebugTarget(heap, this);
} else if (RelocInfo::IsRuntimeEntry(mode)) {
@@ -884,8 +884,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
} else if (mode == RelocInfo::INTERNAL_REFERENCE) {
visitor->VisitInternalReference(this);
} else if (RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence() &&
isolate->debug()->has_break_points()) {
IsPatchedDebugBreakSlotSequence()) {
visitor->VisitDebugTarget(this);
} else if (RelocInfo::IsRuntimeEntry(mode)) {
visitor->VisitRuntimeEntry(this);
@@ -906,8 +905,7 @@ void RelocInfo::Visit(Heap* heap) {
StaticVisitor::VisitExternalReference(this);
} else if (mode == RelocInfo::INTERNAL_REFERENCE) {
StaticVisitor::VisitInternalReference(this);
} else if (heap->isolate()->debug()->has_break_points() &&
RelocInfo::IsDebugBreakSlot(mode) &&
} else if (RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence()) {
StaticVisitor::VisitDebugTarget(heap, this);
} else if (RelocInfo::IsRuntimeEntry(mode)) {
@@ -3,6 +3,7 @@
// found in the LICENSE file.

#include "src/background-parsing-task.h"
#include "src/debug.h"

namespace v8 {
namespace internal {
@@ -31,7 +32,7 @@ BackgroundParsingTask::BackgroundParsingTask(
info->set_global();
info->set_unicode_cache(&source_->unicode_cache);

bool disable_lazy = Compiler::DebuggerWantsEagerCompilation(isolate);
bool disable_lazy = isolate->debug()->RequiresEagerCompilation();
if (disable_lazy && options == ScriptCompiler::kProduceParserCache) {
// Producing cached data while parsing eagerly is not supported.
options = ScriptCompiler::kNoCompileOptions;
@@ -49,7 +49,6 @@ namespace internal {
V(kCopyBuffersOverlap, "Copy buffers overlap") \
V(kCouldNotGenerateZero, "Could not generate +0.0") \
V(kCouldNotGenerateNegativeZero, "Could not generate -0.0") \
V(kDebuggerHasBreakPoints, "Debugger has break points") \
V(kDebuggerStatement, "DebuggerStatement") \
V(kDeclarationInCatchContext, "Declaration in catch context") \
V(kDeclarationInWithContext, "Declaration in with context") \
@@ -85,6 +84,7 @@ namespace internal {
"ForInStatement with non-local each variable") \
V(kForOfStatement, "ForOfStatement") \
V(kFrameIsExpectedToBeAligned, "Frame is expected to be aligned") \
V(kFunctionBeingDebugged, "Function is being debugged") \
V(kFunctionCallsEval, "Function calls eval") \
V(kFunctionWithIllegalRedeclaration, "Function with illegal redeclaration") \
V(kGeneratedCodeIsTooLarge, "Generated code is too large") \

src/compiler.cc
@@ -338,11 +338,10 @@ class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {

OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
DCHECK(info()->IsOptimizing());
DCHECK(!info()->IsCompilingForDebugging());

// Do not use Crankshaft/TurboFan if we need to be able to set break points.
if (isolate()->debug()->has_break_points()) {
return RetryOptimization(kDebuggerHasBreakPoints);
if (info()->shared_info()->HasDebugInfo()) {
return AbortOptimization(kFunctionBeingDebugged);
}

// Limit the number of times we try to optimize functions.
@@ -905,18 +904,6 @@ MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
}


MaybeHandle<Code> Compiler::GetUnoptimizedCode(
Handle<SharedFunctionInfo> shared) {
DCHECK(!shared->GetIsolate()->has_pending_exception());
DCHECK(!shared->is_compiled());

Zone zone;
ParseInfo parse_info(&zone, shared);
CompilationInfo info(&parse_info);
return GetUnoptimizedCodeCommon(&info);
}


bool Compiler::EnsureCompiled(Handle<JSFunction> function,
ClearExceptionFlag flag) {
if (function->is_compiled()) return true;
@@ -979,45 +966,41 @@ bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
}


// Compile full code for debugging. This code will have debug break slots
// and deoptimization information. Deoptimization information is required
// in case that an optimized version of this function is still activated on
// the stack. It will also make sure that the full code is compiled with
// the same flags as the previous version, that is flags which can change
// the code generated. The current method of mapping from already compiled
// full code without debug break slots to full code with debug break slots
// depends on the generated code is otherwise exactly the same.
// If compilation fails, just keep the existing code.
MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
CompilationInfoWithZone info(function);
Isolate* isolate = info.isolate();
VMState<COMPILER> state(isolate);

info.MarkAsDebug();

DCHECK(!isolate->has_pending_exception());
Handle<Code> old_code(function->shared()->code());
DCHECK(old_code->kind() == Code::FUNCTION);
DCHECK(!old_code->has_debug_break_slots());

info.MarkCompilingForDebugging();
if (old_code->is_compiled_optimizable()) {
info.EnableDeoptimizationSupport();
} else {
info.MarkNonOptimizable();
MaybeHandle<Code> CompileForDebugging(CompilationInfo* info) {
info->MarkAsDebug();
VMState<COMPILER> state(info->isolate());
if (info->shared_info()->is_compiled()) {
if (info->shared_info()->code()->is_compiled_optimizable()) {
info->EnableDeoptimizationSupport();
} else {
info->MarkNonOptimizable();
}
}
MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(info);
Handle<Code> new_code;
if (!maybe_new_code.ToHandle(&new_code)) {
isolate->clear_pending_exception();
} else {
DCHECK_EQ(old_code->is_compiled_optimizable(),
new_code->is_compiled_optimizable());
info->isolate()->clear_pending_exception();
}
return maybe_new_code;
}


MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
CompilationInfoWithZone info(function);
VMState<COMPILER> state(info.isolate());
return CompileForDebugging(&info);
}


MaybeHandle<Code> Compiler::GetDebugCode(Handle<SharedFunctionInfo> shared) {
DCHECK(shared->allows_lazy_compilation_without_context());
Zone zone;
ParseInfo parse_info(&zone, shared);
CompilationInfo info(&parse_info);
return CompileForDebugging(&info);
}


void Compiler::CompileForLiveEdit(Handle<Script> script) {
// TODO(635): support extensions.
Zone zone;
@@ -1065,12 +1048,13 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
{ VMState<COMPILER> state(info->isolate());
if (parse_info->literal() == NULL) {
// Parse the script if needed (if it's already parsed, function() is
// non-NULL).
// non-NULL). If compiling for debugging, we may eagerly compile inner
// functions, so do not parse lazily in that case.
ScriptCompiler::CompileOptions options = parse_info->compile_options();
bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
String::cast(script->source())->length() >
FLAG_min_preparse_length) &&
!Compiler::DebuggerWantsEagerCompilation(isolate);
!info->is_debug();

parse_info->set_allow_lazy_parsing(parse_allow_lazy);
if (!parse_allow_lazy &&
@@ -1180,6 +1164,8 @@ MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
parse_info.set_parse_restriction(restriction);
parse_info.set_context(context);

// If we eval from debug code, compile for debugging as well.
if (outer_info->HasDebugCode()) info.MarkAsDebug();
Debug::RecordEvalCaller(script);

shared_info = CompileToplevel(&info);
@@ -1371,9 +1357,13 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
// We found an existing shared function info. If it's already compiled,
// don't worry about compiling it, and simply return it. If it's not yet
// compiled, continue to decide whether to eagerly compile.
// Carry on if we are compiling eager to obtain code for debugging,
// unless we already have code with debut break slots.
Handle<SharedFunctionInfo> existing;
if (maybe_existing.ToHandle(&existing) && existing->is_compiled()) {
return existing;
if (!outer_info->is_debug() || existing->HasDebugCode()) {
return existing;
}
}

Zone zone;
@@ -1384,6 +1374,7 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
parse_info.set_language_mode(literal->scope()->language_mode());
if (outer_info->will_serialize()) info.PrepareForSerializing();
if (outer_info->is_first_compile()) info.MarkAsFirstCompile();
if (outer_info->is_debug()) info.MarkAsDebug();

LiveEditFunctionTracker live_edit_tracker(isolate, literal);
// Determine if the function can be lazily compiled. This is necessary to
@@ -1396,9 +1387,11 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
// of functions without an outer context when setting a breakpoint through
// Debug::FindSharedFunctionInfoInScript.
bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
bool allow_lazy =
literal->AllowsLazyCompilation() &&
!DebuggerWantsEagerCompilation(isolate, allow_lazy_without_ctx);
// Compile eagerly for live edit. When compiling debug code, eagerly compile
// unless we can lazily compile without the context.
bool allow_lazy = literal->AllowsLazyCompilation() &&
!LiveEditFunctionTracker::IsActive(isolate) &&
(!info.is_debug() || allow_lazy_without_ctx);

if (outer_info->parse_info()->is_toplevel() && outer_info->will_serialize()) {
// Make sure that if the toplevel code (possibly to be serialized),
@@ -1463,8 +1456,8 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
return result;
} else if (!lazy) {
// We have additional data from compilation now.
DCHECK(!existing->is_compiled());
// Assert that we are not overwriting (possibly patched) debug code.
DCHECK(!existing->HasDebugCode());
existing->ReplaceCode(*info.code());
existing->set_scope_info(*scope_info);
existing->set_feedback_vector(*info.feedback_vector());
@@ -1478,6 +1471,10 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
ConcurrencyMode mode,
BailoutId osr_ast_id,
JavaScriptFrame* osr_frame) {
Isolate* isolate = function->GetIsolate();
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
if (shared->HasDebugInfo()) return MaybeHandle<Code>();

Handle<Code> cached_code;
if (GetCodeFromOptimizedCodeMap(
function, osr_ast_id).ToHandle(&cached_code)) {
@@ -1492,10 +1489,8 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
return cached_code;
}

Isolate* isolate = function->GetIsolate();
DCHECK(AllowCompilation::IsAllowed(isolate));

Handle<SharedFunctionInfo> shared(function->shared(), isolate);
if (!shared->is_compiled() ||
shared->scope_info() == ScopeInfo::Empty(isolate)) {
// The function was never compiled. Compile it unoptimized first.
@@ -1557,19 +1552,18 @@ Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
Handle<SharedFunctionInfo> shared = info->shared_info();
shared->code()->set_profiler_ticks(0);

DCHECK(!shared->HasDebugInfo());

// 1) Optimization on the concurrent thread may have failed.
// 2) The function may have already been optimized by OSR. Simply continue.
// Except when OSR already disabled optimization for some reason.
// 3) The code may have already been invalidated due to dependency change.
// 4) Debugger may have been activated.
// 5) Code generation may have failed.
// 4) Code generation may have failed.
if (job->last_status() == OptimizedCompileJob::SUCCEEDED) {
if (shared->optimization_disabled()) {
job->RetryOptimization(kOptimizationDisabled);
} else if (info->dependencies()->HasAborted()) {
job->RetryOptimization(kBailedOutDueToDependencyChange);
} else if (isolate->debug()->has_break_points()) {
job->RetryOptimization(kDebuggerHasBreakPoints);
} else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
@@ -1595,15 +1589,6 @@ Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
}


bool Compiler::DebuggerWantsEagerCompilation(Isolate* isolate,
bool allow_lazy_without_ctx) {
if (LiveEditFunctionTracker::IsActive(isolate)) return true;
Debug* debug = isolate->debug();
bool debugging = debug->is_active() || debug->has_break_points();
return debugging && !allow_lazy_without_ctx;
}


CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
: name_(name), info_(info) {
if (FLAG_hydrogen_stats) {
@@ -120,18 +120,17 @@ class CompilationInfo {
kMustNotHaveEagerFrame = 1 << 4,
kDeoptimizationSupport = 1 << 5,
kDebug = 1 << 6,
kCompilingForDebugging = 1 << 7,
kSerializing = 1 << 8,
kContextSpecializing = 1 << 9,
kFrameSpecializing = 1 << 10,
kInliningEnabled = 1 << 11,
kTypingEnabled = 1 << 12,
kDisableFutureOptimization = 1 << 13,
kSplittingEnabled = 1 << 14,
kTypeFeedbackEnabled = 1 << 15,
kDeoptimizationEnabled = 1 << 16,
kSourcePositionsEnabled = 1 << 17,
kFirstCompile = 1 << 18,
kSerializing = 1 << 7,
kContextSpecializing = 1 << 8,
kFrameSpecializing = 1 << 9,
kInliningEnabled = 1 << 10,
kTypingEnabled = 1 << 11,
kDisableFutureOptimization = 1 << 12,
kSplittingEnabled = 1 << 13,
kTypeFeedbackEnabled = 1 << 14,
kDeoptimizationEnabled = 1 << 15,
kSourcePositionsEnabled = 1 << 16,
kFirstCompile = 1 << 17,
};

explicit CompilationInfo(ParseInfo* parse_info);
@@ -207,6 +206,8 @@ class CompilationInfo {
return GetFlag(kMustNotHaveEagerFrame);
}

// Compiles marked as debug produce unoptimized code with debug break slots.
// Inner functions that cannot be compiled w/o context are compiled eagerly.
void MarkAsDebug() { SetFlag(kDebug); }

bool is_debug() const { return GetFlag(kDebug); }
@@ -270,8 +271,6 @@ class CompilationInfo {
}
void SetCode(Handle<Code> code) { code_ = code; }

void MarkCompilingForDebugging() { SetFlag(kCompilingForDebugging); }
bool IsCompilingForDebugging() { return GetFlag(kCompilingForDebugging); }
void MarkNonOptimizable() {
SetMode(CompilationInfo::NONOPT);
}
@@ -627,10 +626,11 @@ class Compiler : public AllStatic {
Handle<JSFunction> function);
MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
Handle<JSFunction> function);
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
Handle<SharedFunctionInfo> shared);

MUST_USE_RESULT static MaybeHandle<Code> GetDebugCode(
Handle<JSFunction> function);
MUST_USE_RESULT static MaybeHandle<Code> GetDebugCode(
Handle<SharedFunctionInfo> shared);

// Parser::Parse, then Compiler::Analyze.
static bool ParseAndAnalyze(ParseInfo* info);
@@ -682,10 +682,6 @@ class Compiler : public AllStatic {
// Generate and return code from previously queued optimization job.
// On failure, return the empty handle.
static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);

// TODO(titzer): move this method out of the compiler.
static bool DebuggerWantsEagerCompilation(
Isolate* isolate, bool allow_lazy_without_ctx = false);
};


@@ -249,6 +249,14 @@ Reduction JSInliner::Reduce(Node* node) {
return NoChange();
}

if (function->shared()->HasDebugInfo()) {
// Function contains break points.
TRACE("Not inlining %s into %s because callee may contain break points\n",
function->shared()->DebugName()->ToCString().get(),
info_->shared_info()->DebugName()->ToCString().get());
return NoChange();
}

// Disallow cross native-context inlining for now. This means that all parts
// of the resulting code will operate on the same global object.
// This also prevents cross context leaks for asm.js code, where we could

src/debug.cc
@@ -36,7 +36,6 @@ Debug::Debug(Isolate* isolate)
is_active_(false),
is_suppressed_(false),
live_edit_enabled_(true), // TODO(yangguo): set to false by default.
has_break_points_(false),
break_disabled_(false),
in_debug_event_listener_(false),
break_on_exception_(false),
@@ -146,7 +145,6 @@ void BreakLocation::Iterator::Next() {
// the address.
BreakLocation BreakLocation::FromAddress(Handle<DebugInfo> debug_info,
BreakLocatorType type, Address pc) {
DCHECK(debug_info->code()->has_debug_break_slots());
Iterator it(debug_info, type);
it.SkipTo(BreakIndexFromAddress(debug_info, type, pc));
return it.GetBreakLocation();
@@ -158,7 +156,6 @@ BreakLocation BreakLocation::FromAddress(Handle<DebugInfo> debug_info,
void BreakLocation::FromAddressSameStatement(Handle<DebugInfo> debug_info,
BreakLocatorType type, Address pc,
List<BreakLocation>* result_out) {
DCHECK(debug_info->code()->has_debug_break_slots());
int break_index = BreakIndexFromAddress(debug_info, type, pc);
Iterator it(debug_info, type);
it.SkipTo(break_index);
@@ -191,7 +188,6 @@ int BreakLocation::BreakIndexFromAddress(Handle<DebugInfo> debug_info,
BreakLocation BreakLocation::FromPosition(Handle<DebugInfo> debug_info,
BreakLocatorType type, int position,
BreakPositionAlignment alignment) {
DCHECK(debug_info->code()->has_debug_break_slots());
// Run through all break points to locate the one closest to the source
// position.
int closest_break = 0;
@@ -222,7 +218,6 @@ BreakLocation BreakLocation::FromPosition(Handle<DebugInfo> debug_info,
void BreakLocation::SetBreakPoint(Handle<Object> break_point_object) {
// If there is not already a real break point here patch code with debug
// break.
DCHECK(code()->has_debug_break_slots());
if (!HasBreakPoint()) SetDebugBreak();
DCHECK(IsDebugBreak() || IsDebuggerStatement());
// Set the break point information.
@@ -607,7 +602,7 @@ void Debug::Break(Arguments args, JavaScriptFrame* frame) {
// Get the debug info (create it if it does not exist).
Handle<SharedFunctionInfo> shared =
Handle<SharedFunctionInfo>(frame->function()->shared());
Handle<DebugInfo> debug_info = GetDebugInfo(shared);
Handle<DebugInfo> debug_info(shared->GetDebugInfo());

// Find the break point where execution has stopped.
// PC points to the instruction after the current one, possibly a break
@@ -780,27 +775,11 @@ bool Debug::CheckBreakPoint(Handle<Object> break_point_object) {
}


// Check whether the function has debug information.
bool Debug::HasDebugInfo(Handle<SharedFunctionInfo> shared) {
return !shared->debug_info()->IsUndefined();
}


// Return the debug info for this function. EnsureDebugInfo must be called
// prior to ensure the debug info has been generated for shared.
Handle<DebugInfo> Debug::GetDebugInfo(Handle<SharedFunctionInfo> shared) {
DCHECK(HasDebugInfo(shared));
return Handle<DebugInfo>(DebugInfo::cast(shared->debug_info()));
}


bool Debug::SetBreakPoint(Handle<JSFunction> function,
Handle<Object> break_point_object,
int* source_position) {
HandleScope scope(isolate_);

PrepareForBreakPoints();

// Make sure the function is compiled and has set up the debug info.
Handle<SharedFunctionInfo> shared(function->shared());
if (!EnsureDebugInfo(shared, function)) {
@@ -808,7 +787,7 @@ bool Debug::SetBreakPoint(Handle<JSFunction> function,
return true;
}

Handle<DebugInfo> debug_info = GetDebugInfo(shared);
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
// Source positions starts with zero.
DCHECK(*source_position >= 0);

@@ -829,8 +808,6 @@ bool Debug::SetBreakPointForScript(Handle<Script> script,
BreakPositionAlignment alignment) {
HandleScope scope(isolate_);

PrepareForBreakPoints();

// Obtain shared function info for the function.
Handle<Object> result =
FindSharedFunctionInfoInScript(script, *source_position);
@@ -852,7 +829,7 @@ bool Debug::SetBreakPointForScript(Handle<Script> script,
position = *source_position - shared->start_position();
}

Handle<DebugInfo> debug_info = GetDebugInfo(shared);
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
// Source positions starts with zero.
DCHECK(position >= 0);

@@ -926,8 +903,6 @@ void Debug::ClearAllBreakPoints() {

void Debug::FloodWithOneShot(Handle<JSFunction> function,
BreakLocatorType type) {
PrepareForBreakPoints();

// Make sure the function is compiled and has set up the debug info.
Handle<SharedFunctionInfo> shared(function->shared());
if (!EnsureDebugInfo(shared, function)) {
@@ -936,8 +911,8 @@ void Debug::FloodWithOneShot(Handle<JSFunction> function,
}

// Flood the function with break points.
for (BreakLocation::Iterator it(GetDebugInfo(shared), type); !it.Done();
it.Next()) {
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
for (BreakLocation::Iterator it(debug_info, type); !it.Done(); it.Next()) {
it.GetBreakLocation().SetOneShot();
}
}
@@ -1033,13 +1008,18 @@ bool Debug::IsBreakOnException(ExceptionBreakType type) {
}


FrameSummary GetFirstFrameSummary(JavaScriptFrame* frame) {
List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
frame->Summarize(&frames);
return frames.first();
}


void Debug::PrepareStep(StepAction step_action,
int step_count,
StackFrame::Id frame_id) {
HandleScope scope(isolate_);

PrepareForBreakPoints();

DCHECK(in_debug_scope());

// Remember this step action and count.
@@ -1084,22 +1064,18 @@ void Debug::PrepareStep(StepAction step_action,
return;
}

List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
frames_it.frame()->Summarize(&frames);
FrameSummary summary = frames.first();

// Get the debug info (create it if it does not exist).
FrameSummary summary = GetFirstFrameSummary(frame);
Handle<JSFunction> function(summary.function());
Handle<SharedFunctionInfo> shared(function->shared());
if (!EnsureDebugInfo(shared, function)) {
// Return if ensuring debug info failed.
return;
}
Handle<DebugInfo> debug_info = GetDebugInfo(shared);

// Compute whether or not the target is a call target.
bool is_at_restarted_function = false;
Handle<Code> call_function_stub;
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
// Refresh frame summary if the code has been recompiled for debugging.
if (shared->code() != *summary.code()) summary = GetFirstFrameSummary(frame);

// PC points to the instruction after the current one, possibly a break
// location as well. So the "- 1" to exclude it from the search.
@@ -1107,10 +1083,6 @@ void Debug::PrepareStep(StepAction step_action,
BreakLocation location =
BreakLocation::FromAddress(debug_info, ALL_BREAK_LOCATIONS, call_pc);

if (thread_local_.restarter_frame_function_pointer_ != NULL) {
is_at_restarted_function = true;
}

// If this is the last break code target step out is the only possibility.
if (location.IsReturn() || step_action == StepOut) {
if (step_action == StepOut) {
@@ -1142,7 +1114,7 @@ void Debug::PrepareStep(StepAction step_action,
if (step_action != StepNext && step_action != StepMin) {
// If there's restarter frame on top of the stack, just get the pointer
// to function which is going to be restarted.
if (is_at_restarted_function) {
if (thread_local_.restarter_frame_function_pointer_ != NULL) {
Handle<JSFunction> restarted_function(
JSFunction::cast(*thread_local_.restarter_frame_function_pointer_));
FloodWithOneShot(restarted_function);
@@ -1253,10 +1225,10 @@ Handle<Object> Debug::GetSourceBreakLocations(
BreakPositionAlignment position_alignment) {
Isolate* isolate = shared->GetIsolate();
Heap* heap = isolate->heap();
if (!HasDebugInfo(shared)) {
if (!shared->HasDebugInfo()) {
return Handle<Object>(heap->undefined_value(), isolate);
}
Handle<DebugInfo> debug_info = GetDebugInfo(shared);
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
if (debug_info->GetBreakPointCount() == 0) {
return Handle<Object>(heap->undefined_value(), isolate);
}
@@ -1369,42 +1341,12 @@ void Debug::ClearStepNext() {
}


static void CollectActiveFunctionsFromThread(
Isolate* isolate,
ThreadLocalTop* top,
List<Handle<JSFunction> >* active_functions,
Object* active_code_marker) {
// Find all non-optimized code functions with activation frames
// on the stack. This includes functions which have optimized
// activations (including inlined functions) on the stack as the
// non-optimized code is needed for the lazy deoptimization.
for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (frame->is_optimized()) {
List<JSFunction*> functions(FLAG_max_inlining_levels + 1);
frame->GetFunctions(&functions);
for (int i = 0; i < functions.length(); i++) {
JSFunction* function = functions[i];
active_functions->Add(Handle<JSFunction>(function));
function->shared()->code()->set_gc_metadata(active_code_marker);
}
} else if (frame->function()->IsJSFunction()) {
JSFunction* function = frame->function();
DCHECK(frame->LookupCode()->kind() == Code::FUNCTION);
active_functions->Add(Handle<JSFunction>(function));
function->shared()->code()->set_gc_metadata(active_code_marker);
}
}
}


// Count the number of calls before the current frame PC to find the
// corresponding PC in the newly recompiled code.
static Address ComputeNewPcForRedirect(Code* new_code, Code* old_code,
Address old_pc) {
DCHECK_EQ(old_code->kind(), Code::FUNCTION);
DCHECK_EQ(new_code->kind(), Code::FUNCTION);
DCHECK(!old_code->has_debug_break_slots());
DCHECK(new_code->has_debug_break_slots());
int mask = RelocInfo::kCodeTargetMask;
int index = 0;
@@ -1427,7 +1369,6 @@ static Address ComputeNewPcForRedirect(Code* new_code, Code* old_code,
// Count the number of continuations at which the current pc offset is at.
static int ComputeContinuationIndexFromPcOffset(Code* code, int pc_offset) {
DCHECK_EQ(code->kind(), Code::FUNCTION);
DCHECK(!code->has_debug_break_slots());
Address pc = code->instruction_start() + pc_offset;
int mask = RelocInfo::ModeMask(RelocInfo::GENERATOR_CONTINUATION);
int index = 0;
@@ -1453,301 +1394,120 @@ static int ComputePcOffsetFromContinuationIndex(Code* code, int index) {
}


static void RedirectActivationsToRecompiledCodeOnThread(
Isolate* isolate,
ThreadLocalTop* top) {
for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();

if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;

JSFunction* function = frame->function();

DCHECK(frame->LookupCode()->kind() == Code::FUNCTION);

Handle<Code> frame_code(frame->LookupCode());
if (frame_code->has_debug_break_slots()) continue;

Handle<Code> new_code(function->shared()->code());
if (new_code->kind() != Code::FUNCTION ||
!new_code->has_debug_break_slots()) {
continue;
}

Address new_pc =
ComputeNewPcForRedirect(*new_code, *frame_code, frame->pc());

if (FLAG_trace_deopt) {
PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
"with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
"for debugging, "
"changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
reinterpret_cast<intptr_t>(
frame_code->instruction_start()),
reinterpret_cast<intptr_t>(
frame_code->instruction_start()) +
frame_code->instruction_size(),
frame_code->instruction_size(),
reinterpret_cast<intptr_t>(new_code->instruction_start()),
reinterpret_cast<intptr_t>(new_code->instruction_start()) +
new_code->instruction_size(),
new_code->instruction_size(),
reinterpret_cast<intptr_t>(frame->pc()),
reinterpret_cast<intptr_t>(new_pc));
}

if (FLAG_enable_embedded_constant_pool) {
// Update constant pool pointer for new code.
frame->set_constant_pool(new_code->constant_pool());
}

// Patch the return address to return into the code with
// debug break slots.
frame->set_pc(new_pc);
}
}


class ActiveFunctionsCollector : public ThreadVisitor {
class RedirectActiveFunctions : public ThreadVisitor {
public:
explicit ActiveFunctionsCollector(List<Handle<JSFunction> >* active_functions,
Object* active_code_marker)
: active_functions_(active_functions),
active_code_marker_(active_code_marker) { }
explicit RedirectActiveFunctions(SharedFunctionInfo* shared)
: shared_(shared) {
DCHECK(shared->HasDebugCode());
}

void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
CollectActiveFunctionsFromThread(isolate,
top,
active_functions_,
active_code_marker_);
for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
JSFunction* function = frame->function();
if (frame->is_optimized()) continue;
if (!function->Inlines(shared_)) continue;

Code* frame_code = frame->LookupCode();
DCHECK(frame_code->kind() == Code::FUNCTION);
if (frame_code->has_debug_break_slots()) continue;

Code* new_code = function->shared()->code();
Address old_pc = frame->pc();
Address new_pc = ComputeNewPcForRedirect(new_code, frame_code, old_pc);

if (FLAG_trace_deopt) {
PrintF("Replacing pc for debugging: %08" V8PRIxPTR " => %08" V8PRIxPTR
"\n",
reinterpret_cast<intptr_t>(old_pc),
reinterpret_cast<intptr_t>(new_pc));
}

if (FLAG_enable_embedded_constant_pool) {
// Update constant pool pointer for new code.
frame->set_constant_pool(new_code->constant_pool());
}

// Patch the return address to return into the code with
// debug break slots.
frame->set_pc(new_pc);
}
}

private:
List<Handle<JSFunction> >* active_functions_;
Object* active_code_marker_;
SharedFunctionInfo* shared_;
DisallowHeapAllocation no_gc_;
};


class ActiveFunctionsRedirector : public ThreadVisitor {
public:
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
RedirectActivationsToRecompiledCodeOnThread(isolate, top);
bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
DCHECK(shared->is_compiled());

if (isolate_->concurrent_recompilation_enabled()) {
isolate_->optimizing_compile_dispatcher()->Flush();
}
};

List<Handle<JSFunction> > functions;
List<Handle<JSGeneratorObject> > suspended_generators;

static void EnsureFunctionHasDebugBreakSlots(Handle<JSFunction> function) {
if (function->code()->kind() == Code::FUNCTION &&
function->code()->has_debug_break_slots()) {
// Nothing to do. Function code already had debug break slots.
return;
if (!shared->optimized_code_map()->IsSmi()) {
shared->ClearOptimizedCodeMap();
}
// Make sure that the shared full code is compiled with debug
// break slots.
if (!function->shared()->code()->has_debug_break_slots()) {
MaybeHandle<Code> code = Compiler::GetDebugCode(function);
// Recompilation can fail. In that case leave the code as it was.
if (!code.is_null()) function->ReplaceCode(*code.ToHandleChecked());
} else {
// Simply use shared code if it has debug break slots.
function->ReplaceCode(function->shared()->code());
}
}

// Make sure we abort incremental marking.
isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
"prepare for break points");

static void RecompileAndRelocateSuspendedGenerators(
const List<Handle<JSGeneratorObject> > &generators) {
for (int i = 0; i < generators.length(); i++) {
Handle<JSFunction> fun(generators[i]->function());
{
HeapIterator iterator(isolate_->heap());
HeapObject* obj;
bool include_generators = shared->is_generator();

EnsureFunctionHasDebugBreakSlots(fun);

int index = generators[i]->continuation();
int pc_offset = ComputePcOffsetFromContinuationIndex(fun->code(), index);
generators[i]->set_continuation(pc_offset);
}
}


static bool SkipSharedFunctionInfo(SharedFunctionInfo* shared,
Object* active_code_marker) {
if (!shared->allows_lazy_compilation()) return true;
Object* script = shared->script();
if (!script->IsScript()) return true;
if (Script::cast(script)->type()->value() == Script::TYPE_NATIVE) return true;
Code* shared_code = shared->code();
return shared_code->gc_metadata() == active_code_marker;
}


static inline bool HasDebugBreakSlots(Code* code) {
return code->kind() == Code::FUNCTION && code->has_debug_break_slots();
}

void Debug::PrepareForBreakPoints() {
// If preparing for the first break point make sure to deoptimize all
// functions as debugging does not work with optimized code.
if (!has_break_points_) {
if (isolate_->concurrent_recompilation_enabled()) {
isolate_->optimizing_compile_dispatcher()->Flush();
}

Deoptimizer::DeoptimizeAll(isolate_);

Handle<Code> lazy_compile = isolate_->builtins()->CompileLazy();

// There will be at least one break point when we are done.
has_break_points_ = true;

// Keep the list of activated functions in a handlified list as it
// is used both in GC and non-GC code.
List<Handle<JSFunction> > active_functions(100);

// A list of all suspended generators.
List<Handle<JSGeneratorObject> > suspended_generators;

// A list of all generator functions. We need to recompile all functions,
// but we don't know until after visiting the whole heap which generator
// functions have suspended activations and which do not. As in the case of
// functions with activations on the stack, we need to be careful with
// generator functions with suspended activations because although they
// should be recompiled, recompilation can fail, and we need to avoid
// leaving the heap in an inconsistent state.
//
// We could perhaps avoid this list and instead re-use the GC metadata
// links.
List<Handle<JSFunction> > generator_functions;

{
// We are going to iterate heap to find all functions without
// debug break slots.
Heap* heap = isolate_->heap();
heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
"preparing for breakpoints");
HeapIterator iterator(heap);

// Ensure no GC in this scope as we are going to use gc_metadata
// field in the Code object to mark active functions.
DisallowHeapAllocation no_allocation;

Object* active_code_marker = heap->the_hole_value();

CollectActiveFunctionsFromThread(isolate_,
isolate_->thread_local_top(),
&active_functions,
active_code_marker);
ActiveFunctionsCollector active_functions_collector(&active_functions,
active_code_marker);
isolate_->thread_manager()->IterateArchivedThreads(
&active_functions_collector);

// Scan the heap for all non-optimized functions which have no
// debug break slots and are not active or inlined into an active
// function and mark them for lazy compilation.
HeapObject* obj = NULL;
while (((obj = iterator.next()) != NULL)) {
if (obj->IsJSFunction()) {
JSFunction* function = JSFunction::cast(obj);
SharedFunctionInfo* shared = function->shared();
if (SkipSharedFunctionInfo(shared, active_code_marker)) continue;
if (shared->is_generator()) {
generator_functions.Add(Handle<JSFunction>(function, isolate_));
continue;
}
if (HasDebugBreakSlots(function->code())) continue;
Code* fallback = HasDebugBreakSlots(shared->code()) ? shared->code()
: *lazy_compile;
Code::Kind kind = function->code()->kind();
if (kind == Code::FUNCTION ||
(kind == Code::BUILTIN && // Abort in-flight compilation.
(function->IsInOptimizationQueue() ||
function->IsMarkedForOptimization() ||
function->IsMarkedForConcurrentOptimization()))) {
function->ReplaceCode(fallback);
}
if (kind == Code::OPTIMIZED_FUNCTION) {
// Optimized code can only get here if DeoptimizeAll did not
// deoptimize turbo fan code.
DCHECK(!FLAG_turbo_asm_deoptimization);
DCHECK(function->shared()->asm_function());
DCHECK(function->code()->is_turbofanned());
function->ReplaceCode(fallback);
}
} else if (obj->IsJSGeneratorObject()) {
JSGeneratorObject* gen = JSGeneratorObject::cast(obj);
if (!gen->is_suspended()) continue;

JSFunction* fun = gen->function();
DCHECK_EQ(fun->code()->kind(), Code::FUNCTION);
if (fun->code()->has_debug_break_slots()) continue;

int pc_offset = gen->continuation();
DCHECK_LT(0, pc_offset);

int index =
ComputeContinuationIndexFromPcOffset(fun->code(), pc_offset);

// This will be fixed after we recompile the functions.
gen->set_continuation(index);

suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_));
} else if (obj->IsSharedFunctionInfo()) {
SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
if (SkipSharedFunctionInfo(shared, active_code_marker)) continue;
if (shared->is_generator()) continue;
if (HasDebugBreakSlots(shared->code())) continue;
shared->ReplaceCode(*lazy_compile);
while ((obj = iterator.next())) {
if (obj->IsJSFunction()) {
JSFunction* function = JSFunction::cast(obj);
if (!function->Inlines(*shared)) continue;
if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
Deoptimizer::DeoptimizeFunction(function);
}
}

// Clear gc_metadata field.
for (int i = 0; i < active_functions.length(); i++) {
Handle<JSFunction> function = active_functions[i];
function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
functions.Add(handle(function));
} else if (include_generators && obj->IsJSGeneratorObject()) {
JSGeneratorObject* generator_obj = JSGeneratorObject::cast(obj);
if (!generator_obj->is_suspended()) continue;
JSFunction* function = generator_obj->function();
if (!function->Inlines(*shared)) continue;
int pc_offset = generator_obj->continuation();
int index =
ComputeContinuationIndexFromPcOffset(function->code(), pc_offset);
generator_obj->set_continuation(index);
suspended_generators.Add(handle(generator_obj));
}
}

// Recompile generator functions that have suspended activations, and
// relocate those activations.
RecompileAndRelocateSuspendedGenerators(suspended_generators);

// Mark generator functions that didn't have suspended activations for lazy
// recompilation. Note that this set does not include any active functions.
for (int i = 0; i < generator_functions.length(); i++) {
Handle<JSFunction> &function = generator_functions[i];
if (function->code()->kind() != Code::FUNCTION) continue;
if (function->code()->has_debug_break_slots()) continue;
function->ReplaceCode(*lazy_compile);
function->shared()->ReplaceCode(*lazy_compile);
}

// Now recompile all functions with activation frames and and
// patch the return address to run in the new compiled code. It could be
// that some active functions were recompiled already by the suspended
// generator recompilation pass above; a generator with suspended
// activations could also have active activations. That's fine.
for (int i = 0; i < active_functions.length(); i++) {
Handle<JSFunction> function = active_functions[i];
Handle<SharedFunctionInfo> shared(function->shared());
if (!shared->allows_lazy_compilation()) {
// Ignore functions that cannot be recompiled. Fortunately, those are
// only ones that are not subject to debugging in the first place.
DCHECK(!function->IsSubjectToDebugging());
continue;
}
if (shared->code()->kind() == Code::BUILTIN) continue;

EnsureFunctionHasDebugBreakSlots(function);
}

RedirectActivationsToRecompiledCodeOnThread(isolate_,
isolate_->thread_local_top());

ActiveFunctionsRedirector active_functions_redirector;
isolate_->thread_manager()->IterateArchivedThreads(
&active_functions_redirector);
}

if (!shared->HasDebugCode()) {
DCHECK(functions.length() > 0);
if (Compiler::GetDebugCode(functions.first()).is_null()) {
return false;
}
}

for (Handle<JSFunction> const function : functions) {
function->ReplaceCode(shared->code());
}

for (Handle<JSGeneratorObject> const generator_obj : suspended_generators) {
int index = generator_obj->continuation();
int pc_offset = ComputePcOffsetFromContinuationIndex(shared->code(), index);
generator_obj->set_continuation(pc_offset);
}

// Update PCs on the stack to point to recompiled code.
RedirectActiveFunctions redirect_visitor(*shared);
redirect_visitor.VisitThread(isolate_, isolate_->thread_local_top());
isolate_->thread_manager()->IterateArchivedThreads(&redirect_visitor);

return true;
}

@@ -1799,20 +1559,12 @@ class SharedFunctionInfoFinder {
};


template <typename C>
bool Debug::CompileToRevealInnerFunctions(C* compilable) {
HandleScope scope(isolate_);
// Force compiling inner functions that require context.
// TODO(yangguo): remove this hack.
bool has_break_points = has_break_points_;
has_break_points_ = true;
Handle<C> compilable_handle(compilable);
bool result = !Compiler::GetUnoptimizedCode(compilable_handle).is_null();
has_break_points_ = has_break_points;
return result;
}


// We need to find a SFI for a literal that may not yet have been compiled yet,
// and there may not be a JSFunction referencing it. Find the SFI closest to
// the given position, compile it to reveal possible inner SFIs and repeat.
// While we are at this, also ensure code with debug break slots so that we do
// not have to compile a SFI without JSFunction, which is paifu for those that
// cannot be compiled without context (need to find outer compilable SFI etc.)
Handle<Object> Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
int position) {
while (true) {
@@ -1832,19 +1584,20 @@ Handle<Object> Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
}
shared = finder.Result();
if (shared == NULL) break;
// We found it if it's already compiled.
if (shared->is_compiled()) return handle(shared);
// We found it if it's already compiled and has debug code.
if (shared->HasDebugCode()) return handle(shared);
}
// If not, compile to reveal inner functions, if possible.
if (shared->allows_lazy_compilation_without_context()) {
if (!CompileToRevealInnerFunctions(shared)) break;
HandleScope scope(isolate_);
if (Compiler::GetDebugCode(handle(shared)).is_null()) break;
continue;
}

// If not possible, comb the heap for the best suitable compile target.
JSFunction* closure;
{
HeapIterator it(isolate_->heap(), HeapIterator::kNoFiltering);
HeapIterator it(isolate_->heap());
SharedFunctionInfoFinder finder(position);
while (HeapObject* object = it.next()) {
JSFunction* candidate_closure = NULL;
@@ -1865,9 +1618,11 @@ Handle<Object> Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
closure = finder.ResultClosure();
shared = finder.Result();
}
if (closure == NULL ? !CompileToRevealInnerFunctions(shared)
: !CompileToRevealInnerFunctions(closure)) {
break;
HandleScope scope(isolate_);
if (closure == NULL) {
if (Compiler::GetDebugCode(handle(shared)).is_null()) break;
} else {
if (Compiler::GetDebugCode(handle(closure)).is_null()) break;
}
}
return isolate_->factory()->undefined_value();
@@ -1880,30 +1635,23 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
if (!shared->IsSubjectToDebugging()) return false;

// Return if we already have the debug info for shared.
if (HasDebugInfo(shared)) {
DCHECK(shared->is_compiled());
DCHECK(shared->code()->has_debug_break_slots());
return true;
}

// There will be at least one break point when we are done.
has_break_points_ = true;
if (shared->HasDebugInfo()) return true;

if (function.is_null()) {
DCHECK(shared->is_compiled());
DCHECK(shared->code()->has_debug_break_slots());
DCHECK(shared->HasDebugCode());
} else if (!Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) {
return false;
}

if (!PrepareFunctionForBreakPoints(shared)) return false;

// Make sure IC state is clean. This is so that we correctly flood
// accessor pairs when stepping in.
shared->code()->ClearInlineCaches();
shared->feedback_vector()->ClearICSlots(*shared);

// Create the debug info object.
DCHECK(shared->is_compiled());
DCHECK(shared->code()->has_debug_break_slots());
DCHECK(shared->HasDebugCode());
Handle<DebugInfo> debug_info = isolate_->factory()->NewDebugInfo(shared);

// Add debug info to the list.
@@ -1923,10 +1671,6 @@ void Debug::RemoveDebugInfo(DebugInfoListNode* prev, DebugInfoListNode* node) {
prev->set_next(node->next());
}
delete node;

// If there are no more debug info objects there are not more break
// points.
has_break_points_ = debug_info_list_ != NULL;
}


@@ -1989,21 +1733,13 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
HandleScope scope(isolate_);

// If there are no break points this cannot be break at return, as
// the debugger statement and stack guard debug break cannot be at
// return.
if (!has_break_points_) return false;

PrepareForBreakPoints();

// Get the executing function in which the debug break occurred.
Handle<JSFunction> function(JSFunction::cast(frame->function()));
Handle<SharedFunctionInfo> shared(function->shared());
if (!EnsureDebugInfo(shared, function)) {
// Return if we failed to retrieve the debug info.
return false;
}
Handle<DebugInfo> debug_info = GetDebugInfo(shared);

// With no debug info there are no break points, so we can't be at a return.
if (!shared->HasDebugInfo()) return false;
Handle<DebugInfo> debug_info(shared->GetDebugInfo());
Handle<Code> code(debug_info->code());
#ifdef DEBUG
// Get the code which is actually executing.
@@ -2014,7 +1750,7 @@ bool Debug::IsBreakAtReturn(JavaScriptFrame* frame) {
// Find the reloc info matching the start of the debug break slot.
Address slot_pc = frame->pc() - Assembler::kDebugBreakSlotLength;
int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
for (RelocIterator it(debug_info->code(), mask); !it.done(); it.next()) {
for (RelocIterator it(*code, mask); !it.done(); it.next()) {
if (it.rinfo()->pc() == slot_pc) return true;
}
return false;
@@ -2068,6 +1804,44 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
}


void Debug::GetStepinPositions(JavaScriptFrame* frame, StackFrame::Id frame_id,
List<int>* results_out) {
FrameSummary summary = GetFirstFrameSummary(frame);

Handle<JSFunction> fun = Handle<JSFunction>(summary.function());
Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo>(fun->shared());

if (!EnsureDebugInfo(shared, fun)) return;

Handle<DebugInfo> debug_info(shared->GetDebugInfo());
// Refresh frame summary if the code has been recompiled for debugging.
if (shared->code() != *summary.code()) summary = GetFirstFrameSummary(frame);

// Find range of break points starting from the break point where execution
// has stopped.
Address call_pc = summary.pc() - 1;
List<BreakLocation> locations;
BreakLocation::FromAddressSameStatement(debug_info, ALL_BREAK_LOCATIONS,
call_pc, &locations);

for (BreakLocation location : locations) {
if (location.pc() <= summary.pc()) {
// The break point is near our pc. Could be a step-in possibility,
// that is currently taken by active debugger call.
if (break_frame_id() == StackFrame::NO_ID) {
continue; // We are not stepping.
} else {
JavaScriptFrameIterator frame_it(isolate_, break_frame_id());
// If our frame is a top frame and we are stepping, we can do step-in
// at this place.
if (frame_it.frame()->id() != frame_id) continue;
}
}
if (location.IsStepInLocation()) results_out->Add(location.position());
}
}


void Debug::RecordEvalCaller(Handle<Script> script) {
script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
// For eval scripts add information on the function from which eval was
src/debug.h

@@ -438,8 +438,10 @@ class Debug {
void HandleStepIn(Handle<Object> function_obj, bool is_constructor);
bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }

// Purge all code objects that have no debug break slots.
void PrepareForBreakPoints();
void GetStepinPositions(JavaScriptFrame* frame, StackFrame::Id frame_id,
List<int>* results_out);

bool PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared);

// Returns whether the operation succeeded. Compilation can only be triggered
// if a valid closure is passed as the second argument, otherwise the shared
@@ -447,7 +449,6 @@ class Debug {
bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
Handle<JSFunction> function);
static Handle<DebugInfo> GetDebugInfo(Handle<SharedFunctionInfo> shared);
static bool HasDebugInfo(Handle<SharedFunctionInfo> shared);

template <typename C>
bool CompileToRevealInnerFunctions(C* compilable);
@@ -492,6 +493,11 @@ class Debug {
break_id() == id;
}

bool RequiresEagerCompilation(bool allows_lazy_without_ctx = false) {
return LiveEditFunctionTracker::IsActive(isolate_) ||
(is_active() && !allows_lazy_without_ctx);
}

// Flags and states.
DebugScope* debugger_entry() {
return reinterpret_cast<DebugScope*>(
@@ -505,7 +511,6 @@ class Debug {

inline bool is_active() const { return is_active_; }
inline bool is_loaded() const { return !debug_context_.is_null(); }
inline bool has_break_points() const { return has_break_points_; }
inline bool in_debug_scope() const {
return !!base::NoBarrier_Load(&thread_local_.current_debug_scope_);
}
@ -4357,10 +4357,7 @@ void MarkCompactCollector::ParallelSweepSpacesComplete() {


void MarkCompactCollector::EnableCodeFlushing(bool enable) {
  if (isolate()->debug()->is_loaded() ||
      isolate()->debug()->has_break_points()) {
    enable = false;
  }
  if (isolate()->debug()->is_active()) enable = false;

  if (enable) {
    if (code_flusher_ != NULL) return;
@ -8149,6 +8149,10 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,

  CompilationInfo target_info(&parse_info);
  Handle<SharedFunctionInfo> target_shared(target->shared());
  if (target_shared->HasDebugInfo()) {
    TraceInline(target, caller, "target is being debugged");
    return false;
  }
  if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
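With this hunk the Crankshaft inliner refuses to inline any callee whose SharedFunctionInfo already carries a DebugInfo. A rough way to observe the bail-out from the shell, assuming d8 with --expose-debug-as debug --allow-natives-syntax --trace-inlining; the exact trace wording comes from TraceInline above and is not guaranteed to stay stable:

var Debug = debug.Debug;

function small() { return 1; }        // normally a trivial inlining candidate
function outer() { return small(); }

Debug.setListener(function() {});
Debug.setBreakPoint(small, 0);        // attaches a DebugInfo to small

outer();
outer();
%OptimizeFunctionOnNextCall(outer);
outer();  // --trace-inlining should report that small was not inlined
          // because the target is being debugged

Debug.setListener(null);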
@ -292,8 +292,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence() &&
             isolate->debug()->has_break_points()) {
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
@ -317,8 +316,7 @@ void RelocInfo::Visit(Heap* heap) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             RelocInfo::IsDebugBreakSlot(mode) &&
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (IsRuntimeEntry(mode)) {
@ -1200,7 +1200,7 @@ Handle<Code> LoadIC::CompileHandler(LookupIterator* lookup,
      if (!getter->IsJSFunction()) break;
      if (!holder->HasFastProperties()) break;
      // When debugging we need to go the slow path to flood the accessor.
      if (!GetSharedFunctionInfo()->debug_info()->IsUndefined()) break;
      if (GetSharedFunctionInfo()->HasDebugInfo()) break;
      Handle<JSFunction> function = Handle<JSFunction>::cast(getter);
      if (!receiver->IsJSObject() && !function->IsBuiltin() &&
          is_sloppy(function->shared()->language_mode())) {
@ -1782,7 +1782,7 @@ Handle<Code> StoreIC::CompileHandler(LookupIterator* lookup,
        break;
      }
      // When debugging we need to go the slow path to flood the accessor.
      if (!GetSharedFunctionInfo()->debug_info()->IsUndefined()) break;
      if (GetSharedFunctionInfo()->HasDebugInfo()) break;
      Handle<JSFunction> function = Handle<JSFunction>::cast(setter);
      CallOptimization call_optimization(function);
      NamedStoreHandlerCompiler compiler(isolate(), receiver_map(), holder);
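Both IC compilers now ask SharedFunctionInfo::HasDebugInfo() instead of peeking at the raw debug_info slot, so an accessor that is being debugged keeps taking the generic slow path where the debugger can intercept it. A small scenario of the kind this check is guarding, assuming d8 with --expose-debug-as debug; the hit counter is only illustrative:

var Debug = debug.Debug;
var hits = 0;

var o = { get x() { return 42; } };   // accessor the IC would like to cache

Debug.setListener(function(event) {
  if (event == Debug.DebugEvent.Break) hits++;
});

var getter = Object.getOwnPropertyDescriptor(o, "x").get;
Debug.setBreakPoint(getter, 0);       // attaches a DebugInfo to the getter

for (var i = 0; i < 5; i++) o.x;      // every read still reaches the break point

print(hits);  // expected: 5
Debug.setListener(null);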
@ -1093,37 +1093,6 @@ class LiteralFixer {
};


namespace {

// Check whether the code is natural function code (not a lazy-compile stub
// code).
bool IsJSFunctionCode(Code* code) { return code->kind() == Code::FUNCTION; }


// Returns true if an instance of candidate were inlined into function's code.
bool IsInlined(JSFunction* function, SharedFunctionInfo* candidate) {
  DisallowHeapAllocation no_gc;

  if (function->code()->kind() != Code::OPTIMIZED_FUNCTION) return false;

  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(function->code()->deoptimization_data());
  if (data != function->GetIsolate()->heap()->empty_fixed_array()) {
    FixedArray* const literals = data->LiteralArray();
    int const inlined_count = data->InlinedFunctionCount()->value();
    for (int i = 0; i < inlined_count; ++i) {
      if (SharedFunctionInfo::cast(literals->get(i)) == candidate) {
        return true;
      }
    }
  }

  return false;
}

}  // namespace


// Marks code that shares the same shared function info or has inlined
// code that shares the same function info.
class DependentFunctionMarker: public OptimizedFunctionVisitor {
@ -1139,8 +1108,7 @@ class DependentFunctionMarker: public OptimizedFunctionVisitor {
  virtual void VisitFunction(JSFunction* function) {
    // It should be guaranteed by the iterator that everything is optimized.
    DCHECK(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
    if (shared_info_ == function->shared() ||
        IsInlined(function, shared_info_)) {
    if (function->Inlines(shared_info_)) {
      // Mark the code for deoptimization.
      function->code()->set_marked_for_deoptimization(true);
      found_ = true;
@ -1172,7 +1140,7 @@ void LiveEdit::ReplaceFunctionCode(

  Handle<SharedFunctionInfo> shared_info = shared_info_wrapper.GetInfo();

  if (IsJSFunctionCode(shared_info->code())) {
  if (shared_info->code()->kind() == Code::FUNCTION) {
    Handle<Code> code = compile_info_wrapper.GetFunctionCode();
    ReplaceCodeObject(Handle<Code>(shared_info->code()), code);
    Handle<Object> code_scope_info = compile_info_wrapper.GetCodeScopeInfo();
@ -1403,7 +1371,7 @@ void LiveEdit::PatchFunctionPositions(Handle<JSArray> shared_info_array,
  info->set_end_position(new_function_end);
  info->set_function_token_position(new_function_token_pos);

  if (IsJSFunctionCode(info->code())) {
  if (info->code()->kind() == Code::FUNCTION) {
    // Patch relocation info section of the code.
    Handle<Code> patched_code = PatchPositionsInCode(Handle<Code>(info->code()),
                                                     position_change_array);
@ -1508,7 +1476,7 @@ static bool CheckActivation(Handle<JSArray> shared_info_array,
  Handle<SharedFunctionInfo> shared =
      UnwrapSharedFunctionInfoFromJSValue(jsvalue);

  if (function->shared() == *shared || IsInlined(*function, *shared)) {
  if (function->Inlines(*shared)) {
    SetElementSloppy(result, i, Handle<Smi>(Smi::FromInt(status), isolate));
    return true;
  }
@ -424,8 +424,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence() &&
             isolate->debug()->has_break_points()) {
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
@ -449,8 +448,7 @@ void RelocInfo::Visit(Heap* heap) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             RelocInfo::IsDebugBreakSlot(mode) &&
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
@ -417,8 +417,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence() &&
             isolate->debug()->has_break_points()) {
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
@ -442,8 +441,7 @@ void RelocInfo::Visit(Heap* heap) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             RelocInfo::IsDebugBreakSlot(mode) &&
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
@ -5462,6 +5462,24 @@ bool SharedFunctionInfo::is_simple_parameter_list() {
}


bool SharedFunctionInfo::HasDebugInfo() {
  bool has_debug_info = debug_info()->IsStruct();
  DCHECK(!has_debug_info || HasDebugCode());
  return has_debug_info;
}


DebugInfo* SharedFunctionInfo::GetDebugInfo() {
  DCHECK(HasDebugInfo());
  return DebugInfo::cast(debug_info());
}


bool SharedFunctionInfo::HasDebugCode() {
  return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}
@ -9328,11 +9328,32 @@ void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
}


bool JSFunction::Inlines(SharedFunctionInfo* candidate) {
  DisallowHeapAllocation no_gc;
  if (shared() == candidate) return true;
  if (code()->kind() != Code::OPTIMIZED_FUNCTION) return false;
  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(code()->deoptimization_data());
  if (data->length() == 0) return false;
  FixedArray* const literals = data->LiteralArray();
  int const inlined_count = data->InlinedFunctionCount()->value();
  for (int i = 0; i < inlined_count; ++i) {
    if (SharedFunctionInfo::cast(literals->get(i)) == candidate) {
      return true;
    }
  }
  return false;
}


void JSFunction::MarkForOptimization() {
  Isolate* isolate = GetIsolate();
  // Do not optimize if function contains break points.
  if (shared()->HasDebugInfo()) return;
  DCHECK(!IsOptimized());
  DCHECK(shared()->allows_lazy_compilation() ||
         !shared()->optimization_disabled());
  DCHECK(!shared()->HasDebugInfo());
  set_code_no_write_barrier(
      isolate->builtins()->builtin(Builtins::kCompileOptimized));
  // No write barrier required, since the builtin is part of the root set.
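JSFunction::Inlines replaces the previously file-local IsInlined helper, so any caller that needs to deoptimize everything compiled against a debugged SharedFunctionInfo can ask the JSFunction directly, while MarkForOptimization is kept away from functions that already carry a DebugInfo. A sketch of the observable effect, the converse of the new debug-optimize.js test below, assuming the same d8 flags and the mjsunit harness preloaded for assertOptimized/assertUnoptimized:

var Debug = debug.Debug;

function callee() { return 1; }
function caller() { return callee(); }

function warmUpAndOptimize(f) {
  f();
  f();
  %OptimizeFunctionOnNextCall(f);
  f();
}

warmUpAndOptimize(callee);
warmUpAndOptimize(caller);

Debug.setListener(function() {});
Debug.setBreakPoint(caller, 0);  // only caller acquires a DebugInfo

assertUnoptimized(caller);  // caller, and anything inlining it, is deoptimized
assertOptimized(callee);    // the callee keeps its own optimized code

Debug.setListener(null);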
@ -6668,6 +6668,13 @@ class SharedFunctionInfo: public HeapObject {
  inline int start_position_and_type() const;
  inline void set_start_position_and_type(int value);

  // The function is subject to debugging if a debug info is attached.
  inline bool HasDebugInfo();
  inline DebugInfo* GetDebugInfo();

  // A function has debug code if the compiled code has debug break slots.
  inline bool HasDebugCode();

  // [debug info]: Debug information.
  DECL_ACCESSORS(debug_info, Object)

@ -7253,6 +7260,9 @@ class JSFunction: public JSObject {
  // Tells whether this function is builtin.
  inline bool IsBuiltin();

  // Tells whether this function inlines the given shared function info.
  bool Inlines(SharedFunctionInfo* candidate);

  // Tells whether this function should be subject to debugging.
  inline bool IsSubjectToDebugging();

@ -1396,14 +1396,14 @@ class ScopeIterator {
  // Later we may optimize getting the nested scopes (cache the result?)
  // and include nested scopes into the "fast" iteration case as well.

  if (!ignore_nested_scopes && !shared_info->debug_info()->IsUndefined()) {
  if (!ignore_nested_scopes && shared_info->HasDebugInfo()) {
    // The source position at return is always the end of the function,
    // which is not consistent with the current scope chain. Therefore all
    // nested with, catch and block contexts are skipped, and we can only
    // inspect the function scope.
    // This can only happen if we set a break point inside right before the
    // return, which requires a debug info to be available.
    Handle<DebugInfo> debug_info = Debug::GetDebugInfo(shared_info);
    Handle<DebugInfo> debug_info(shared_info->GetDebugInfo());

    // PC points to the instruction after the current one, possibly a break
    // location as well. So the "- 1" to exclude it from the search.
@ -1818,59 +1818,14 @@ RUNTIME_FUNCTION(Runtime_GetStepInPositions) {
  JavaScriptFrameIterator frame_it(isolate, id);
  RUNTIME_ASSERT(!frame_it.done());

  List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
  frame_it.frame()->Summarize(&frames);
  FrameSummary summary = frames.first();

  Handle<JSFunction> fun = Handle<JSFunction>(summary.function());
  Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo>(fun->shared());

  if (!isolate->debug()->EnsureDebugInfo(shared, fun)) {
    return isolate->heap()->undefined_value();
  List<int> positions;
  isolate->debug()->GetStepinPositions(frame_it.frame(), id, &positions);
  Factory* factory = isolate->factory();
  Handle<FixedArray> array = factory->NewFixedArray(positions.length());
  for (int i = 0; i < positions.length(); ++i) {
    array->set(i, Smi::FromInt(positions[i]));
  }

  Handle<DebugInfo> debug_info = Debug::GetDebugInfo(shared);

  // Find range of break points starting from the break point where execution
  // has stopped.
  Address call_pc = summary.pc() - 1;
  List<BreakLocation> locations;
  BreakLocation::FromAddressSameStatement(debug_info, ALL_BREAK_LOCATIONS,
                                          call_pc, &locations);

  Handle<JSArray> array = isolate->factory()->NewJSArray(locations.length());

  int index = 0;
  for (BreakLocation location : locations) {
    bool accept;
    if (location.pc() > summary.pc()) {
      accept = true;
    } else {
      StackFrame::Id break_frame_id = isolate->debug()->break_frame_id();
      // The break point is near our pc. Could be a step-in possibility,
      // that is currently taken by active debugger call.
      if (break_frame_id == StackFrame::NO_ID) {
        // We are not stepping.
        accept = false;
      } else {
        JavaScriptFrameIterator additional_frame_it(isolate, break_frame_id);
        // If our frame is a top frame and we are stepping, we can do step-in
        // at this place.
        accept = additional_frame_it.frame()->id() == id;
      }
    }
    if (accept) {
      if (location.IsStepInLocation()) {
        Smi* position_value = Smi::FromInt(location.position());
        RETURN_FAILURE_ON_EXCEPTION(
            isolate,
            Object::SetElement(isolate, array, index,
                               handle(position_value, isolate), SLOPPY));
        index++;
      }
    }
  }
  return *array;
  return *factory->NewJSArrayWithElements(array, FAST_SMI_ELEMENTS);
}


@ -553,8 +553,7 @@ void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence() &&
             isolate->debug()->has_break_points()) {
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
@ -578,8 +577,7 @@ void RelocInfo::Visit(Heap* heap) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             RelocInfo::IsDebugBreakSlot(mode) &&
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
@ -155,7 +155,7 @@ static v8::Local<v8::Function> CompileFunction(v8::Isolate* isolate,
static bool HasDebugInfo(v8::Handle<v8::Function> fun) {
  Handle<v8::internal::JSFunction> f = v8::Utils::OpenHandle(*fun);
  Handle<v8::internal::SharedFunctionInfo> shared(f->shared());
  return Debug::HasDebugInfo(shared);
  return shared->HasDebugInfo();
}


@ -80,7 +80,6 @@ static void CheckFunctionName(v8::Handle<v8::Script> script,
  CHECK_NE(0, func_pos);

  // Obtain SharedFunctionInfo for the function.
  isolate->debug()->PrepareForBreakPoints();
  Handle<SharedFunctionInfo> shared_func_info =
      Handle<SharedFunctionInfo>::cast(
          isolate->debug()->FindSharedFunctionInfoInScript(i_script, func_pos));
@ -136,6 +136,7 @@ function f() {
function g() {
  var a = 2;
  f();
  return a;  // Use the value to prevent it being removed by DCE.
};

a = 1;
54 test/mjsunit/debug-optimize.js (Normal file)
@ -0,0 +1,54 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --expose-debug-as debug --allow-natives-syntax --use-inlining

var Debug = debug.Debug;

function f1() {
  return 1;
}

function f2() {
  return 2;
}

function f3() {
  return f1();
}

function f4() {
  return 4;
}


function optimize(f) {
  f();
  f();
  %OptimizeFunctionOnNextCall(f);
  f();
}

optimize(f1);
optimize(f2);
optimize(f3);

Debug.setListener(function() {});

assertOptimized(f1);
assertOptimized(f2);
assertOptimized(f3);

Debug.setBreakPoint(f1, 1);

// Setting break point deoptimizes f1 and f3 (which inlines f1).
assertUnoptimized(f1);
assertOptimized(f2);
assertUnoptimized(f3);

// We can optimize with break points set.
optimize(f4);
assertOptimized(f4);

Debug.setListener(null);