// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler.h"

#include <algorithm>
#include <memory>

#include "src/asmjs/asm-js.h"
#include "src/asmjs/asm-typer.h"
#include "src/ast/ast-numbering.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/compiler/pipeline.h"
#include "src/crankshaft/hydrogen.h"
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/globals.h"
#include "src/heap/heap.h"
#include "src/interpreter/interpreter.h"
#include "src/isolate-inl.h"
#include "src/log-inl.h"
#include "src/messages.h"
#include "src/parsing/parser.h"
#include "src/parsing/rewriter.h"
#include "src/parsing/scanner-character-streams.h"
#include "src/runtime-profiler.h"
#include "src/snapshot/code-serializer.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

#define PARSE_INFO_GETTER(type, name)  \
  type CompilationInfo::name() const { \
    CHECK(parse_info());               \
    return parse_info()->name();       \
  }

#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
  type CompilationInfo::name() const {                  \
    return parse_info() ? parse_info()->name() : def;   \
  }
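
// For illustration: PARSE_INFO_GETTER(Handle<Script>, script) below expands
// to roughly the following accessor (the _WITH_DEFAULT variant additionally
// tolerates a missing ParseInfo and returns the given default instead):
//
//   Handle<Script> CompilationInfo::script() const {
//     CHECK(parse_info());
//     return parse_info()->script();
//   }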

PARSE_INFO_GETTER(Handle<Script>, script)
PARSE_INFO_GETTER(FunctionLiteral*, literal)
PARSE_INFO_GETTER_WITH_DEFAULT(DeclarationScope*, scope, nullptr)
PARSE_INFO_GETTER_WITH_DEFAULT(Handle<Context>, context,
                               Handle<Context>::null())
PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)

#undef PARSE_INFO_GETTER
#undef PARSE_INFO_GETTER_WITH_DEFAULT

// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};
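
// Usage note: GetOptimizedCodeLater() below opens a CompilationHandleScope
// before allocating handles that must outlive the current handle scope; when
// the scope is destroyed, the detached deferred handles are stored on the
// CompilationInfo and travel with the job to the background thread.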

// Helper that times a scoped region and records the elapsed time.
struct ScopedTimer {
  explicit ScopedTimer(base::TimeDelta* location) : location_(location) {
    DCHECK(location_ != NULL);
    timer_.Start();
  }

  ~ScopedTimer() { *location_ += timer_.Elapsed(); }

  base::ElapsedTimer timer_;
  base::TimeDelta* location_;
};
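
// Typical use, as in CompilationJob::CreateGraph() below:
//
//   ScopedTimer t(&time_taken_to_create_graph_);
//
// The elapsed time is added to the referenced TimeDelta when the timer goes
// out of scope, so repeated invocations accumulate.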

// ----------------------------------------------------------------------------
// Implementation of CompilationInfo

bool CompilationInfo::has_shared_info() const {
  return parse_info_ && !parse_info_->shared_info().is_null();
}

CompilationInfo::CompilationInfo(ParseInfo* parse_info,
                                 Handle<JSFunction> closure)
    : CompilationInfo(parse_info, {}, Code::ComputeFlags(Code::FUNCTION), BASE,
                      parse_info->isolate(), parse_info->zone()) {
  closure_ = closure;

  // Compiling for the snapshot typically results in different code than
  // compiling later on. This means that code recompiled with deoptimization
  // support won't be "equivalent" (as defined by SharedFunctionInfo::
  // EnableDeoptimizationSupport), so it will replace the old code and all
  // its type feedback. To avoid this, always compile functions in the snapshot
  // with deoptimization support.
  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();

  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
}

CompilationInfo::CompilationInfo(Vector<const char> debug_name,
                                 Isolate* isolate, Zone* zone,
                                 Code::Flags code_flags)
    : CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}

CompilationInfo::CompilationInfo(ParseInfo* parse_info,
                                 Vector<const char> debug_name,
                                 Code::Flags code_flags, Mode mode,
                                 Isolate* isolate, Zone* zone)
    : parse_info_(parse_info),
      isolate_(isolate),
      flags_(0),
      code_flags_(code_flags),
      mode_(mode),
      osr_ast_id_(BailoutId::None()),
      zone_(zone),
      deferred_handles_(nullptr),
      dependencies_(isolate, zone),
      bailout_reason_(kNoReason),
      prologue_offset_(Code::kPrologueOffsetNotSet),
      track_positions_(FLAG_hydrogen_track_positions ||
                       isolate->is_profiling()),
      parameter_count_(0),
      optimization_id_(-1),
      osr_expr_stack_height_(0),
      debug_name_(debug_name) {}

CompilationInfo::~CompilationInfo() {
  if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
    shared_info()->DisableOptimization(bailout_reason());
  }
  dependencies()->Rollback();
  delete deferred_handles_;
}

int CompilationInfo::num_parameters() const {
  return !IsStub() ? scope()->num_parameters() : parameter_count_;
}

int CompilationInfo::num_parameters_including_this() const {
  return num_parameters() + (is_this_defined() ? 1 : 0);
}

bool CompilationInfo::is_this_defined() const { return !IsStub(); }

// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_crankshaft &&
         !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
         !literal()->dont_optimize() &&
         literal()->scope()->AllowsLazyCompilation() &&
         !shared_info()->optimization_disabled();
}

bool CompilationInfo::has_simple_parameters() {
  return scope()->has_simple_parameters();
}

std::unique_ptr<char[]> CompilationInfo::GetDebugName() const {
  if (parse_info() && parse_info()->literal()) {
    AllowHandleDereference allow_deref;
    return parse_info()->literal()->debug_name()->ToCString();
  }
  if (parse_info() && !parse_info()->shared_info().is_null()) {
    return parse_info()->shared_info()->DebugName()->ToCString();
  }
  Vector<const char> name_vec = debug_name_;
  if (name_vec.is_empty()) name_vec = ArrayVector("unknown");
  std::unique_ptr<char[]> name(new char[name_vec.length() + 1]);
  memcpy(name.get(), name_vec.start(), name_vec.length());
  name[name_vec.length()] = '\0';
  return name;
}

StackFrame::Type CompilationInfo::GetOutputStackFrameType() const {
  switch (output_code_kind()) {
    case Code::STUB:
    case Code::BYTECODE_HANDLER:
    case Code::HANDLER:
    case Code::BUILTIN:
#define CASE_KIND(kind) case Code::kind:
      IC_KIND_LIST(CASE_KIND)
#undef CASE_KIND
      return StackFrame::STUB;
    case Code::WASM_FUNCTION:
      return StackFrame::WASM;
    case Code::JS_TO_WASM_FUNCTION:
      return StackFrame::JS_TO_WASM;
    case Code::WASM_TO_JS_FUNCTION:
      return StackFrame::WASM_TO_JS;
    default:
      UNIMPLEMENTED();
      return StackFrame::NONE;
  }
}

int CompilationInfo::GetDeclareGlobalsFlags() const {
  DCHECK(DeclareGlobalsLanguageMode::is_valid(parse_info()->language_mode()));
  return DeclareGlobalsEvalFlag::encode(parse_info()->is_eval()) |
         DeclareGlobalsNativeFlag::encode(parse_info()->is_native()) |
         DeclareGlobalsLanguageMode::encode(parse_info()->language_mode());
}

SourcePositionTableBuilder::RecordingMode
CompilationInfo::SourcePositionRecordingMode() const {
  return parse_info() && parse_info()->is_native()
             ? SourcePositionTableBuilder::OMIT_SOURCE_POSITIONS
             : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS;
}

bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
  return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
}

// ----------------------------------------------------------------------------
// Implementation of CompilationJob
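
// A CompilationJob runs in three phases -- CreateGraph(), OptimizeGraph() and
// GenerateCode() -- and each phase must report SUCCEEDED before the next one
// is entered (see GetOptimizedCodeNow() below). Every phase accumulates its
// wall-clock time into the corresponding time_taken_* field.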

CompilationJob::Status CompilationJob::CreateGraph() {
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(info()->IsOptimizing());

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure()) << " using "
       << compiler_name_;
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_create_graph_);
  return SetLastStatus(CreateGraphImpl());
}

CompilationJob::Status CompilationJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_optimize_);
  return SetLastStatus(OptimizeGraphImpl());
}

CompilationJob::Status CompilationJob::GenerateCode() {
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(!info()->dependencies()->HasAborted());

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_codegen_);
  return SetLastStatus(GenerateCodeImpl());
}

namespace {

void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
                                   Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  if (heap->InNewSpace(*object)) {
    heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
  } else {
    Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
    dep =
        DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
    heap->AddWeakObjectToCodeDependency(object, dep);
  }
}

}  // namespace
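
// Walks the relocation info of freshly generated optimized code and records
// every embedded map or cell that is treated as weak, so that the code can be
// deoptimized when one of those objects dies.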

void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
  // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
  Isolate* const isolate = code->GetIsolate();
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                          RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::CELL &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
        objects.push_back(handle(it.rinfo()->target_cell(), isolate));
      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
                 code->IsWeakObjectInOptimizedCode(
                     it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate, object, code);
  }
  code->set_can_have_weak_objects(true);
}

void CompilationJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race. Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}

// ----------------------------------------------------------------------------
// Local helper methods that make up the compilation pipeline.

namespace {

bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
  return shared->is_toplevel() && shared->script()->IsScript() &&
         Script::cast(shared->script())->compilation_type() ==
             Script::COMPILATION_TYPE_EVAL;
}

void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
                               CompilationInfo* info) {
  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->is_profiling()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    Handle<Script> script = info->parse_info()->script();
    Handle<AbstractCode> abstract_code =
        info->has_bytecode_array()
            ? Handle<AbstractCode>::cast(info->bytecode_array())
            : Handle<AbstractCode>::cast(info->code());
    if (abstract_code.is_identical_to(
            info->isolate()->builtins()->CompileLazy())) {
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
    CodeEventListener::LogEventsAndTags log_tag =
        Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(),
            CodeCreateEvent(log_tag, *abstract_code, *shared, script_name,
                            line_num, column_num));
  }
}

void EnsureFeedbackMetadata(CompilationInfo* info) {
  DCHECK(info->has_shared_info());

  // If no type feedback metadata exists, we create it now. At this point the
  // AstNumbering pass has already run. Note the snapshot can contain outdated
  // vectors for a different configuration, hence we also recreate a new vector
  // when the function is not compiled (i.e. no code was serialized).

  // TODO(mvstanton): reintroduce is_empty() predicate to feedback_metadata().
  if (info->shared_info()->feedback_metadata()->length() == 0 ||
      !info->shared_info()->is_compiled()) {
    Handle<TypeFeedbackMetadata> feedback_metadata = TypeFeedbackMetadata::New(
        info->isolate(), info->literal()->feedback_vector_spec());
    info->shared_info()->set_feedback_metadata(*feedback_metadata);
  }

  // It's very important that recompiles do not alter the structure of the type
  // feedback vector. Verify that the structure fits the function literal.
  CHECK(!info->shared_info()->feedback_metadata()->SpecDiffersFrom(
      info->literal()->feedback_vector_spec()));
}

bool ShouldUseIgnition(CompilationInfo* info) {
  if (!FLAG_ignition) return false;

  DCHECK(info->has_shared_info());

  // When requesting debug code as a replacement for existing code, we provide
  // the same kind as the existing code (to prevent implicit tier-change).
  if (info->is_debug() && info->shared_info()->is_compiled()) {
    return !info->shared_info()->HasBaselineCode();
  }

  // Since we can't OSR from Ignition, skip Ignition for asm.js functions.
  if (info->shared_info()->asm_function()) {
    return false;
  }

  // Checks whether top level functions should be passed by the filter.
  if (info->shared_info()->is_toplevel()) {
    Vector<const char> filter = CStrVector(FLAG_ignition_filter);
    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
  }

  // Finally respect the filter.
  return info->shared_info()->PassesFilter(FLAG_ignition_filter);
}

int CodeAndMetadataSize(CompilationInfo* info) {
  if (info->has_bytecode_array()) {
    return info->bytecode_array()->SizeIncludingMetadata();
  }
  return info->code()->SizeIncludingMetadata();
}
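
// GenerateUnoptimizedCode() below picks one of three baseline tiers for a
// function: validated asm.js modules are translated to WASM, functions
// selected by ShouldUseIgnition() get interpreter bytecode, and everything
// else is compiled with the full code generator.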

bool GenerateUnoptimizedCode(CompilationInfo* info) {
  bool success;
  EnsureFeedbackMetadata(info);
  if (FLAG_validate_asm && info->scope()->asm_module() &&
      !info->shared_info()->is_asm_wasm_broken()) {
    MaybeHandle<FixedArray> wasm_data;
    wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info());
    if (!wasm_data.is_null()) {
      info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked());
      info->SetCode(info->isolate()->builtins()->InstantiateAsmJs());
      return true;
    }
  }
  if (ShouldUseIgnition(info)) {
    success = interpreter::Interpreter::MakeBytecode(info);
  } else {
    success = FullCodeGenerator::MakeCode(info);
  }
  if (success) {
    Isolate* isolate = info->isolate();
    Counters* counters = isolate->counters();
    // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
    counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
    counters->total_baseline_compile_count()->Increment(1);
  }
  return success;
}

bool CompileUnoptimizedCode(CompilationInfo* info) {
  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
  if (!Compiler::Analyze(info->parse_info()) ||
      !GenerateUnoptimizedCode(info)) {
    Isolate* isolate = info->isolate();
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return false;
  }
  return true;
}

void InstallSharedScopeInfo(CompilationInfo* info,
                            Handle<SharedFunctionInfo> shared) {
  Handle<ScopeInfo> scope_info = info->scope()->GetScopeInfo(info->isolate());
  shared->set_scope_info(*scope_info);
}

void InstallSharedCompilationResult(CompilationInfo* info,
                                    Handle<SharedFunctionInfo> shared) {
  // TODO(mstarzinger): Compiling for debug code might be used to reveal inner
  // functions via {FindSharedFunctionInfoInScript}, in which case we end up
  // regenerating existing bytecode. Fix this!
  if (info->is_debug() && info->has_bytecode_array()) {
    shared->ClearBytecodeArray();
  }
  DCHECK(!info->code().is_null());
  shared->ReplaceCode(*info->code());
  if (info->has_bytecode_array()) {
    DCHECK(!shared->HasBytecodeArray());  // Only compiled once.
    shared->set_bytecode_array(*info->bytecode_array());
  }
}

MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());

  // Create a canonical handle scope before internalizing parsed values if
  // compiling bytecode. This is required for off-thread bytecode generation.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info->shared_info();
  DCHECK_EQ(shared->language_mode(), info->literal()->language_mode());

  // Compile either unoptimized code or bytecode for the interpreter.
  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(info, shared);

  // Install compilation result on the shared function info
  InstallSharedCompilationResult(info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);

  return info->code();
}

MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
    Handle<JSFunction> function, BailoutId osr_ast_id) {
  Handle<SharedFunctionInfo> shared(function->shared());
  DisallowHeapAllocation no_gc;
  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), osr_ast_id);
  if (cached.code != nullptr) {
    // Caching of optimized code enabled and optimized code found.
    if (cached.literals != nullptr) function->set_literals(cached.literals);
    DCHECK(!cached.code->marked_for_deoptimization());
    DCHECK(function->shared()->is_compiled());
    return Handle<Code>(cached.code);
  }
  return MaybeHandle<Code>();
}
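
// InsertCodeIntoOptimizedCodeMap() below is the write-side counterpart of the
// lookup above: freshly optimized code is registered in the native-context
// specific optimized code map, and context-independent TurboFan code may in
// addition be attached directly to the SharedFunctionInfo.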

void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Function context specialization folds-in the function context,
  // so no sharing can occur.
  if (info->is_function_context_specializing()) return;
  // Frame specialization implies function context specialization.
  DCHECK(!info->is_frame_specializing());

  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
  // from bytecode offset and overlap with actual BailoutId. No caching!
  if (info->is_osr() && info->is_optimizing_from_bytecode()) return;

  // Cache optimized context-specific code.
  Handle<JSFunction> function = info->closure();
  Handle<SharedFunctionInfo> shared(function->shared());
  Handle<LiteralsArray> literals(function->literals());
  Handle<Context> native_context(function->context()->native_context());
  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
                                            literals, info->osr_ast_id());

  // Do not cache (native) context-independent code compiled for OSR.
  if (code->is_turbofanned() && info->is_osr()) return;

  // Cache optimized (native) context-independent code.
  if (FLAG_turbo_cache_shared_code && code->is_turbofanned() &&
      !info->is_native_context_specializing()) {
    DCHECK(!info->is_function_context_specializing());
    DCHECK(info->osr_ast_id().IsNone());
    Handle<SharedFunctionInfo> shared(function->shared());
    SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(shared, code);
  }
}

bool Renumber(ParseInfo* parse_info) {
  if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
                              parse_info->literal())) {
    return false;
  }
  Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
  if (!shared_info.is_null()) {
    FunctionLiteral* lit = parse_info->literal();
    shared_info->set_ast_node_count(lit->ast_node_count());
    if (lit->dont_optimize_reason() != kNoReason) {
      shared_info->DisableOptimization(lit->dont_optimize_reason());
    }
    if (lit->flags() & AstProperties::kDontCrankshaft) {
      shared_info->set_dont_crankshaft(true);
    }
  }
  return true;
}

bool UseTurboFan(Handle<SharedFunctionInfo> shared) {
  bool optimization_disabled = shared->optimization_disabled();
  bool dont_crankshaft = shared->dont_crankshaft();

  // Check the enabling conditions for Turbofan.
  // 1. "use asm" code.
  bool is_turbofanable_asm =
      FLAG_turbo_asm && shared->asm_function() && !optimization_disabled;

  // 2. Fallback for features unsupported by Crankshaft.
  bool is_unsupported_by_crankshaft_but_turbofanable =
      dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0 &&
      !optimization_disabled;

  // 3. Explicitly enabled by the command-line filter.
  bool passes_turbo_filter = shared->PassesFilter(FLAG_turbo_filter);

  return is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
         passes_turbo_filter;
}
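
// GetOptimizedCodeNow() and GetOptimizedCodeLater() below drive a
// CompilationJob: the former runs all three phases synchronously, while the
// latter runs CreateGraph() here and then queues the job on the
// OptimizingCompileDispatcher for the remaining phases.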

bool GetOptimizedCodeNow(CompilationJob* job) {
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  // Parsing is not required when optimizing from existing bytecode.
  if (!info->is_optimizing_from_bytecode()) {
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
    EnsureFeedbackMetadata(info);
  }

  JSFunction::EnsureLiterals(info->closure());

  TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::RecompileSynchronous);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);

  if (job->CreateGraph() != CompilationJob::SUCCEEDED ||
      job->OptimizeGraph() != CompilationJob::SUCCEEDED ||
      job->GenerateCode() != CompilationJob::SUCCEEDED) {
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }

  // Success!
  job->RecordOptimizationStats();
  DCHECK(!isolate->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
  return true;
}

bool GetOptimizedCodeLater(CompilationJob* job) {
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF(" ** Compilation queue full, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  if (isolate->heap()->HighMemoryPressure()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF(" ** High memory pressure, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  // All handles below this point will be allocated in a deferred handle scope
  // that is detached and handed off to the background thread when we return.
  CompilationHandleScope handle_scope(info);

  // Parsing is not required when optimizing from existing bytecode.
  if (!info->is_optimizing_from_bytecode()) {
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
    EnsureFeedbackMetadata(info);
  }

  JSFunction::EnsureLiterals(info->closure());

  // Reopen handles in the new CompilationHandleScope.
  info->ReopenHandlesInNewHandleScope();
  info->parse_info()->ReopenHandlesInNewHandleScope();

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
  RuntimeCallTimerScope runtimeTimer(info->isolate(),
                                     &RuntimeCallStats::RecompileSynchronous);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);

  if (job->CreateGraph() != CompilationJob::SUCCEEDED) return false;
  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF(" ** Queued ");
    info->closure()->ShortPrint();
    PrintF(" for concurrent optimization.\n");
  }
  return true;
}
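
// GetOptimizedCode() ties the helpers above together: it first consults the
// optimized code map, bails out for functions that are being debugged or have
// been optimized too many times, optionally lets TurboFan compile directly
// from existing bytecode, and finally either queues a concurrent job
// (GetOptimizedCodeLater) or compiles synchronously (GetOptimizedCodeNow).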

MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
                                   Compiler::ConcurrencyMode mode,
                                   BailoutId osr_ast_id = BailoutId::None(),
                                   JavaScriptFrame* osr_frame = nullptr) {
  Isolate* isolate = function->GetIsolate();
  Handle<SharedFunctionInfo> shared(function->shared(), isolate);

  bool ignition_osr = osr_frame && osr_frame->is_interpreted();
  DCHECK_IMPLIES(ignition_osr, !osr_ast_id.IsNone());
  DCHECK_IMPLIES(ignition_osr, FLAG_ignition_osr);

  // Flag combination --ignition-osr --no-turbo-from-bytecode is unsupported.
  if (ignition_osr && !FLAG_turbo_from_bytecode) return MaybeHandle<Code>();

  Handle<Code> cached_code;
  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
  // from bytecode offset and overlap with actual BailoutId. No lookup!
  if (!ignition_osr &&
      GetCodeFromOptimizedCodeMap(function, osr_ast_id)
          .ToHandle(&cached_code)) {
    if (FLAG_trace_opt) {
      PrintF("[found optimized code for ");
      function->ShortPrint();
      if (!osr_ast_id.IsNone()) {
        PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
      }
      PrintF("]\n");
    }
    return cached_code;
  }

  // Reset profiler ticks, function is no longer considered hot.
  if (shared->is_compiled()) {
    shared->code()->set_profiler_ticks(0);
  }

  VMState<COMPILER> state(isolate);
  DCHECK(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);
  bool use_turbofan = UseTurboFan(shared) || ignition_osr;
  std::unique_ptr<CompilationJob> job(
      use_turbofan ? compiler::Pipeline::NewCompilationJob(function)
                   : new HCompilationJob(function));
  CompilationInfo* info = job->info();
  ParseInfo* parse_info = info->parse_info();

  info->SetOptimizingForOsr(osr_ast_id, osr_frame);

  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (info->shared_info()->HasDebugInfo()) {
    info->AbortOptimization(kFunctionBeingDebugged);
    return MaybeHandle<Code>();
  }

  // Limit the number of times we try to optimize functions.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info->shared_info()->opt_count() > kMaxOptCount) {
    info->AbortOptimization(kOptimizedTooManyTimes);
    return MaybeHandle<Code>();
  }

  CanonicalHandleScope canonical(isolate);
  TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::OptimizeCode);

  // TurboFan can optimize directly from existing bytecode.
  if (FLAG_turbo_from_bytecode && use_turbofan && ShouldUseIgnition(info)) {
    if (!Compiler::EnsureBytecode(info)) {
      if (isolate->has_pending_exception()) isolate->clear_pending_exception();
      return MaybeHandle<Code>();
    }
    info->MarkAsOptimizeFromBytecode();
  }

  if (IsEvalToplevel(shared)) {
    parse_info->set_eval();
    if (function->context()->IsNativeContext()) parse_info->set_global();
    parse_info->set_toplevel();
    parse_info->set_allow_lazy_parsing(false);
    parse_info->set_lazy(false);
  }

  if (mode == Compiler::CONCURRENT) {
    if (GetOptimizedCodeLater(job.get())) {
      job.release();  // The background recompile job owns this now.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(job.get())) return info->code();
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}
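
// Visits all threads and all optimized functions to find live activations of
// a given SharedFunctionInfo that still run interpreted (or bytecode-based
// OSR) code; used below to decide whether it is safe to switch the function
// over to baseline code.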

class InterpreterActivationsFinder : public ThreadVisitor,
                                     public OptimizedFunctionVisitor {
 public:
  explicit InterpreterActivationsFinder(SharedFunctionInfo* shared)
      : shared_(shared), has_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    Address* activation_pc_address = nullptr;
    JavaScriptFrameIterator it(isolate, top);
    for (; !it.done(); it.Advance()) {
      JavaScriptFrame* frame = it.frame();
      if (FLAG_turbo_from_bytecode && FLAG_ignition_osr &&
          frame->is_optimized() && frame->function()->shared() == shared_) {
        // If we are able to optimize functions directly from bytecode, then
        // there might be optimized OSR code active on the stack that is not
        // reachable through a function. We count this as an activation.
        has_activations_ = true;
      }
      if (frame->is_interpreted() && frame->function()->shared() == shared_) {
        has_activations_ = true;
        activation_pc_address = frame->pc_address();
      }
    }

    if (activation_pc_address) {
      activation_pc_addresses_.push_back(activation_pc_address);
    }
  }

  void VisitFunction(JSFunction* function) {
    if (function->Inlines(shared_)) has_activations_ = true;
  }

  void EnterContext(Context* context) {}
  void LeaveContext(Context* context) {}

  bool MarkActivationsForBaselineOnReturn(Isolate* isolate) {
    if (activation_pc_addresses_.empty()) return false;

    for (Address* activation_pc_address : activation_pc_addresses_) {
      DCHECK(isolate->inner_pointer_to_code_cache()
                 ->GetCacheEntry(*activation_pc_address)
                 ->code->is_interpreter_trampoline_builtin());
      *activation_pc_address =
          isolate->builtins()->InterpreterMarkBaselineOnReturn()->entry();
    }
    return true;
  }

  bool has_activations() { return has_activations_; }

 private:
  SharedFunctionInfo* shared_;
  bool has_activations_;
  std::vector<Address*> activation_pc_addresses_;
};

bool HasInterpreterActivations(
    Isolate* isolate, InterpreterActivationsFinder* activations_finder) {
  activations_finder->VisitThread(isolate, isolate->thread_local_top());
  isolate->thread_manager()->IterateArchivedThreads(activations_finder);
  if (FLAG_turbo_from_bytecode) {
    // If we are able to optimize functions directly from bytecode, then there
    // might be optimized functions that rely on bytecode being around. We need
    // to prevent switching the given function to baseline code in those cases.
    Deoptimizer::VisitAllOptimizedFunctions(isolate, activations_finder);
  }
  return activations_finder->has_activations();
}
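
// GetBaselineCode() below tiers an interpreted function up to full-codegen
// ("baseline") code. The early returns cover the cases where this is either
// unnecessary (baseline code already exists) or unsafe (debugger attached,
// resumable functions, or live interpreter activations found above).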
MaybeHandle<Code> GetBaselineCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);

  // Reset profiler ticks, function is no longer considered hot.
  if (function->shared()->HasBytecodeArray()) {
    function->shared()->set_profiler_ticks(0);
  }

  // Nothing left to do if the function already has baseline code.
  if (function->shared()->code()->kind() == Code::FUNCTION) {
    return Handle<Code>(function->shared()->code());
  }

  // We do not switch to baseline code when the debugger might have created a
  // copy of the bytecode with break slots to be able to set break points.
  if (function->shared()->HasDebugInfo()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we do not switch generators or async functions to
  // baseline code because there might be suspended activations stored in
  // generator objects on the heap. We could eventually go directly to
  // TurboFan in this case.
  if (function->shared()->is_resumable()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we disable switching to baseline code in the presence
  // of interpreter activations of the given function. The reasons are:
  //  1) The debugger assumes each function is either full-code or bytecode.
  //  2) The underlying bytecode is cleared below, breaking stack unwinding.
  InterpreterActivationsFinder activations_finder(function->shared());
  if (HasInterpreterActivations(isolate, &activations_finder)) {
    if (FLAG_trace_opt) {
      OFStream os(stdout);
      os << "[unable to switch " << Brief(*function) << " due to activations]"
         << std::endl;
    }

    if (activations_finder.MarkActivationsForBaselineOnReturn(isolate)) {
      if (FLAG_trace_opt) {
        OFStream os(stdout);
        os << "[marking " << Brief(function->shared())
           << " for baseline recompilation on return]" << std::endl;
      }
    }

    return MaybeHandle<Code>();
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[switching method " << Brief(*function) << " to baseline code]"
       << std::endl;
  }

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info.parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info.shared_info();
  DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());

  // Compile baseline code using the full code generator.
  if (!Compiler::Analyze(info.parse_info()) ||
      !FullCodeGenerator::MakeCode(&info)) {
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we play it safe and remove the bytecode array when we
  // switch to baseline code. We might consider keeping around the bytecode so
  // that it can be used as the "source of truth" eventually.
  if (!FLAG_ignition_preserve_bytecode) shared->ClearBytecodeArray();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(&info, shared);

  // Install compilation result on the shared function info.
  InstallSharedCompilationResult(&info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, &info);

  return info.code();
}
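
// Lazy compilation entry point: reuses code from the optimized code map or
// from the shared function info (including existing bytecode) when possible,
// and only falls back to a fresh unoptimized compile otherwise.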
MaybeHandle<Code> GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::CompileCodeLazy);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::CompileCodeLazy);
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());

  if (FLAG_turbo_cache_shared_code) {
    Handle<Code> cached_code;
    if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
            .ToHandle(&cached_code)) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF(" during unoptimized compile]\n");
      }
      DCHECK(function->shared()->is_compiled());
      return cached_code;
    }
  }

  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  if (function->shared()->HasBytecodeArray()) {
    Handle<Code> entry = isolate->builtins()->InterpreterEntryTrampoline();
    function->shared()->ReplaceCode(*entry);
    return entry;
  }

  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCode(&info), Code);

  if (FLAG_always_opt) {
    Handle<Code> opt_code;
    if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
            .ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}
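
// Allocates a SharedFunctionInfo for the given literal, with the CompileLazy
// builtin installed as placeholder code until real code is compiled.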
Handle<SharedFunctionInfo> NewSharedFunctionInfoForLiteral(
    Isolate* isolate, FunctionLiteral* literal, Handle<Script> script) {
  Handle<Code> code = isolate->builtins()->CompileLazy();
  Handle<ScopeInfo> scope_info = handle(ScopeInfo::Empty(isolate));
  Handle<SharedFunctionInfo> result = isolate->factory()->NewSharedFunctionInfo(
      literal->name(), literal->materialized_literal_count(), literal->kind(),
      code, scope_info);
  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
  SharedFunctionInfo::SetScript(result, script);
  return result;
}
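
// Compiles a top-level compilation unit (script, eval or module): parses the
// source if necessary, allocates the top-level SharedFunctionInfo, generates
// unoptimized code and installs the result on that shared function info.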
Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  TimerEventScope<TimerEventCompileCode> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::CompileCode);
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  ParseInfo* parse_info = info->parse_info();
  Handle<Script> script = parse_info->script();

  // Create a canonical handle scope before internalizing parsed values if
  // compiling bytecode. This is required for off-thread bytecode generation.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate));

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(v8::Context::kDebugIdIndex));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
         parse_info->is_module());

  parse_info->set_toplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (parse_info->literal() == NULL) {
      // Parse the script if needed (if it's already parsed, literal() is
      // non-NULL). If compiling for debugging, we may eagerly compile inner
      // functions, so do not parse lazily in that case.
      ScriptCompiler::CompileOptions options = parse_info->compile_options();
      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
                               String::cast(script->source())->length() >
                                   FLAG_min_preparse_length) &&
                              !info->is_debug();

      // Consider parsing eagerly when targeting the code cache.
      parse_allow_lazy &= !(FLAG_serialize_eager && info->will_serialize());

      // Consider parsing eagerly when targeting Ignition.
      parse_allow_lazy &= !(FLAG_ignition && FLAG_ignition_eager &&
                            !isolate->serializer_enabled());

      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
      if (!parse_allow_lazy &&
          (options == ScriptCompiler::kProduceParserCache ||
           options == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        parse_info->set_cached_data(nullptr);
        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
      }

      if (!Parser::ParseStatic(parse_info)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    DCHECK(!info->is_debug() || !parse_info->allow_lazy_parsing());

    FunctionLiteral* lit = parse_info->literal();

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    RuntimeCallTimerScope runtimeTimer(
        isolate, parse_info->is_eval() ? &RuntimeCallStats::CompileEval
                                       : &RuntimeCallStats::Compile);
    HistogramTimer* rate = parse_info->is_eval()
                               ? info->isolate()->counters()->compile_eval()
                               : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);
    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
        isolate,
        (parse_info->is_eval() ? &tracing::TraceEventStatsTable::CompileEval
                               : &tracing::TraceEventStatsTable::Compile));

    // Allocate a shared function info object.
    DCHECK_EQ(kNoSourcePosition, lit->function_token_position());
    result = NewSharedFunctionInfoForLiteral(isolate, lit, script);
    result->set_is_toplevel(true);
    if (parse_info->is_eval()) {
      // Eval scripts cannot be (re-)compiled without context.
      result->set_allows_lazy_compilation_without_context(false);
    }
    parse_info->set_shared_info(result);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Update the shared function info with the scope info.
    InstallSharedScopeInfo(info, result);

    // Install compilation result on the shared function info.
    InstallSharedCompilationResult(info, result);

    Handle<String> script_name =
        script->name()->IsString()
            ? Handle<String>(String::cast(script->name()))
            : isolate->factory()->empty_string();
    CodeEventListener::LogEventsAndTags log_tag =
        parse_info->is_eval()
            ? CodeEventListener::EVAL_TAG
            : Logger::ToNativeByScript(CodeEventListener::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(log_tag, result->abstract_code(), *result,
                                     *script_name));

    if (!script.is_null())
      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
  }

  return result;
}

}  // namespace

// ----------------------------------------------------------------------------
// Implementation of Compiler
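
// Runs the post-parse analysis pipeline on the parsed literal: AST rewriting,
// scope analysis and AST numbering, in that order.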
bool Compiler::Analyze(ParseInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  if (!Rewriter::Rewrite(info)) return false;
  Scope::Analyze(info);
  if (!Renumber(info)) return false;
  DCHECK_NOT_NULL(info->scope());
  return true;
}

bool Compiler::ParseAndAnalyze(ParseInfo* info) {
  if (!Parser::ParseStatic(info)) return false;
  if (!Compiler::Analyze(info)) return false;
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  return true;
}
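
// Ensures the given closure has code installed, compiling it lazily if
// necessary. A typical call site looks roughly like this (sketch):
//   if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) return ...;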
bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
  if (function->is_compiled()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetLazyCode(function).ToHandle(&code)) {
    if (flag == CLEAR_EXCEPTION) {
      isolate->clear_pending_exception();
    }
    return false;
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

bool Compiler::CompileBaseline(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetBaselineCode(function).ToHandle(&code)) {
    // Baseline generation failed, get unoptimized code.
    DCHECK(function->shared()->is_compiled());
    code = handle(function->shared()->code());
    isolate->clear_pending_exception();
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

bool Compiler::CompileOptimized(Handle<JSFunction> function,
                                ConcurrencyMode mode) {
  if (function->IsOptimized()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetOptimizedCode(function, mode).ToHandle(&code)) {
    // Optimization failed, get unoptimized code.
    DCHECK(!isolate->has_pending_exception());
    if (function->shared()->is_compiled()) {
      code = handle(function->shared()->code(), isolate);
    } else {
      Zone zone(isolate->allocator());
      ParseInfo parse_info(&zone, function);
      CompilationInfo info(&parse_info, function);
      if (!GetUnoptimizedCode(&info).ToHandle(&code)) {
        return false;
      }
    }
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}
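
// Compiles unoptimized code with debug support (debug break slots) for the
// given closure, so that the debugger can set break points in it.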
bool Compiler::CompileDebugCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  if (IsEvalToplevel(handle(function->shared()))) {
    parse_info.set_eval();
    if (function->context()->IsNativeContext()) parse_info.set_global();
    parse_info.set_toplevel();
    parse_info.set_allow_lazy_parsing(false);
    parse_info.set_lazy(false);
  }
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->shared()->HasDebugCode());
  return true;
}

bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
  Isolate* isolate = shared->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, shared);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  DCHECK(shared->allows_lazy_compilation_without_context());
  DCHECK(!IsEvalToplevel(shared));
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(shared->is_compiled());
  DCHECK(shared->HasDebugCode());
  return true;
}

MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
  Isolate* isolate = script->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // In order to ensure that live edit function info collection finds the newly
  // generated shared function infos, clear the script's list temporarily
  // and restore it at the end of this method.
  Handle<Object> old_function_infos(script->shared_function_infos(), isolate);
  script->set_shared_function_infos(Smi::FromInt(0));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  parse_info.set_global();
  info.MarkAsDebug();

  // TODO(635): support extensions.
  const bool compilation_succeeded = !CompileToplevel(&info).is_null();
  Handle<JSArray> infos;
  if (compilation_succeeded) {
    // Check postconditions on success.
    DCHECK(!isolate->has_pending_exception());
    infos = LiveEditFunctionTracker::Collect(parse_info.literal(), script,
                                             &zone, isolate);
  }

  // Restore the original function info list in order to remain side-effect
  // free as much as possible, since some code expects the old shared function
  // infos to stick around.
  script->set_shared_function_infos(*old_function_infos);

  return infos;
}

bool Compiler::EnsureBytecode(CompilationInfo* info) {
  DCHECK(ShouldUseIgnition(info));
  if (!info->shared_info()->HasBytecodeArray()) {
    DCHECK(!info->shared_info()->is_compiled());
    if (GetUnoptimizedCode(info).is_null()) return false;
  }
  DCHECK(info->shared_info()->HasBytecodeArray());
  return true;
}
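
// Ensures the shared function info has full-codegen code carrying
// deoptimization support, recompiling it if necessary; optimized code needs
// such code as a bailout target when it deoptimizes.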
// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (!shared->has_deoptimization_support()) {
    Zone zone(info->isolate()->allocator());
    CompilationInfo unoptimized(info->parse_info(), info->closure());
    unoptimized.EnableDeoptimizationSupport();

    // TODO(4280): For now we do not switch generators or async functions to
    // baseline code because there might be suspended activations stored in
    // generator objects on the heap. We could eventually go directly to
    // TurboFan in this case.
    if (shared->is_resumable()) return false;

    // TODO(4280): For now we disable switching to baseline code in the
    // presence of interpreter activations of the given function. The reasons
    // are:
    //  1) The debugger assumes each function is either full-code or bytecode.
    //  2) The underlying bytecode is cleared below, breaking stack unwinding.
    // The expensive check for activations only needs to be done when the
    // given function has bytecode, otherwise we can be sure there are no
    // activations.
    if (shared->HasBytecodeArray()) {
      InterpreterActivationsFinder activations_finder(*shared);
      if (HasInterpreterActivations(info->isolate(), &activations_finder)) {
        return false;
      }
    }

    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    EnsureFeedbackMetadata(&unoptimized);
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    // TODO(4280): For now we play it safe and remove the bytecode array when
    // we switch to baseline code. We might consider keeping around the
    // bytecode so that it can be used as the "source of truth" eventually.
    if (shared->HasBytecodeArray()) {
      if (!FLAG_ignition_preserve_bytecode) shared->ClearBytecodeArray();
    }

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      InstallSharedScopeInfo(info, shared);
    }

    // Install compilation result on the shared function info.
    shared->EnableDeoptimizationSupport(*unoptimized.code());

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG,
                              &unoptimized);
  }
  return true;
}
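
// Decides which tier a function should move to next: code running on the
// interpreter trampoline moves to BASELINE, unless TurboFan can optimize it
// directly from bytecode; everything else goes straight to OPTIMIZED.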
// static
Compiler::CompilationTier Compiler::NextCompilationTier(JSFunction* function) {
  Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
  if (shared->code()->is_interpreter_trampoline_builtin()) {
    if (FLAG_turbo_from_bytecode && UseTurboFan(shared)) {
      return OPTIMIZED;
    } else {
      return BASELINE;
    }
  } else {
    return OPTIMIZED;
  }
}

MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
    Handle<Context> context, LanguageMode language_mode,
    ParseRestriction restriction, int eval_scope_position, int eval_position,
    int line_offset, int column_offset, Handle<Object> script_name,
    ScriptOriginOptions options) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, outer_info, context, language_mode,
                                    eval_scope_position);
  Handle<SharedFunctionInfo> shared_info;

  Handle<Script> script;
  if (!maybe_shared_info.ToHandle(&shared_info)) {
    script = isolate->factory()->NewScript(source);
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(options);
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    Script::SetEvalOrigin(script, outer_info, eval_position);

    Zone zone(isolate->allocator());
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
    parse_info.set_eval();
    if (context->IsNativeContext()) parse_info.set_global();
    parse_info.set_language_mode(language_mode);
    parse_info.set_parse_restriction(restriction);
    parse_info.set_context(context);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // If the caller is strict mode, the result must be in strict mode too.
      DCHECK(is_sloppy(language_mode) ||
             is_strict(shared_info->language_mode()));
      compilation_cache->PutEval(source, outer_info, context, shared_info,
                                 eval_scope_position);
    }
  }

  Handle<JSFunction> result =
      isolate->factory()->NewFunctionFromSharedFunctionInfo(
          shared_info, context, NOT_TENURED);

  // OnAfterCompile has to be called after we create the JSFunction, because we
  // may need to recompile the eval for debugging if we find a function that
  // contains break points in the eval script.
  isolate->debug()->OnAfterCompile(script);

  return result;
}

namespace {

bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}

}  // namespace
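
// Compiles a source string in the native context, as used by code-from-string
// entry points such as the Function constructor, after consulting the
// embedder's code-generation-from-strings policy.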
MaybeHandle<JSFunction> Compiler::GetFunctionFromString(
    Handle<Context> context, Handle<String> source,
    ParseRestriction restriction) {
  Isolate* const isolate = context->GetIsolate();
  Handle<Context> native_context(context->native_context(), isolate);

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR(isolate, NewEvalError(MessageTemplate::kCodeGenFromStrings,
                                          error_message),
                    JSFunction);
  }

  // Compile source string in the native context.
  int eval_scope_position = 0;
  int eval_position = kNoSourcePosition;
  Handle<SharedFunctionInfo> outer_info(native_context->closure()->shared());
  return Compiler::GetFunctionFromEval(source, outer_info, native_context,
                                       SLOPPY, restriction, eval_scope_position,
                                       eval_position);
}
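
// Compiles a top-level script for the embedder. Cache behaviour depends on
// |compile_options|: kProduce{Parser,Code}Cache fills |cached_data| for later
// runs, kConsume{Parser,Code}Cache reads it back, and kNoCompileOptions
// ignores cached data. A typical embedder flow is roughly (sketch):
//   1st run: v8::ScriptCompiler::CompileUnbound(isolate, &source,
//                ScriptCompiler::kProduceCodeCache);
//   2nd run: same call with kConsumeCodeCache and source.cached_data set to
//            the data produced by the first run.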
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
    Handle<String> source, Handle<Object> script_name, int line_offset,
    int column_offset, ScriptOriginOptions resource_options,
    Handle<Object> source_map_url, Handle<Context> context,
    v8::Extension* extension, ScriptData** cached_data,
    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
    bool is_module) {
  Isolate* isolate = source->GetIsolate();
  if (compile_options == ScriptCompiler::kNoCompileOptions) {
    cached_data = NULL;
  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
             compile_options == ScriptCompiler::kProduceCodeCache) {
    DCHECK(cached_data && !*cached_data);
    DCHECK(extension == NULL);
    DCHECK(!isolate->debug()->is_loaded());
  } else {
    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
           compile_options == ScriptCompiler::kConsumeCodeCache);
    DCHECK(cached_data && *cached_data);
    DCHECK(extension == NULL);
  }
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    // First check per-isolate compilation cache.
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset, resource_options,
        context, language_mode);
    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kConsumeCodeCache &&
        !isolate->debug()->is_loaded()) {
      // Then check cached code provided by embedder.
      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
      RuntimeCallTimerScope runtimeTimer(
          isolate, &RuntimeCallStats::CompileDeserialize);
      TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
          isolate, &tracing::TraceEventStatsTable::CompileDeserialize);
      Handle<SharedFunctionInfo> result;
      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
              .ToHandle(&result)) {
        // Promote to per-isolate compilation cache.
        compilation_cache->PutScript(source, context, language_mode, result);
        return result;
      }
      // Deserializer failed. Fall through to compile.
    }
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
      compile_options == ScriptCompiler::kProduceCodeCache) {
    timer.Start();
  }

  if (!maybe_result.ToHandle(&result) ||
      (FLAG_serialize_toplevel &&
       compile_options == ScriptCompiler::kProduceCodeCache)) {
    // No cache entry found, or embedder wants a code cache. Compile the
    // script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Script::TYPE_NATIVE);
      script->set_hide_source(true);
    } else if (natives == EXTENSION_CODE) {
      script->set_type(Script::TYPE_EXTENSION);
      script->set_hide_source(true);
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(resource_options);
    if (!source_map_url.is_null()) {
      script->set_source_mapping_url(*source_map_url);
    }

    // Compile the function and add it to the cache.
    Zone zone(isolate->allocator());
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
    if (is_module) {
      parse_info.set_module();
    } else {
      parse_info.set_global();
    }
    if (compile_options != ScriptCompiler::kNoCompileOptions) {
      parse_info.set_cached_data(cached_data);
    }
    parse_info.set_compile_options(compile_options);
    parse_info.set_extension(extension);
    parse_info.set_context(context);
    if (FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kProduceCodeCache) {
      info.PrepareForSerializing();
    }

    parse_info.set_language_mode(
        static_cast<LanguageMode>(parse_info.language_mode() | language_mode));
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null()) {
      compilation_cache->PutScript(source, context, language_mode, result);
      if (FLAG_serialize_toplevel &&
          compile_options == ScriptCompiler::kProduceCodeCache) {
        HistogramTimerScope histogram_timer(
            isolate->counters()->compile_serialize());
        RuntimeCallTimerScope runtimeTimer(
            isolate, &RuntimeCallStats::CompileSerialize);
        TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
            isolate, &tracing::TraceEventStatsTable::CompileSerialize);
        *cached_data = CodeSerializer::Serialize(isolate, result, source);
        if (FLAG_profile_deserialization) {
          PrintF("[Compiling and serializing took %0.3f ms]\n",
                 timer.Elapsed().InMillisecondsF());
        }
      }
    }

    if (result.is_null()) {
      isolate->ReportPendingMessages();
    } else {
      isolate->debug()->OnAfterCompile(script);
    }
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
    result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}

Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForStreamedScript(
    Handle<Script> script, ParseInfo* parse_info, int source_length) {
  Isolate* isolate = script->GetIsolate();
  // TODO(titzer): increment the counters in caller.
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
  parse_info->set_language_mode(
      static_cast<LanguageMode>(parse_info->language_mode() | language_mode));

  CompilationInfo compile_info(parse_info, Handle<JSFunction>::null());

  // The source was parsed lazily, so compiling for debugging is not possible.
  DCHECK(!compile_info.is_debug());

  Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
  if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
  return result;
}
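
// Returns the SharedFunctionInfo for an inner function literal, reusing an
// existing one found on the script when possible and allocating (and, where
// needed, eagerly compiling) a new one otherwise.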
|
|
|
|
|
|
|
|
|
2015-06-25 12:19:55 +00:00
|
|
|
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
    FunctionLiteral* literal, Handle<Script> script,
    CompilationInfo* outer_info) {
  // Precondition: code has been parsed and scopes have been analyzed.
  Isolate* isolate = outer_info->isolate();
  MaybeHandle<SharedFunctionInfo> maybe_existing;

  // Find any previously allocated shared function info for the given literal.
  if (outer_info->shared_info()->never_compiled()) {
    // On the first compile, there are no existing shared function infos for
    // inner functions yet, so do not try to find them. All bets are off for
    // live edit though.
    SLOW_DCHECK(script->FindSharedFunctionInfo(literal).is_null() ||
                isolate->debug()->live_edit_enabled());
  } else {
    maybe_existing = script->FindSharedFunctionInfo(literal);
  }

  // We found an existing shared function info. If it has any sort of code
  // attached, don't worry about compiling and simply return it. Otherwise,
  // continue to decide whether to eagerly compile.
  // Note that we also carry on if we are compiling eagerly to obtain code for
  // debugging, unless we already have code with debug break slots.
  Handle<SharedFunctionInfo> existing;
  if (maybe_existing.ToHandle(&existing)) {
    DCHECK(!existing->is_toplevel());
    if (existing->HasBaselineCode() || existing->HasBytecodeArray()) {
      if (!outer_info->is_debug() || existing->HasDebugCode()) {
        return existing;
      }
    }
  }

  // Allocate a shared function info object.
  Handle<SharedFunctionInfo> result;
  if (!maybe_existing.ToHandle(&result)) {
    result = NewSharedFunctionInfoForLiteral(isolate, literal, script);
    result->set_is_toplevel(false);

    // If the outer function has been compiled before, we cannot be sure that
    // shared function info for this function literal has been created for the
    // first time. It may have already been compiled previously.
    result->set_never_compiled(outer_info->shared_info()->never_compiled());
  }

  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  parse_info.set_literal(literal);
  parse_info.set_shared_info(result);
  parse_info.set_language_mode(literal->scope()->language_mode());
  if (outer_info->will_serialize()) info.PrepareForSerializing();
  if (outer_info->is_debug()) info.MarkAsDebug();

  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy = literal->AllowsLazyCompilation() && !info.is_debug();
  bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile();

  // Consider compiling eagerly when targeting the code cache.
  lazy &= !(FLAG_serialize_eager && info.will_serialize());

  // Consider compiling eagerly when compiling bytecode for Ignition.
  lazy &=
      !(FLAG_ignition && FLAG_ignition_eager && !isolate->serializer_enabled());

  // Generate code.
  TimerEventScope<TimerEventCompileCode> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::CompileCode);

  // Create a canonical handle scope if compiling ignition bytecode. This is
  // required by the constant array builder to de-duplicate common objects
  // without dereferencing handles.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info.isolate()));

  if (lazy) {
    info.SetCode(isolate->builtins()->CompileLazy());
  } else if (Renumber(info.parse_info()) && GenerateUnoptimizedCode(&info)) {
    // Code generation will ensure that the feedback vector is present and
    // appropriately sized.
    DCHECK(!info.code().is_null());
    if (literal->should_eager_compile() &&
        literal->should_be_used_once_hint()) {
      info.code()->MarkToBeExecutedOnce(isolate);
    }
    // Update the shared function info with the scope info.
    InstallSharedScopeInfo(&info, result);
    // Install compilation result on the shared function info.
    InstallSharedCompilationResult(&info, result);
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  if (maybe_existing.is_null()) {
    RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, &info);
  }

  return result;
}

Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative(
    v8::Extension* extension, Handle<String> name) {
  Isolate* isolate = name->GetIsolate();
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);

  // Compute the function template for the native function.
  v8::Local<v8::FunctionTemplate> fun_template =
      extension->GetNativeFunctionTemplate(v8_isolate,
                                           v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
           .ToLocalChecked()));
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
      name, fun->shared()->num_literals(), FunctionKind::kNormalFunction, code,
      Handle<ScopeInfo>(fun->shared()->scope_info()));
  shared->SetConstructStub(*construct_stub);
  shared->set_feedback_metadata(fun->shared()->feedback_metadata());

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->internal_formal_parameter_count();
  shared->set_internal_formal_parameter_count(parameters);

  return shared;
}

MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
                                                   BailoutId osr_ast_id,
                                                   JavaScriptFrame* osr_frame) {
  DCHECK(!osr_ast_id.IsNone());
  DCHECK_NOT_NULL(osr_frame);
  return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
}

void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
  // Take ownership of compilation job. Deleting job also tears down the zone.
  std::unique_ptr<CompilationJob> job(raw_job);
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::RecompileSynchronous);
  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  DCHECK(!shared->HasDebugInfo());

  // 1) Optimization on the concurrent thread may have failed.
  // 2) The function may have already been optimized by OSR. Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Code generation may have failed.
  if (job->last_status() == CompilationJob::SUCCEEDED) {
    if (shared->optimization_disabled()) {
      job->RetryOptimization(kOptimizationDisabled);
    } else if (info->dependencies()->HasAborted()) {
      job->RetryOptimization(kBailedOutDueToDependencyChange);
    } else if (job->GenerateCode() == CompilationJob::SUCCEEDED) {
      job->RecordOptimizationStats();
      RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
      if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
                                         info->osr_ast_id()).code == nullptr) {
        InsertCodeIntoOptimizedCodeMap(info);
      }
      if (FLAG_trace_opt) {
        PrintF("[completed optimizing ");
        info->closure()->ShortPrint();
        PrintF("]\n");
      }
      info->closure()->ReplaceCode(*info->code());
      return;
    }
  }

  DCHECK(job->last_status() != CompilationJob::SUCCEEDED);
  if (FLAG_trace_opt) {
    PrintF("[aborted optimizing ");
    info->closure()->ShortPrint();
    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
  }
  info->closure()->ReplaceCode(shared->code());
}

void Compiler::PostInstantiation(Handle<JSFunction> function,
                                 PretenureFlag pretenure) {
  Handle<SharedFunctionInfo> shared(function->shared());

  if (FLAG_always_opt && shared->allows_lazy_compilation()) {
    function->MarkForOptimization();
  }

  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), BailoutId::None());
  if (cached.code != nullptr) {
    // Caching of optimized code enabled and optimized code found.
    DCHECK(!cached.code->marked_for_deoptimization());
    DCHECK(function->shared()->is_compiled());
    function->ReplaceCode(cached.code);
  }

  if (cached.literals != nullptr) {
    DCHECK(shared->is_compiled());
    function->set_literals(cached.literals);
  } else if (shared->is_compiled()) {
    // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
    JSFunction::EnsureLiterals(function);
  }
}

}  // namespace internal
}  // namespace v8