Ensure that soft-deopts don't count against opt_count
This makes sure that Crankshaft doesn't disable optimization too early on hot functions that still contain unexecuted code without type information. R=jkummerow@chromium.org Review URL: https://codereview.chromium.org/14738009 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14658 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
parent
483a2958fa
commit
b65b6d735f
@ -1340,6 +1340,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
|
||||
}
|
||||
|
@ -544,9 +544,14 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
|
||||
// Set the continuation for the topmost frame.
|
||||
if (is_topmost && bailout_type_ != DEBUGGER) {
|
||||
Builtins* builtins = isolate_->builtins();
|
||||
Code* continuation = (bailout_type_ == EAGER)
|
||||
? builtins->builtin(Builtins::kNotifyDeoptimized)
|
||||
: builtins->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
|
||||
if (bailout_type_ == LAZY) {
|
||||
continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
} else if (bailout_type_ == SOFT) {
|
||||
continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
|
||||
} else {
|
||||
ASSERT(bailout_type_ == EAGER);
|
||||
}
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<uint32_t>(continuation->entry()));
|
||||
}
|
||||
@ -639,7 +644,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
// Get the address of the location in the code object if possible (r3) (return
|
||||
// address for lazy deoptimization) and compute the fp-to-sp delta in
|
||||
// register r4.
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ mov(r3, Operand::Zero());
|
||||
// Correct one word for bailout id.
|
||||
__ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
|
||||
@ -694,7 +699,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
|
||||
// Remove the bailout id, eventually return address, and the saved registers
|
||||
// from the stack.
|
||||
if (type() == EAGER || type() == OSR) {
|
||||
if (type() == EAGER || type() == SOFT || type() == OSR) {
|
||||
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
|
||||
} else {
|
||||
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
|
||||
@ -813,7 +818,7 @@ void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
|
||||
for (int i = 0; i < count(); i++) {
|
||||
int start = masm()->pc_offset();
|
||||
USE(start);
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ nop();
|
||||
} else {
|
||||
// Emulate ia32 like call by pushing return address to stack.
|
||||
|
@ -360,9 +360,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
|
||||
for (int i = 0; i < deopt_jump_table_.length(); i++) {
|
||||
__ bind(&deopt_jump_table_[i].label);
|
||||
Address entry = deopt_jump_table_[i].address;
|
||||
bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
|
||||
Deoptimizer::BailoutType type =
|
||||
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
|
||||
Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
|
||||
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
|
||||
if (id == Deoptimizer::kNotDeoptimizationEntry) {
|
||||
Comment(";;; jump table entry %d.", i);
|
||||
@ -371,7 +369,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
|
||||
}
|
||||
if (deopt_jump_table_[i].needs_frame) {
|
||||
__ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
if (needs_frame_is_call.is_bound()) {
|
||||
__ b(&needs_frame_is_call);
|
||||
} else {
|
||||
@ -404,7 +402,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
__ mov(lr, Operand(pc), LeaveCC, al);
|
||||
__ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
|
||||
} else {
|
||||
@ -833,14 +831,13 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type) {
|
||||
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
|
||||
ASSERT(environment->HasBeenRegistered());
|
||||
int id = environment->deoptimization_index();
|
||||
ASSERT(info()->IsOptimizing() || info()->IsStub());
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
Address entry =
|
||||
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
|
||||
if (entry == NULL) {
|
||||
@ -873,9 +870,11 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
// jump entry if this is the case.
|
||||
if (deopt_jump_table_.is_empty() ||
|
||||
(deopt_jump_table_.last().address != entry) ||
|
||||
(deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) ||
|
||||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
|
||||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
|
||||
JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt);
|
||||
Deoptimizer::JumpTableEntry table_entry(entry,
|
||||
bailout_type,
|
||||
!frame_is_built_);
|
||||
deopt_jump_table_.Add(table_entry, zone());
|
||||
}
|
||||
__ b(cc, &deopt_jump_table_.last().label);
|
||||
@ -883,6 +882,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment) {
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
DeoptimizeIf(cc, environment, bailout_type);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
|
||||
ASSERT(!info()->IsStub());
|
||||
DeoptimizeIf(al, environment, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
|
||||
ZoneList<Handle<Map> > maps(1, zone());
|
||||
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
|
||||
@ -5722,7 +5736,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
|
||||
|
||||
|
||||
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
|
||||
DeoptimizeIf(al, instr->environment());
|
||||
if (instr->hydrogen_value()->IsSoftDeoptimize()) {
|
||||
SoftDeoptimize(instr->environment());
|
||||
} else {
|
||||
DeoptimizeIf(al, instr->environment());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -290,7 +290,11 @@ class LCodeGen BASE_EMBEDDED {
|
||||
|
||||
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
|
||||
Safepoint::DeoptMode mode);
|
||||
void DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type);
|
||||
void DeoptimizeIf(Condition cc, LEnvironment* environment);
|
||||
void SoftDeoptimize(LEnvironment* environment);
|
||||
|
||||
void AddToTranslation(Translation* translation,
|
||||
LOperand* op,
|
||||
@ -387,18 +391,6 @@ class LCodeGen BASE_EMBEDDED {
|
||||
Register scratch,
|
||||
LEnvironment* environment);
|
||||
|
||||
struct JumpTableEntry {
|
||||
inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
|
||||
: label(),
|
||||
address(entry),
|
||||
needs_frame(frame),
|
||||
is_lazy_deopt(is_lazy) { }
|
||||
Label label;
|
||||
Address address;
|
||||
bool needs_frame;
|
||||
bool is_lazy_deopt;
|
||||
};
|
||||
|
||||
void EnsureSpaceForLazyDeopt();
|
||||
void DoLoadKeyedExternalArray(LLoadKeyed* instr);
|
||||
void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
|
||||
@ -416,7 +408,7 @@ class LCodeGen BASE_EMBEDDED {
|
||||
int current_instruction_;
|
||||
const ZoneList<LInstruction*>* instructions_;
|
||||
ZoneList<LEnvironment*> deoptimizations_;
|
||||
ZoneList<JumpTableEntry> deopt_jump_table_;
|
||||
ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
|
||||
ZoneList<Handle<Object> > deoptimization_literals_;
|
||||
ZoneList<Handle<Map> > prototype_maps_;
|
||||
ZoneList<Handle<Map> > transition_maps_;
|
||||
|
@ -107,6 +107,8 @@ enum BuiltinExtraArguments {
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifySoftDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyStubFailure, BUILTIN, UNINITIALIZED, \
|
||||
@ -380,6 +382,7 @@ class Builtins {
|
||||
static void Generate_LazyCompile(MacroAssembler* masm);
|
||||
static void Generate_LazyRecompile(MacroAssembler* masm);
|
||||
static void Generate_NotifyDeoptimized(MacroAssembler* masm);
|
||||
static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
|
||||
static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);
|
||||
static void Generate_NotifyOSR(MacroAssembler* masm);
|
||||
static void Generate_NotifyStubFailure(MacroAssembler* masm);
|
||||
|
@ -50,22 +50,23 @@ static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
|
||||
|
||||
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
|
||||
: allocator_(allocator),
|
||||
eager_deoptimization_entry_code_entries_(-1),
|
||||
lazy_deoptimization_entry_code_entries_(-1),
|
||||
eager_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
|
||||
lazy_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
|
||||
current_(NULL),
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
deoptimized_frame_info_(NULL),
|
||||
#endif
|
||||
deoptimizing_code_list_(NULL) { }
|
||||
deoptimizing_code_list_(NULL) {
|
||||
for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
|
||||
deopt_entry_code_entries_[i] = -1;
|
||||
deopt_entry_code_[i] = AllocateCodeChunk(allocator);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
DeoptimizerData::~DeoptimizerData() {
|
||||
allocator_->Free(eager_deoptimization_entry_code_);
|
||||
eager_deoptimization_entry_code_ = NULL;
|
||||
allocator_->Free(lazy_deoptimization_entry_code_);
|
||||
lazy_deoptimization_entry_code_ = NULL;
|
||||
for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
|
||||
allocator_->Free(deopt_entry_code_[i]);
|
||||
deopt_entry_code_[i] = NULL;
|
||||
}
|
||||
|
||||
DeoptimizingCodeListNode* current = deoptimizing_code_list_;
|
||||
while (current != NULL) {
|
||||
@ -488,6 +489,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
|
||||
StackFrame::Type frame_type) {
|
||||
switch (deopt_type) {
|
||||
case EAGER:
|
||||
case SOFT:
|
||||
case LAZY:
|
||||
case DEBUGGER:
|
||||
return (frame_type == StackFrame::STUB)
|
||||
@ -504,6 +506,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
|
||||
const char* Deoptimizer::MessageFor(BailoutType type) {
|
||||
switch (type) {
|
||||
case EAGER:
|
||||
case SOFT:
|
||||
case LAZY:
|
||||
return "DEOPT";
|
||||
case DEBUGGER:
|
||||
@ -545,6 +548,13 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
|
||||
}
|
||||
if (function != NULL && function->IsOptimized()) {
|
||||
function->shared()->increment_deopt_count();
|
||||
if (bailout_type_ == Deoptimizer::SOFT) {
|
||||
// Soft deopts shouldn't count against the overall re-optimization count
|
||||
// that can eventually lead to disabling optimization for a function.
|
||||
int opt_count = function->shared()->opt_count();
|
||||
if (opt_count > 0) opt_count--;
|
||||
function->shared()->set_opt_count(opt_count);
|
||||
}
|
||||
}
|
||||
compiled_code_ = FindOptimizedCode(function, optimized_code);
|
||||
StackFrame::Type frame_type = function == NULL
|
||||
@ -562,6 +572,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
|
||||
Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
|
||||
Code* optimized_code) {
|
||||
switch (bailout_type_) {
|
||||
case Deoptimizer::SOFT:
|
||||
case Deoptimizer::EAGER:
|
||||
ASSERT(from_ == NULL);
|
||||
return function->code();
|
||||
@ -597,7 +608,9 @@ void Deoptimizer::Trace() {
|
||||
bailout_id_,
|
||||
reinterpret_cast<intptr_t>(from_),
|
||||
fp_to_sp_delta_ - (2 * kPointerSize));
|
||||
if (bailout_type_ == EAGER) compiled_code_->PrintDeoptLocation(bailout_id_);
|
||||
if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
|
||||
compiled_code_->PrintDeoptLocation(bailout_id_);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -639,9 +652,8 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
|
||||
ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
|
||||
}
|
||||
DeoptimizerData* data = isolate->deoptimizer_data();
|
||||
MemoryChunk* base = (type == EAGER)
|
||||
? data->eager_deoptimization_entry_code_
|
||||
: data->lazy_deoptimization_entry_code_;
|
||||
ASSERT(type < kBailoutTypesWithCodeEntry);
|
||||
MemoryChunk* base = data->deopt_entry_code_[type];
|
||||
return base->area_start() + (id * table_entry_size_);
|
||||
}
|
||||
|
||||
@ -650,9 +662,7 @@ int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
|
||||
Address addr,
|
||||
BailoutType type) {
|
||||
DeoptimizerData* data = isolate->deoptimizer_data();
|
||||
MemoryChunk* base = (type == EAGER)
|
||||
? data->eager_deoptimization_entry_code_
|
||||
: data->lazy_deoptimization_entry_code_;
|
||||
MemoryChunk* base = data->deopt_entry_code_[type];
|
||||
Address start = base->area_start();
|
||||
if (base == NULL ||
|
||||
addr < start ||
|
||||
@ -2206,11 +2216,9 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
|
||||
// cause us to emit relocation information for the external
|
||||
// references. This is fine because the deoptimizer's code section
|
||||
// isn't meant to be serialized at all.
|
||||
ASSERT(type == EAGER || type == LAZY);
|
||||
ASSERT(type == EAGER || type == SOFT || type == LAZY);
|
||||
DeoptimizerData* data = isolate->deoptimizer_data();
|
||||
int entry_count = (type == EAGER)
|
||||
? data->eager_deoptimization_entry_code_entries_
|
||||
: data->lazy_deoptimization_entry_code_entries_;
|
||||
int entry_count = data->deopt_entry_code_entries_[type];
|
||||
if (max_entry_id < entry_count) return;
|
||||
entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
|
||||
while (max_entry_id >= entry_count) entry_count *= 2;
|
||||
@ -2223,9 +2231,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
|
||||
masm.GetCode(&desc);
|
||||
ASSERT(!RelocInfo::RequiresRelocation(desc));
|
||||
|
||||
MemoryChunk* chunk = (type == EAGER)
|
||||
? data->eager_deoptimization_entry_code_
|
||||
: data->lazy_deoptimization_entry_code_;
|
||||
MemoryChunk* chunk = data->deopt_entry_code_[type];
|
||||
ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
|
||||
desc.instr_size);
|
||||
chunk->CommitArea(desc.instr_size);
|
||||
@ -2233,11 +2239,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
|
||||
static_cast<size_t>(desc.instr_size));
|
||||
CPU::FlushICache(chunk->area_start(), desc.instr_size);
|
||||
|
||||
if (type == EAGER) {
|
||||
data->eager_deoptimization_entry_code_entries_ = entry_count;
|
||||
} else {
|
||||
data->lazy_deoptimization_entry_code_entries_ = entry_count;
|
||||
}
|
||||
data->deopt_entry_code_entries_[type] = entry_count;
|
||||
}
|
||||
|
||||
|
||||
|
@ -98,53 +98,34 @@ class OptimizedFunctionFilter BASE_EMBEDDED {
|
||||
class Deoptimizer;
|
||||
|
||||
|
||||
class DeoptimizerData {
|
||||
public:
|
||||
explicit DeoptimizerData(MemoryAllocator* allocator);
|
||||
~DeoptimizerData();
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
void Iterate(ObjectVisitor* v);
|
||||
#endif
|
||||
|
||||
Code* FindDeoptimizingCode(Address addr);
|
||||
void RemoveDeoptimizingCode(Code* code);
|
||||
|
||||
private:
|
||||
MemoryAllocator* allocator_;
|
||||
int eager_deoptimization_entry_code_entries_;
|
||||
int lazy_deoptimization_entry_code_entries_;
|
||||
MemoryChunk* eager_deoptimization_entry_code_;
|
||||
MemoryChunk* lazy_deoptimization_entry_code_;
|
||||
Deoptimizer* current_;
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
DeoptimizedFrameInfo* deoptimized_frame_info_;
|
||||
#endif
|
||||
|
||||
// List of deoptimized code which still have references from active stack
|
||||
// frames. These code objects are needed by the deoptimizer when deoptimizing
|
||||
// a frame for which the code object for the function has been
|
||||
// changed from the code present when deoptimizing was done.
|
||||
DeoptimizingCodeListNode* deoptimizing_code_list_;
|
||||
|
||||
friend class Deoptimizer;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
|
||||
};
|
||||
|
||||
|
||||
class Deoptimizer : public Malloced {
|
||||
public:
|
||||
enum BailoutType {
|
||||
EAGER,
|
||||
LAZY,
|
||||
SOFT,
|
||||
OSR,
|
||||
// This last bailout type is not really a bailout, but used by the
|
||||
// debugger to deoptimize stack frames to allow inspection.
|
||||
DEBUGGER
|
||||
};
|
||||
|
||||
static const int kBailoutTypesWithCodeEntry = SOFT + 1;
|
||||
|
||||
struct JumpTableEntry {
|
||||
inline JumpTableEntry(Address entry,
|
||||
Deoptimizer::BailoutType type,
|
||||
bool frame)
|
||||
: label(),
|
||||
address(entry),
|
||||
bailout_type(type),
|
||||
needs_frame(frame) { }
|
||||
Label label;
|
||||
Address address;
|
||||
Deoptimizer::BailoutType bailout_type;
|
||||
bool needs_frame;
|
||||
};
|
||||
|
||||
static bool TraceEnabledFor(BailoutType deopt_type,
|
||||
StackFrame::Type frame_type);
|
||||
static const char* MessageFor(BailoutType type);
|
||||
@ -626,6 +607,40 @@ class FrameDescription {
|
||||
};
|
||||
|
||||
|
||||
class DeoptimizerData {
|
||||
public:
|
||||
explicit DeoptimizerData(MemoryAllocator* allocator);
|
||||
~DeoptimizerData();
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
void Iterate(ObjectVisitor* v);
|
||||
#endif
|
||||
|
||||
Code* FindDeoptimizingCode(Address addr);
|
||||
void RemoveDeoptimizingCode(Code* code);
|
||||
|
||||
private:
|
||||
MemoryAllocator* allocator_;
|
||||
int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
|
||||
MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
|
||||
Deoptimizer* current_;
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
DeoptimizedFrameInfo* deoptimized_frame_info_;
|
||||
#endif
|
||||
|
||||
// List of deoptimized code which still have references from active stack
|
||||
// frames. These code objects are needed by the deoptimizer when deoptimizing
|
||||
// a frame for which the code object for the function has been
|
||||
// changed from the code present when deoptimizing was done.
|
||||
DeoptimizingCodeListNode* deoptimizing_code_list_;
|
||||
|
||||
friend class Deoptimizer;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
|
||||
};
|
||||
|
||||
|
||||
class TranslationBuffer BASE_EMBEDDED {
|
||||
public:
|
||||
explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
|
||||
|
@ -293,7 +293,14 @@ static int DecodeIt(Isolate* isolate,
|
||||
addr,
|
||||
Deoptimizer::LAZY);
|
||||
if (id == Deoptimizer::kNotDeoptimizationEntry) {
|
||||
out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
|
||||
id = Deoptimizer::GetDeoptimizationId(isolate,
|
||||
addr,
|
||||
Deoptimizer::SOFT);
|
||||
if (id == Deoptimizer::kNotDeoptimizationEntry) {
|
||||
out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
|
||||
} else {
|
||||
out.AddFormatted(" ;; soft deoptimization bailout %d", id);
|
||||
}
|
||||
} else {
|
||||
out.AddFormatted(" ;; lazy deoptimization bailout %d", id);
|
||||
}
|
||||
|
@ -657,6 +657,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
|
||||
}
|
||||
|
@ -659,9 +659,14 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
|
||||
// Set the continuation for the topmost frame.
|
||||
if (is_topmost && bailout_type_ != DEBUGGER) {
|
||||
Builtins* builtins = isolate_->builtins();
|
||||
Code* continuation = (bailout_type_ == EAGER)
|
||||
? builtins->builtin(Builtins::kNotifyDeoptimized)
|
||||
: builtins->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
|
||||
if (bailout_type_ == LAZY) {
|
||||
continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
} else if (bailout_type_ == SOFT) {
|
||||
continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
|
||||
} else {
|
||||
ASSERT(bailout_type_ == EAGER);
|
||||
}
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<uint32_t>(continuation->entry()));
|
||||
}
|
||||
@ -740,7 +745,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
|
||||
// Get the address of the location in the code object if possible
|
||||
// and compute the fp-to-sp delta in register edx.
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ Set(ecx, Immediate(0));
|
||||
__ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kPointerSize));
|
||||
} else {
|
||||
@ -793,7 +798,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
__ fnclex();
|
||||
|
||||
// Remove the bailout id and the double registers from the stack.
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ add(esp, Immediate(kDoubleRegsSize + kPointerSize));
|
||||
} else {
|
||||
__ add(esp, Immediate(kDoubleRegsSize + 2 * kPointerSize));
|
||||
|
@ -385,9 +385,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
for (int i = 0; i < jump_table_.length(); i++) {
|
||||
__ bind(&jump_table_[i].label);
|
||||
Address entry = jump_table_[i].address;
|
||||
bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
|
||||
Deoptimizer::BailoutType type =
|
||||
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
|
||||
Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
|
||||
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
|
||||
if (id == Deoptimizer::kNotDeoptimizationEntry) {
|
||||
Comment(";;; jump table entry %d.", i);
|
||||
@ -396,7 +394,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
}
|
||||
if (jump_table_[i].needs_frame) {
|
||||
__ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
if (needs_frame_is_call.is_bound()) {
|
||||
__ jmp(&needs_frame_is_call);
|
||||
} else {
|
||||
@ -441,7 +439,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
__ call(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
} else {
|
||||
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
@ -893,16 +891,15 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type) {
|
||||
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
|
||||
ASSERT(environment->HasBeenRegistered());
|
||||
// It's an error to deoptimize with the x87 fp stack in use.
|
||||
ASSERT(x87_stack_depth_ == 0);
|
||||
int id = environment->deoptimization_index();
|
||||
ASSERT(info()->IsOptimizing() || info()->IsStub());
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
Address entry =
|
||||
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
|
||||
if (entry == NULL) {
|
||||
@ -948,9 +945,8 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
}
|
||||
|
||||
ASSERT(info()->IsStub() || frame_is_built_);
|
||||
bool needs_lazy_deopt = info()->IsStub();
|
||||
if (cc == no_condition && frame_is_built_) {
|
||||
if (needs_lazy_deopt) {
|
||||
if (bailout_type == Deoptimizer::LAZY) {
|
||||
__ call(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
} else {
|
||||
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
@ -961,8 +957,10 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
if (jump_table_.is_empty() ||
|
||||
jump_table_.last().address != entry ||
|
||||
jump_table_.last().needs_frame != !frame_is_built_ ||
|
||||
jump_table_.last().is_lazy_deopt != needs_lazy_deopt) {
|
||||
JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt);
|
||||
jump_table_.last().bailout_type != bailout_type) {
|
||||
Deoptimizer::JumpTableEntry table_entry(entry,
|
||||
bailout_type,
|
||||
!frame_is_built_);
|
||||
jump_table_.Add(table_entry, zone());
|
||||
}
|
||||
if (cc == no_condition) {
|
||||
@ -974,6 +972,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment) {
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
DeoptimizeIf(cc, environment, bailout_type);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
|
||||
ASSERT(!info()->IsStub());
|
||||
DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
|
||||
ZoneList<Handle<Map> > maps(1, zone());
|
||||
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
|
||||
@ -6316,7 +6329,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
|
||||
|
||||
|
||||
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
|
||||
DeoptimizeIf(no_condition, instr->environment());
|
||||
if (instr->hydrogen_value()->IsSoftDeoptimize()) {
|
||||
SoftDeoptimize(instr->environment());
|
||||
} else {
|
||||
DeoptimizeIf(no_condition, instr->environment());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -276,7 +276,11 @@ class LCodeGen BASE_EMBEDDED {
|
||||
|
||||
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
|
||||
Safepoint::DeoptMode mode);
|
||||
void DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type);
|
||||
void DeoptimizeIf(Condition cc, LEnvironment* environment);
|
||||
void SoftDeoptimize(LEnvironment* environment);
|
||||
|
||||
void AddToTranslation(Translation* translation,
|
||||
LOperand* op,
|
||||
@ -397,23 +401,11 @@ class LCodeGen BASE_EMBEDDED {
|
||||
MacroAssembler* const masm_;
|
||||
CompilationInfo* const info_;
|
||||
|
||||
struct JumpTableEntry {
|
||||
inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
|
||||
: label(),
|
||||
address(entry),
|
||||
needs_frame(frame),
|
||||
is_lazy_deopt(is_lazy) { }
|
||||
Label label;
|
||||
Address address;
|
||||
bool needs_frame;
|
||||
bool is_lazy_deopt;
|
||||
};
|
||||
|
||||
int current_block_;
|
||||
int current_instruction_;
|
||||
const ZoneList<LInstruction*>* instructions_;
|
||||
ZoneList<LEnvironment*> deoptimizations_;
|
||||
ZoneList<JumpTableEntry> jump_table_;
|
||||
ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
|
||||
ZoneList<Handle<Object> > deoptimization_literals_;
|
||||
ZoneList<Handle<Map> > prototype_maps_;
|
||||
ZoneList<Handle<Map> > transition_maps_;
|
||||
|
@ -10147,12 +10147,15 @@ void Code::PrintDeoptLocation(int bailout_id) {
|
||||
RelocInfo* info = it.rinfo();
|
||||
if (info->rmode() == RelocInfo::COMMENT) {
|
||||
last_comment = reinterpret_cast<const char*>(info->data());
|
||||
} else if (last_comment != NULL &&
|
||||
bailout_id == Deoptimizer::GetDeoptimizationId(
|
||||
GetIsolate(), info->target_address(), Deoptimizer::EAGER)) {
|
||||
CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
|
||||
PrintF(" %s\n", last_comment);
|
||||
return;
|
||||
} else if (last_comment != NULL) {
|
||||
if ((bailout_id == Deoptimizer::GetDeoptimizationId(
|
||||
GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
|
||||
(bailout_id == Deoptimizer::GetDeoptimizationId(
|
||||
GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
|
||||
CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
|
||||
PrintF(" %s\n", last_comment);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -7974,7 +7974,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
|
||||
RUNTIME_ASSERT(frame->function()->IsJSFunction());
|
||||
Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
|
||||
Handle<Code> optimized_code(function->code());
|
||||
RUNTIME_ASSERT(type != Deoptimizer::EAGER || function->IsOptimized());
|
||||
RUNTIME_ASSERT((type != Deoptimizer::EAGER &&
|
||||
type != Deoptimizer::SOFT) || function->IsOptimized());
|
||||
|
||||
// Avoid doing too much work when running with --always-opt and keep
|
||||
// the optimized code around.
|
||||
|
@ -729,6 +729,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
|
||||
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
|
||||
}
|
||||
|
@ -530,9 +530,15 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
|
||||
|
||||
// Set the continuation for the topmost frame.
|
||||
if (is_topmost && bailout_type_ != DEBUGGER) {
|
||||
Code* continuation = (bailout_type_ == EAGER)
|
||||
? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
|
||||
: isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
Builtins* builtins = isolate_->builtins();
|
||||
Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
|
||||
if (bailout_type_ == LAZY) {
|
||||
continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
|
||||
} else if (bailout_type_ == SOFT) {
|
||||
continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
|
||||
} else {
|
||||
ASSERT(bailout_type_ == EAGER);
|
||||
}
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<intptr_t>(continuation->entry()));
|
||||
}
|
||||
@ -618,7 +624,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
|
||||
// Get the address of the location in the code object if possible
|
||||
// and compute the fp-to-sp delta in register arg5.
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ Set(arg_reg_4, 0);
|
||||
__ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
|
||||
} else {
|
||||
@ -669,7 +675,7 @@ void Deoptimizer::EntryGenerator::Generate() {
|
||||
}
|
||||
|
||||
// Remove the bailout id from the stack.
|
||||
if (type() == EAGER) {
|
||||
if (type() == EAGER || type() == SOFT) {
|
||||
__ addq(rsp, Immediate(kPointerSize));
|
||||
} else {
|
||||
__ addq(rsp, Immediate(2 * kPointerSize));
|
||||
|
@ -301,9 +301,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
for (int i = 0; i < jump_table_.length(); i++) {
|
||||
__ bind(&jump_table_[i].label);
|
||||
Address entry = jump_table_[i].address;
|
||||
bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
|
||||
Deoptimizer::BailoutType type =
|
||||
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
|
||||
Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
|
||||
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
|
||||
if (id == Deoptimizer::kNotDeoptimizationEntry) {
|
||||
Comment(";;; jump table entry %d.", i);
|
||||
@ -312,7 +310,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
}
|
||||
if (jump_table_[i].needs_frame) {
|
||||
__ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
if (needs_frame_is_call.is_bound()) {
|
||||
__ jmp(&needs_frame_is_call);
|
||||
} else {
|
||||
@ -348,7 +346,7 @@ bool LCodeGen::GenerateJumpTable() {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (is_lazy_deopt) {
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
__ call(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
} else {
|
||||
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
|
||||
@ -719,14 +717,13 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type) {
|
||||
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
|
||||
ASSERT(environment->HasBeenRegistered());
|
||||
int id = environment->deoptimization_index();
|
||||
ASSERT(info()->IsOptimizing() || info()->IsStub());
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
Address entry =
|
||||
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
|
||||
if (entry == NULL) {
|
||||
@ -759,8 +756,10 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
if (jump_table_.is_empty() ||
|
||||
jump_table_.last().address != entry ||
|
||||
jump_table_.last().needs_frame != !frame_is_built_ ||
|
||||
jump_table_.last().is_lazy_deopt != needs_lazy_deopt) {
|
||||
JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt);
|
||||
jump_table_.last().bailout_type != bailout_type) {
|
||||
Deoptimizer::JumpTableEntry table_entry(entry,
|
||||
bailout_type,
|
||||
!frame_is_built_);
|
||||
jump_table_.Add(table_entry, zone());
|
||||
}
|
||||
if (cc == no_condition) {
|
||||
@ -772,6 +771,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment) {
|
||||
Deoptimizer::BailoutType bailout_type = info()->IsStub()
|
||||
? Deoptimizer::LAZY
|
||||
: Deoptimizer::EAGER;
|
||||
DeoptimizeIf(cc, environment, bailout_type);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
|
||||
ASSERT(!info()->IsStub());
|
||||
DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
|
||||
ZoneList<Handle<Map> > maps(1, zone());
|
||||
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
|
||||
@ -5414,7 +5428,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
|
||||
|
||||
|
||||
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
|
||||
DeoptimizeIf(no_condition, instr->environment());
|
||||
if (instr->hydrogen_value()->IsSoftDeoptimize()) {
|
||||
SoftDeoptimize(instr->environment());
|
||||
} else {
|
||||
DeoptimizeIf(no_condition, instr->environment());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -247,8 +247,11 @@ class LCodeGen BASE_EMBEDDED {
|
||||
int argc);
|
||||
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
|
||||
Safepoint::DeoptMode mode);
|
||||
void DeoptimizeIf(Condition cc,
|
||||
LEnvironment* environment,
|
||||
Deoptimizer::BailoutType bailout_type);
|
||||
void DeoptimizeIf(Condition cc, LEnvironment* environment);
|
||||
|
||||
void SoftDeoptimize(LEnvironment* environment);
|
||||
void AddToTranslation(Translation* translation,
|
||||
LOperand* op,
|
||||
bool is_tagged,
|
||||
@ -340,18 +343,6 @@ class LCodeGen BASE_EMBEDDED {
|
||||
int* offset,
|
||||
AllocationSiteMode mode);
|
||||
|
||||
struct JumpTableEntry {
|
||||
inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
|
||||
: label(),
|
||||
address(entry),
|
||||
needs_frame(frame),
|
||||
is_lazy_deopt(is_lazy) { }
|
||||
Label label;
|
||||
Address address;
|
||||
bool needs_frame;
|
||||
bool is_lazy_deopt;
|
||||
};
|
||||
|
||||
void EnsureSpaceForLazyDeopt(int space_needed);
|
||||
void DoLoadKeyedExternalArray(LLoadKeyed* instr);
|
||||
void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
|
||||
@ -369,7 +360,7 @@ class LCodeGen BASE_EMBEDDED {
|
||||
int current_instruction_;
|
||||
const ZoneList<LInstruction*>* instructions_;
|
||||
ZoneList<LEnvironment*> deoptimizations_;
|
||||
ZoneList<JumpTableEntry> jump_table_;
|
||||
ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
|
||||
ZoneList<Handle<Object> > deoptimization_literals_;
|
||||
ZoneList<Handle<Map> > prototype_maps_;
|
||||
ZoneList<Handle<Map> > transition_maps_;
|
||||
|
Loading…
Reference in New Issue
Block a user