Pass Isolates explicitly in Deoptimizer-related code.

Removed a few ancient useless ASSERTs on the way. Reduced the number of train wrecks.

BUG=v8:2487

Review URL: https://codereview.chromium.org/12917002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13965 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
svenpanne@chromium.org 2013-03-18 13:57:49 +00:00
parent 37a2e32496
commit e4e4447305
20 changed files with 123 additions and 107 deletions

View File

@ -3041,7 +3041,7 @@ bool v8::Object::ForceDelete(v8::Handle<Value> key) {
// value with DontDelete properties. We have to deoptimize all contexts // value with DontDelete properties. We have to deoptimize all contexts
// because of possible cross-context inlined functions. // because of possible cross-context inlined functions.
if (self->IsJSGlobalProxy() || self->IsGlobalObject()) { if (self->IsJSGlobalProxy() || self->IsGlobalObject()) {
i::Deoptimizer::DeoptimizeAll(); i::Deoptimizer::DeoptimizeAll(isolate);
} }
EXCEPTION_PREAMBLE(isolate); EXCEPTION_PREAMBLE(isolate);
@ -6926,7 +6926,7 @@ void Testing::PrepareStressRun(int run) {
void Testing::DeoptimizeAll() { void Testing::DeoptimizeAll() {
i::Isolate* isolate = i::Isolate::Current(); i::Isolate* isolate = i::Isolate::Current();
i::HandleScope scope(isolate); i::HandleScope scope(isolate);
internal::Deoptimizer::DeoptimizeAll(); internal::Deoptimizer::DeoptimizeAll(isolate);
} }

View File

@ -365,7 +365,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
Deoptimizer::BailoutType type = Deoptimizer::BailoutType type =
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
int id = Deoptimizer::GetDeoptimizationId(entry, type); int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i); Comment(";;; jump table entry %d.", i);
} else { } else {
@ -915,7 +915,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
Handle<DeoptimizationInputData> data = Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED); factory()->NewDeoptimizationInputData(length, TENURED);
Handle<ByteArray> translations = translations_.CreateByteArray(); Handle<ByteArray> translations =
translations_.CreateByteArray(isolate()->factory());
data->SetTranslationByteArray(*translations); data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

View File

@ -789,7 +789,7 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
} }
void RelocInfo::Print(FILE* out) { void RelocInfo::Print(Isolate* isolate, FILE* out) {
PrintF(out, "%p %s", pc_, RelocModeName(rmode_)); PrintF(out, "%p %s", pc_, RelocModeName(rmode_));
if (IsComment(rmode_)) { if (IsComment(rmode_)) {
PrintF(out, " (%s)", reinterpret_cast<char*>(data_)); PrintF(out, " (%s)", reinterpret_cast<char*>(data_));
@ -812,10 +812,10 @@ void RelocInfo::Print(FILE* out) {
} else if (IsPosition(rmode_)) { } else if (IsPosition(rmode_)) {
PrintF(out, " (%" V8_PTR_PREFIX "d)", data()); PrintF(out, " (%" V8_PTR_PREFIX "d)", data());
} else if (IsRuntimeEntry(rmode_) && } else if (IsRuntimeEntry(rmode_) &&
Isolate::Current()->deoptimizer_data() != NULL) { isolate->deoptimizer_data() != NULL) {
// Deoptimization bailouts are stored as runtime entries. // Deoptimization bailouts are stored as runtime entries.
int id = Deoptimizer::GetDeoptimizationId( int id = Deoptimizer::GetDeoptimizationId(
target_address(), Deoptimizer::EAGER); isolate, target_address(), Deoptimizer::EAGER);
if (id != Deoptimizer::kNotDeoptimizationEntry) { if (id != Deoptimizer::kNotDeoptimizationEntry) {
PrintF(out, " (deoptimization bailout %d)", id); PrintF(out, " (deoptimization bailout %d)", id);
} }

View File

@ -450,7 +450,7 @@ class RelocInfo BASE_EMBEDDED {
#ifdef ENABLE_DISASSEMBLER #ifdef ENABLE_DISASSEMBLER
// Printing // Printing
static const char* RelocModeName(Mode rmode); static const char* RelocModeName(Mode rmode);
void Print(FILE* out); void Print(Isolate* isolate, FILE* out);
#endif // ENABLE_DISASSEMBLER #endif // ENABLE_DISASSEMBLER
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
void Verify(); void Verify();

View File

@ -2000,7 +2000,7 @@ void Debug::PrepareForBreakPoints() {
// If preparing for the first break point make sure to deoptimize all // If preparing for the first break point make sure to deoptimize all
// functions as debugging does not work with optimized code. // functions as debugging does not work with optimized code.
if (!has_break_points_) { if (!has_break_points_) {
Deoptimizer::DeoptimizeAll(); Deoptimizer::DeoptimizeAll(isolate_);
Handle<Code> lazy_compile = Handle<Code> lazy_compile =
Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile)); Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));

View File

@ -40,36 +40,31 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
DeoptimizerData::DeoptimizerData() { static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
eager_deoptimization_entry_code_entries_ = -1; return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
lazy_deoptimization_entry_code_entries_ = -1; OS::CommitPageSize(),
size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize(); EXECUTABLE,
MemoryAllocator* allocator = Isolate::Current()->memory_allocator(); NULL);
size_t initial_commit_size = OS::CommitPageSize();
eager_deoptimization_entry_code_ =
allocator->AllocateChunk(deopt_table_size,
initial_commit_size,
EXECUTABLE,
NULL);
lazy_deoptimization_entry_code_ =
allocator->AllocateChunk(deopt_table_size,
initial_commit_size,
EXECUTABLE,
NULL);
current_ = NULL;
deoptimizing_code_list_ = NULL;
#ifdef ENABLE_DEBUGGER_SUPPORT
deoptimized_frame_info_ = NULL;
#endif
} }
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
: allocator_(allocator),
eager_deoptimization_entry_code_entries_(-1),
lazy_deoptimization_entry_code_entries_(-1),
eager_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
lazy_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
current_(NULL),
#ifdef ENABLE_DEBUGGER_SUPPORT
deoptimized_frame_info_(NULL),
#endif
deoptimizing_code_list_(NULL) { }
DeoptimizerData::~DeoptimizerData() { DeoptimizerData::~DeoptimizerData() {
Isolate::Current()->memory_allocator()->Free( allocator_->Free(eager_deoptimization_entry_code_);
eager_deoptimization_entry_code_);
eager_deoptimization_entry_code_ = NULL; eager_deoptimization_entry_code_ = NULL;
Isolate::Current()->memory_allocator()->Free( allocator_->Free(lazy_deoptimization_entry_code_);
lazy_deoptimization_entry_code_);
lazy_deoptimization_entry_code_ = NULL; lazy_deoptimization_entry_code_ = NULL;
DeoptimizingCodeListNode* current = deoptimizing_code_list_; DeoptimizingCodeListNode* current = deoptimizing_code_list_;
@ -129,7 +124,6 @@ Deoptimizer* Deoptimizer::New(JSFunction* function,
Address from, Address from,
int fp_to_sp_delta, int fp_to_sp_delta,
Isolate* isolate) { Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
Deoptimizer* deoptimizer = new Deoptimizer(isolate, Deoptimizer* deoptimizer = new Deoptimizer(isolate,
function, function,
type, type,
@ -158,7 +152,6 @@ size_t Deoptimizer::GetMaxDeoptTableSize() {
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) { Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
Deoptimizer* result = isolate->deoptimizer_data()->current_; Deoptimizer* result = isolate->deoptimizer_data()->current_;
ASSERT(result != NULL); ASSERT(result != NULL);
result->DeleteFrameDescriptions(); result->DeleteFrameDescriptions();
@ -188,7 +181,6 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
JavaScriptFrame* frame, JavaScriptFrame* frame,
int jsframe_index, int jsframe_index,
Isolate* isolate) { Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
ASSERT(frame->is_optimized()); ASSERT(frame->is_optimized());
ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL); ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
@ -274,7 +266,6 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info, void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
Isolate* isolate) { Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info); ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
delete info; delete info;
isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL; isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
@ -319,11 +310,12 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext(
void Deoptimizer::VisitAllOptimizedFunctions( void Deoptimizer::VisitAllOptimizedFunctions(
Isolate* isolate,
OptimizedFunctionVisitor* visitor) { OptimizedFunctionVisitor* visitor) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;
// Run through the list of all native contexts and deoptimize. // Run through the list of all native contexts and deoptimize.
Object* context = Isolate::Current()->heap()->native_contexts_list(); Object* context = isolate->heap()->native_contexts_list();
while (!context->IsUndefined()) { while (!context->IsUndefined()) {
VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor); VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
@ -394,7 +386,7 @@ class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter {
}; };
void Deoptimizer::DeoptimizeAll() { void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;
if (FLAG_trace_deopt) { if (FLAG_trace_deopt) {
@ -402,7 +394,7 @@ void Deoptimizer::DeoptimizeAll() {
} }
DeoptimizeAllFilter filter; DeoptimizeAllFilter filter;
DeoptimizeAllFunctionsWith(&filter); DeoptimizeAllFunctionsWith(isolate, &filter);
} }
@ -456,11 +448,12 @@ void Deoptimizer::DeoptimizeAllFunctionsForContext(
} }
void Deoptimizer::DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter) { void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate,
OptimizedFunctionFilter* filter) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;
// Run through the list of all native contexts and deoptimize. // Run through the list of all native contexts and deoptimize.
Object* context = Isolate::Current()->heap()->native_contexts_list(); Object* context = isolate->heap()->native_contexts_list();
while (!context->IsUndefined()) { while (!context->IsUndefined()) {
DeoptimizeAllFunctionsForContext(Context::cast(context), filter); DeoptimizeAllFunctionsForContext(Context::cast(context), filter);
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
@ -640,30 +633,26 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
GetEntryMode mode) { GetEntryMode mode) {
ASSERT(id >= 0); ASSERT(id >= 0);
if (id >= kMaxNumberOfEntries) return NULL; if (id >= kMaxNumberOfEntries) return NULL;
MemoryChunk* base = NULL;
if (mode == ENSURE_ENTRY_CODE) { if (mode == ENSURE_ENTRY_CODE) {
EnsureCodeForDeoptimizationEntry(isolate, type, id); EnsureCodeForDeoptimizationEntry(isolate, type, id);
} else { } else {
ASSERT(mode == CALCULATE_ENTRY_ADDRESS); ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
} }
DeoptimizerData* data = isolate->deoptimizer_data(); DeoptimizerData* data = isolate->deoptimizer_data();
if (type == EAGER) { MemoryChunk* base = (type == EAGER)
base = data->eager_deoptimization_entry_code_; ? data->eager_deoptimization_entry_code_
} else { : data->lazy_deoptimization_entry_code_;
base = data->lazy_deoptimization_entry_code_;
}
return base->area_start() + (id * table_entry_size_); return base->area_start() + (id * table_entry_size_);
} }
int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
MemoryChunk* base = NULL; Address addr,
DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); BailoutType type) {
if (type == EAGER) { DeoptimizerData* data = isolate->deoptimizer_data();
base = data->eager_deoptimization_entry_code_; MemoryChunk* base = (type == EAGER)
} else { ? data->eager_deoptimization_entry_code_
base = data->lazy_deoptimization_entry_code_; : data->lazy_deoptimization_entry_code_;
}
Address start = base->area_start(); Address start = base->area_start();
if (base == NULL || if (base == NULL ||
addr < start || addr < start ||
@ -2135,7 +2124,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
masm.GetCode(&desc); masm.GetCode(&desc);
ASSERT(!RelocInfo::RequiresRelocation(desc)); ASSERT(!RelocInfo::RequiresRelocation(desc));
MemoryChunk* chunk = type == EAGER MemoryChunk* chunk = (type == EAGER)
? data->eager_deoptimization_entry_code_ ? data->eager_deoptimization_entry_code_
: data->lazy_deoptimization_entry_code_; : data->lazy_deoptimization_entry_code_;
ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >= ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
@ -2155,7 +2144,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function, void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
Code* code) { Code* code) {
SharedFunctionInfo* shared = function->shared(); SharedFunctionInfo* shared = function->shared();
Object* undefined = Isolate::Current()->heap()->undefined_value(); Object* undefined = function->GetHeap()->undefined_value();
Object* current = function; Object* current = function;
while (current != undefined) { while (current != undefined) {
@ -2281,10 +2270,9 @@ int32_t TranslationIterator::Next() {
} }
Handle<ByteArray> TranslationBuffer::CreateByteArray() { Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
int length = contents_.length(); int length = contents_.length();
Handle<ByteArray> result = Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
Isolate::Current()->factory()->NewByteArray(length, TENURED);
memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length); memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
return result; return result;
} }
@ -2479,7 +2467,7 @@ const char* Translation::StringFor(Opcode opcode) {
DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) { DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
GlobalHandles* global_handles = Isolate::Current()->global_handles(); GlobalHandles* global_handles = code->GetIsolate()->global_handles();
// Globalize the code object and make it weak. // Globalize the code object and make it weak.
code_ = Handle<Code>::cast(global_handles->Create(code)); code_ = Handle<Code>::cast(global_handles->Create(code));
global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()), global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),
@ -2490,7 +2478,7 @@ DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
DeoptimizingCodeListNode::~DeoptimizingCodeListNode() { DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
GlobalHandles* global_handles = Isolate::Current()->global_handles(); GlobalHandles* global_handles = code_->GetIsolate()->global_handles();
global_handles->Destroy(reinterpret_cast<Object**>(code_.location())); global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
} }
@ -2656,7 +2644,7 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
expression_stack_ = new Object*[expression_count_]; expression_stack_ = new Object*[expression_count_];
// Get the source position using the unoptimized code. // Get the source position using the unoptimized code.
Address pc = reinterpret_cast<Address>(output_frame->GetPc()); Address pc = reinterpret_cast<Address>(output_frame->GetPc());
Code* code = Code::cast(Isolate::Current()->heap()->FindCodeObject(pc)); Code* code = Code::cast(deoptimizer->isolate()->heap()->FindCodeObject(pc));
source_position_ = code->SourcePosition(pc); source_position_ = code->SourcePosition(pc);
for (int i = 0; i < expression_count_; i++) { for (int i = 0; i < expression_count_; i++) {

View File

@ -100,7 +100,7 @@ class Deoptimizer;
class DeoptimizerData { class DeoptimizerData {
public: public:
DeoptimizerData(); explicit DeoptimizerData(MemoryAllocator* allocator);
~DeoptimizerData(); ~DeoptimizerData();
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
@ -111,6 +111,7 @@ class DeoptimizerData {
void RemoveDeoptimizingCode(Code* code); void RemoveDeoptimizingCode(Code* code);
private: private:
MemoryAllocator* allocator_;
int eager_deoptimization_entry_code_entries_; int eager_deoptimization_entry_code_entries_;
int lazy_deoptimization_entry_code_entries_; int lazy_deoptimization_entry_code_entries_;
MemoryChunk* eager_deoptimization_entry_code_; MemoryChunk* eager_deoptimization_entry_code_;
@ -190,11 +191,12 @@ class Deoptimizer : public Malloced {
static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code); static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code);
// Deoptimize all functions in the heap. // Deoptimize all functions in the heap.
static void DeoptimizeAll(); static void DeoptimizeAll(Isolate* isolate);
static void DeoptimizeGlobalObject(JSObject* object); static void DeoptimizeGlobalObject(JSObject* object);
static void DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter); static void DeoptimizeAllFunctionsWith(Isolate* isolate,
OptimizedFunctionFilter* filter);
static void DeoptimizeAllFunctionsForContext( static void DeoptimizeAllFunctionsForContext(
Context* context, OptimizedFunctionFilter* filter); Context* context, OptimizedFunctionFilter* filter);
@ -202,7 +204,8 @@ class Deoptimizer : public Malloced {
static void VisitAllOptimizedFunctionsForContext( static void VisitAllOptimizedFunctionsForContext(
Context* context, OptimizedFunctionVisitor* visitor); Context* context, OptimizedFunctionVisitor* visitor);
static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor); static void VisitAllOptimizedFunctions(Isolate* isolate,
OptimizedFunctionVisitor* visitor);
// The size in bytes of the code required at a lazy deopt patch site. // The size in bytes of the code required at a lazy deopt patch site.
static int patch_size(); static int patch_size();
@ -259,7 +262,9 @@ class Deoptimizer : public Malloced {
int id, int id,
BailoutType type, BailoutType type,
GetEntryMode mode = ENSURE_ENTRY_CODE); GetEntryMode mode = ENSURE_ENTRY_CODE);
static int GetDeoptimizationId(Address addr, BailoutType type); static int GetDeoptimizationId(Isolate* isolate,
Address addr,
BailoutType type);
static int GetOutputInfo(DeoptimizationOutputData* data, static int GetOutputInfo(DeoptimizationOutputData* data,
BailoutId node_id, BailoutId node_id,
SharedFunctionInfo* shared); SharedFunctionInfo* shared);
@ -321,6 +326,8 @@ class Deoptimizer : public Malloced {
BailoutType type, BailoutType type,
int max_entry_id); int max_entry_id);
Isolate* isolate() const { return isolate_; }
private: private:
static const int kMinNumberOfEntries = 64; static const int kMinNumberOfEntries = 64;
static const int kMaxNumberOfEntries = 16384; static const int kMaxNumberOfEntries = 16384;
@ -604,7 +611,7 @@ class TranslationBuffer BASE_EMBEDDED {
int CurrentIndex() const { return contents_.length(); } int CurrentIndex() const { return contents_.length(); }
void Add(int32_t value, Zone* zone); void Add(int32_t value, Zone* zone);
Handle<ByteArray> CreateByteArray(); Handle<ByteArray> CreateByteArray(Factory* factory);
private: private:
ZoneList<uint8_t> contents_; ZoneList<uint8_t> contents_;

View File

@ -286,9 +286,13 @@ static int DecodeIt(Isolate* isolate,
isolate->deoptimizer_data() != NULL) { isolate->deoptimizer_data() != NULL) {
// A runtime entry relocinfo might be a deoptimization bailout. // A runtime entry relocinfo might be a deoptimization bailout.
Address addr = relocinfo.target_address(); Address addr = relocinfo.target_address();
int id = Deoptimizer::GetDeoptimizationId(addr, Deoptimizer::EAGER); int id = Deoptimizer::GetDeoptimizationId(isolate,
addr,
Deoptimizer::EAGER);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
id = Deoptimizer::GetDeoptimizationId(addr, Deoptimizer::LAZY); id = Deoptimizer::GetDeoptimizationId(isolate,
addr,
Deoptimizer::LAZY);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode)); out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
} else { } else {

View File

@ -383,7 +383,7 @@ bool LCodeGen::GenerateJumpTable() {
bool is_lazy_deopt = jump_table_[i].is_lazy_deopt; bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
Deoptimizer::BailoutType type = Deoptimizer::BailoutType type =
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
int id = Deoptimizer::GetDeoptimizationId(entry, type); int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i); Comment(";;; jump table entry %d.", i);
} else { } else {
@ -953,7 +953,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
Handle<DeoptimizationInputData> data = Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED); factory()->NewDeoptimizationInputData(length, TENURED);
Handle<ByteArray> translations = translations_.CreateByteArray(); Handle<ByteArray> translations =
translations_.CreateByteArray(isolate()->factory());
data->SetTranslationByteArray(*translations); data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

View File

@ -2064,7 +2064,7 @@ bool Isolate::Init(Deserializer* des) {
return false; return false;
} }
deoptimizer_data_ = new DeoptimizerData; deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
const bool create_heap_objects = (des == NULL); const bool create_heap_objects = (des == NULL);
if (create_heap_objects && !heap_.CreateHeapObjects()) { if (create_heap_objects && !heap_.CreateHeapObjects()) {

View File

@ -1260,7 +1260,7 @@ static void DeoptimizeDependentFunctions(SharedFunctionInfo* function_info) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;
DependentFunctionFilter filter(function_info); DependentFunctionFilter filter(function_info);
Deoptimizer::DeoptimizeAllFunctionsWith(&filter); Deoptimizer::DeoptimizeAllFunctionsWith(function_info->GetIsolate(), &filter);
} }

View File

@ -1559,7 +1559,7 @@ static int EnumerateCompiledFunctions(Heap* heap,
EnumerateOptimizedFunctionsVisitor visitor(sfis, EnumerateOptimizedFunctionsVisitor visitor(sfis,
code_objects, code_objects,
&compiled_funcs_count); &compiled_funcs_count);
Deoptimizer::VisitAllOptimizedFunctions(&visitor); Deoptimizer::VisitAllOptimizedFunctions(heap->isolate(), &visitor);
return compiled_funcs_count; return compiled_funcs_count;
} }

View File

@ -545,7 +545,7 @@ void MarkCompactCollector::ClearMarkbits() {
void MarkCompactCollector::StartSweeperThreads() { void MarkCompactCollector::StartSweeperThreads() {
sweeping_pending_ = true; sweeping_pending_ = true;
for (int i = 0; i < FLAG_sweeper_threads; i++) { for (int i = 0; i < FLAG_sweeper_threads; i++) {
heap()->isolate()->sweeper_threads()[i]->StartSweeping(); isolate()->sweeper_threads()[i]->StartSweeping();
} }
} }
@ -553,7 +553,7 @@ void MarkCompactCollector::StartSweeperThreads() {
void MarkCompactCollector::WaitUntilSweepingCompleted() { void MarkCompactCollector::WaitUntilSweepingCompleted() {
ASSERT(sweeping_pending_ == true); ASSERT(sweeping_pending_ == true);
for (int i = 0; i < FLAG_sweeper_threads; i++) { for (int i = 0; i < FLAG_sweeper_threads; i++) {
heap()->isolate()->sweeper_threads()[i]->WaitForSweeperThread(); isolate()->sweeper_threads()[i]->WaitForSweeperThread();
} }
sweeping_pending_ = false; sweeping_pending_ = false;
StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE)); StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE));
@ -567,7 +567,7 @@ intptr_t MarkCompactCollector::
StealMemoryFromSweeperThreads(PagedSpace* space) { StealMemoryFromSweeperThreads(PagedSpace* space) {
intptr_t freed_bytes = 0; intptr_t freed_bytes = 0;
for (int i = 0; i < FLAG_sweeper_threads; i++) { for (int i = 0; i < FLAG_sweeper_threads; i++) {
freed_bytes += heap()->isolate()->sweeper_threads()[i]->StealMemory(space); freed_bytes += isolate()->sweeper_threads()[i]->StealMemory(space);
} }
space->AddToAccountingStats(freed_bytes); space->AddToAccountingStats(freed_bytes);
space->DecrementUnsweptFreeBytes(freed_bytes); space->DecrementUnsweptFreeBytes(freed_bytes);
@ -576,7 +576,7 @@ intptr_t MarkCompactCollector::
bool MarkCompactCollector::AreSweeperThreadsActivated() { bool MarkCompactCollector::AreSweeperThreadsActivated() {
return heap()->isolate()->sweeper_threads() != NULL; return isolate()->sweeper_threads() != NULL;
} }
@ -587,14 +587,14 @@ bool MarkCompactCollector::IsConcurrentSweepingInProgress() {
void MarkCompactCollector::MarkInParallel() { void MarkCompactCollector::MarkInParallel() {
for (int i = 0; i < FLAG_marking_threads; i++) { for (int i = 0; i < FLAG_marking_threads; i++) {
heap()->isolate()->marking_threads()[i]->StartMarking(); isolate()->marking_threads()[i]->StartMarking();
} }
} }
void MarkCompactCollector::WaitUntilMarkingCompleted() { void MarkCompactCollector::WaitUntilMarkingCompleted() {
for (int i = 0; i < FLAG_marking_threads; i++) { for (int i = 0; i < FLAG_marking_threads; i++) {
heap()->isolate()->marking_threads()[i]->WaitForMarkingThread(); isolate()->marking_threads()[i]->WaitForMarkingThread();
} }
} }
@ -952,10 +952,10 @@ void MarkCompactCollector::Finish() {
// force lazy re-initialization of it. This must be done after the // force lazy re-initialization of it. This must be done after the
// GC, because it relies on the new address of certain old space // GC, because it relies on the new address of certain old space
// objects (empty string, illegal builtin). // objects (empty string, illegal builtin).
heap()->isolate()->stub_cache()->Clear(); isolate()->stub_cache()->Clear();
DeoptimizeMarkedCodeFilter filter; DeoptimizeMarkedCodeFilter filter;
Deoptimizer::DeoptimizeAllFunctionsWith(&filter); Deoptimizer::DeoptimizeAllFunctionsWith(isolate(), &filter);
} }
@ -1932,7 +1932,7 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
void MarkCompactCollector::MarkImplicitRefGroups() { void MarkCompactCollector::MarkImplicitRefGroups() {
List<ImplicitRefGroup*>* ref_groups = List<ImplicitRefGroup*>* ref_groups =
heap()->isolate()->global_handles()->implicit_ref_groups(); isolate()->global_handles()->implicit_ref_groups();
int last = 0; int last = 0;
for (int i = 0; i < ref_groups->length(); i++) { for (int i = 0; i < ref_groups->length(); i++) {
@ -2052,7 +2052,7 @@ void MarkCompactCollector::ProcessExternalMarking(RootMarkingVisitor* visitor) {
bool work_to_do = true; bool work_to_do = true;
ASSERT(marking_deque_.IsEmpty()); ASSERT(marking_deque_.IsEmpty());
while (work_to_do) { while (work_to_do) {
heap()->isolate()->global_handles()->IterateObjectGroups( isolate()->global_handles()->IterateObjectGroups(
visitor, &IsUnmarkedHeapObjectWithHeap); visitor, &IsUnmarkedHeapObjectWithHeap);
MarkImplicitRefGroups(); MarkImplicitRefGroups();
work_to_do = !marking_deque_.IsEmpty(); work_to_do = !marking_deque_.IsEmpty();
@ -2066,7 +2066,7 @@ void MarkCompactCollector::MarkLiveObjects() {
// The recursive GC marker detects when it is nearing stack overflow, // The recursive GC marker detects when it is nearing stack overflow,
// and switches to a different marking system. JS interrupts interfere // and switches to a different marking system. JS interrupts interfere
// with the C stack limit check. // with the C stack limit check.
PostponeInterruptsScope postpone(heap()->isolate()); PostponeInterruptsScope postpone(isolate());
bool incremental_marking_overflowed = false; bool incremental_marking_overflowed = false;
IncrementalMarking* incremental_marking = heap_->incremental_marking(); IncrementalMarking* incremental_marking = heap_->incremental_marking();
@ -2520,7 +2520,7 @@ void MarkCompactCollector::MigrateObject(Address dst,
} }
} }
} else if (dest == CODE_SPACE) { } else if (dest == CODE_SPACE) {
PROFILE(heap()->isolate(), CodeMoveEvent(src, dst)); PROFILE(isolate(), CodeMoveEvent(src, dst));
heap()->MoveBlock(dst, src, size); heap()->MoveBlock(dst, src, size);
SlotsBuffer::AddTo(&slots_buffer_allocator_, SlotsBuffer::AddTo(&slots_buffer_allocator_,
&migration_slots_buffer_, &migration_slots_buffer_,
@ -3940,15 +3940,15 @@ void MarkCompactCollector::SweepSpaces() {
void MarkCompactCollector::EnableCodeFlushing(bool enable) { void MarkCompactCollector::EnableCodeFlushing(bool enable) {
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
if (heap()->isolate()->debug()->IsLoaded() || if (isolate()->debug()->IsLoaded() ||
heap()->isolate()->debug()->has_break_points()) { isolate()->debug()->has_break_points()) {
enable = false; enable = false;
} }
#endif #endif
if (enable) { if (enable) {
if (code_flusher_ != NULL) return; if (code_flusher_ != NULL) return;
code_flusher_ = new CodeFlusher(heap()->isolate()); code_flusher_ = new CodeFlusher(isolate());
} else { } else {
if (code_flusher_ == NULL) return; if (code_flusher_ == NULL) return;
code_flusher_->EvictAllCandidates(); code_flusher_->EvictAllCandidates();
@ -3974,6 +3974,11 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
} }
Isolate* MarkCompactCollector::isolate() const {
return heap_->isolate();
}
void MarkCompactCollector::Initialize() { void MarkCompactCollector::Initialize() {
MarkCompactMarkingVisitor::Initialize(); MarkCompactMarkingVisitor::Initialize();
IncrementalMarking::Initialize(); IncrementalMarking::Initialize();
@ -4055,7 +4060,7 @@ void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
ASSERT(heap()->gc_state() == Heap::MARK_COMPACT); ASSERT(heap()->gc_state() == Heap::MARK_COMPACT);
if (is_compacting()) { if (is_compacting()) {
Code* host = heap()->isolate()->inner_pointer_to_code_cache()-> Code* host = isolate()->inner_pointer_to_code_cache()->
GcSafeFindCodeForInnerPointer(pc); GcSafeFindCodeForInnerPointer(pc);
MarkBit mark_bit = Marking::MarkBitFrom(host); MarkBit mark_bit = Marking::MarkBitFrom(host);
if (Marking::IsBlack(mark_bit)) { if (Marking::IsBlack(mark_bit)) {

View File

@ -580,6 +580,7 @@ class MarkCompactCollector {
static inline bool IsMarked(Object* obj); static inline bool IsMarked(Object* obj);
inline Heap* heap() const { return heap_; } inline Heap* heap() const { return heap_; }
inline Isolate* isolate() const;
CodeFlusher* code_flusher() { return code_flusher_; } CodeFlusher* code_flusher() { return code_flusher_; }
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }

View File

@ -360,7 +360,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
Deoptimizer::BailoutType type = Deoptimizer::BailoutType type =
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
int id = Deoptimizer::GetDeoptimizationId(entry, type); int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i); Comment(";;; jump table entry %d.", i);
} else { } else {
@ -897,7 +897,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
Handle<DeoptimizationInputData> data = Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED); factory()->NewDeoptimizationInputData(length, TENURED);
Handle<ByteArray> translations = translations_.CreateByteArray(); Handle<ByteArray> translations =
translations_.CreateByteArray(isolate()->factory());
data->SetTranslationByteArray(*translations); data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

View File

@ -3479,6 +3479,7 @@ bool Map::is_observed() {
void Map::NotifyLeafMapLayoutChange() { void Map::NotifyLeafMapLayoutChange() {
dependent_code()->DeoptimizeDependentCodeGroup( dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(),
DependentCode::kPrototypeCheckGroup); DependentCode::kPrototypeCheckGroup);
} }

View File

@ -9107,7 +9107,7 @@ void Code::PrintDeoptLocation(int bailout_id) {
last_comment = reinterpret_cast<const char*>(info->data()); last_comment = reinterpret_cast<const char*>(info->data());
} else if (last_comment != NULL && } else if (last_comment != NULL &&
bailout_id == Deoptimizer::GetDeoptimizationId( bailout_id == Deoptimizer::GetDeoptimizationId(
info->target_address(), Deoptimizer::EAGER)) { GetIsolate(), info->target_address(), Deoptimizer::EAGER)) {
CHECK(RelocInfo::IsRuntimeEntry(info->rmode())); CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
PrintF(" %s\n", last_comment); PrintF(" %s\n", last_comment);
return; return;
@ -9466,7 +9466,9 @@ void Code::Disassemble(const char* name, FILE* out) {
} }
PrintF("RelocInfo (size = %d)\n", relocation_size()); PrintF("RelocInfo (size = %d)\n", relocation_size());
for (RelocIterator it(this); !it.done(); it.next()) it.rinfo()->Print(out); for (RelocIterator it(this); !it.done(); it.next()) {
it.rinfo()->Print(GetIsolate(), out);
}
PrintF(out, "\n"); PrintF(out, "\n");
} }
#endif // ENABLE_DISASSEMBLER #endif // ENABLE_DISASSEMBLER
@ -9839,6 +9841,7 @@ class DeoptimizeDependentCodeFilter : public OptimizedFunctionFilter {
void DependentCode::DeoptimizeDependentCodeGroup( void DependentCode::DeoptimizeDependentCodeGroup(
Isolate* isolate,
DependentCode::DependencyGroup group) { DependentCode::DependencyGroup group) {
AssertNoAllocation no_allocation_scope; AssertNoAllocation no_allocation_scope;
DependentCode::GroupStartIndexes starts(this); DependentCode::GroupStartIndexes starts(this);
@ -9861,7 +9864,7 @@ void DependentCode::DeoptimizeDependentCodeGroup(
} }
set_number_of_entries(group, 0); set_number_of_entries(group, 0);
DeoptimizeDependentCodeFilter filter; DeoptimizeDependentCodeFilter filter;
Deoptimizer::DeoptimizeAllFunctionsWith(&filter); Deoptimizer::DeoptimizeAllFunctionsWith(isolate, &filter);
} }

View File

@ -4860,7 +4860,8 @@ class DependentCode: public FixedArray {
static Handle<DependentCode> Insert(Handle<DependentCode> entries, static Handle<DependentCode> Insert(Handle<DependentCode> entries,
DependencyGroup group, DependencyGroup group,
Handle<Code> value); Handle<Code> value);
void DeoptimizeDependentCodeGroup(DependentCode::DependencyGroup group); void DeoptimizeDependentCodeGroup(Isolate* isolate,
DependentCode::DependencyGroup group);
// The following low-level accessors should only be used by this class // The following low-level accessors should only be used by this class
// and the mark compact collector. // and the mark compact collector.

View File

@ -308,7 +308,7 @@ bool LCodeGen::GenerateJumpTable() {
bool is_lazy_deopt = jump_table_[i].is_lazy_deopt; bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
Deoptimizer::BailoutType type = Deoptimizer::BailoutType type =
is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
int id = Deoptimizer::GetDeoptimizationId(entry, type); int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) { if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i); Comment(";;; jump table entry %d.", i);
} else { } else {
@ -808,7 +808,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
Handle<DeoptimizationInputData> data = Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED); factory()->NewDeoptimizationInputData(length, TENURED);
Handle<ByteArray> translations = translations_.CreateByteArray(); Handle<ByteArray> translations =
translations_.CreateByteArray(isolate()->factory());
data->SetTranslationByteArray(*translations); data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

View File

@ -656,7 +656,8 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
v8::Handle<v8::Object> exec_state, v8::Handle<v8::Object> exec_state,
v8::Handle<v8::Object> event_data, v8::Handle<v8::Object> event_data,
v8::Handle<v8::Value> data) { v8::Handle<v8::Value> data) {
Debug* debug = v8::internal::Isolate::Current()->debug(); v8::internal::Isolate* isolate = v8::internal::Isolate::Current();
Debug* debug = isolate->debug();
// When hitting a debug event listener there must be a break set. // When hitting a debug event listener there must be a break set.
CHECK_NE(debug->break_id(), 0); CHECK_NE(debug->break_id(), 0);
@ -732,7 +733,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
// Perform a full deoptimization when the specified number of // Perform a full deoptimization when the specified number of
// breaks have been hit. // breaks have been hit.
if (break_point_hit_count == break_point_hit_count_deoptimize) { if (break_point_hit_count == break_point_hit_count_deoptimize) {
i::Deoptimizer::DeoptimizeAll(); i::Deoptimizer::DeoptimizeAll(isolate);
} }
} else if (event == v8::AfterCompile && !compiled_script_data.IsEmpty()) { } else if (event == v8::AfterCompile && !compiled_script_data.IsEmpty()) {
const int argc = 1; const int argc = 1;
@ -983,7 +984,8 @@ static void DebugEventBreakMax(v8::DebugEvent event,
v8::Handle<v8::Object> exec_state, v8::Handle<v8::Object> exec_state,
v8::Handle<v8::Object> event_data, v8::Handle<v8::Object> event_data,
v8::Handle<v8::Value> data) { v8::Handle<v8::Value> data) {
v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug(); v8::internal::Isolate* isolate = v8::internal::Isolate::Current();
v8::internal::Debug* debug = isolate->debug();
// When hitting a debug event listener there must be a break set. // When hitting a debug event listener there must be a break set.
CHECK_NE(debug->break_id(), 0); CHECK_NE(debug->break_id(), 0);
@ -1014,7 +1016,7 @@ static void DebugEventBreakMax(v8::DebugEvent event,
// Perform a full deoptimization when the specified number of // Perform a full deoptimization when the specified number of
// breaks have been hit. // breaks have been hit.
if (break_point_hit_count == break_point_hit_count_deoptimize) { if (break_point_hit_count == break_point_hit_count_deoptimize) {
i::Deoptimizer::DeoptimizeAll(); i::Deoptimizer::DeoptimizeAll(isolate);
} }
} }
} }
@ -7167,7 +7169,7 @@ static void DebugEventBreakDeoptimize(v8::DebugEvent event,
v8::Handle<v8::String> function_name(result->ToString()); v8::Handle<v8::String> function_name(result->ToString());
function_name->WriteAscii(fn); function_name->WriteAscii(fn);
if (strcmp(fn, "bar") == 0) { if (strcmp(fn, "bar") == 0) {
i::Deoptimizer::DeoptimizeAll(); i::Deoptimizer::DeoptimizeAll(v8::internal::Isolate::Current());
debug_event_break_deoptimize_done = true; debug_event_break_deoptimize_done = true;
} }
} }