Pass Isolates explicitly in Deoptimizer-related code.
Removed a few ancient, useless ASSERTs along the way and reduced the number
of "train wrecks" (long chained accessor calls such as heap()->isolate()->...).

BUG=v8:2487
Review URL: https://codereview.chromium.org/12917002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13965 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 37a2e32496
commit e4e4447305
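The same pattern repeats in every file touched by the diff below: functions that used to reach for the ambient thread-local isolate via Isolate::Current() now take an Isolate* parameter (or use an accessor that is already in scope), and chains like heap()->isolate()->... are shortened through a cached accessor. A minimal, self-contained C++ sketch of the idea, using hypothetical stand-in types rather than the real V8 classes:

// isolate_passing_sketch.cc -- hypothetical stand-ins, not the real V8 types.
#include <iostream>

struct DeoptimizerData { int entries = 0; };

class Isolate {
 public:
  // Stand-in for the ambient, thread-local lookup the commit removes.
  static Isolate* Current() { static Isolate current; return &current; }
  DeoptimizerData* deoptimizer_data() { return &data_; }
 private:
  DeoptimizerData data_;
};

// Before: the callee silently reaches for the ambient isolate.
void DeoptimizeAllBefore() {
  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
  ++data->entries;
}

// After: the isolate is an explicit parameter, so the dependency shows up
// in the signature and tests can pass a specific isolate.
void DeoptimizeAllAfter(Isolate* isolate) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  ++data->entries;
}

int main() {
  Isolate* isolate = Isolate::Current();
  DeoptimizeAllBefore();         // old style: hidden ambient lookup
  DeoptimizeAllAfter(isolate);   // new style: explicit parameter
  std::cout << isolate->deoptimizer_data()->entries << "\n";  // prints 2
  return 0;
}

Making the isolate an explicit parameter surfaces the dependency in the signature and avoids a hidden thread-local lookup on every call.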
@@ -3041,7 +3041,7 @@ bool v8::Object::ForceDelete(v8::Handle<Value> key) {
   // value with DontDelete properties. We have to deoptimize all contexts
   // because of possible cross-context inlined functions.
   if (self->IsJSGlobalProxy() || self->IsGlobalObject()) {
-    i::Deoptimizer::DeoptimizeAll();
+    i::Deoptimizer::DeoptimizeAll(isolate);
   }
 
   EXCEPTION_PREAMBLE(isolate);

@@ -6926,7 +6926,7 @@ void Testing::PrepareStressRun(int run) {
 void Testing::DeoptimizeAll() {
   i::Isolate* isolate = i::Isolate::Current();
   i::HandleScope scope(isolate);
-  internal::Deoptimizer::DeoptimizeAll();
+  internal::Deoptimizer::DeoptimizeAll(isolate);
 }
 
 
@@ -365,7 +365,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
     bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
     Deoptimizer::BailoutType type =
         is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
-    int id = Deoptimizer::GetDeoptimizationId(entry, type);
+    int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {

@@ -915,7 +915,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
 
-  Handle<ByteArray> translations = translations_.CreateByteArray();
+  Handle<ByteArray> translations =
+      translations_.CreateByteArray(isolate()->factory());
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
@@ -789,7 +789,7 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
 }
 
 
-void RelocInfo::Print(FILE* out) {
+void RelocInfo::Print(Isolate* isolate, FILE* out) {
   PrintF(out, "%p %s", pc_, RelocModeName(rmode_));
   if (IsComment(rmode_)) {
     PrintF(out, " (%s)", reinterpret_cast<char*>(data_));

@@ -812,10 +812,10 @@ void RelocInfo::Print(FILE* out) {
   } else if (IsPosition(rmode_)) {
     PrintF(out, " (%" V8_PTR_PREFIX "d)", data());
   } else if (IsRuntimeEntry(rmode_) &&
-             Isolate::Current()->deoptimizer_data() != NULL) {
+             isolate->deoptimizer_data() != NULL) {
     // Depotimization bailouts are stored as runtime entries.
     int id = Deoptimizer::GetDeoptimizationId(
-        target_address(), Deoptimizer::EAGER);
+        isolate, target_address(), Deoptimizer::EAGER);
     if (id != Deoptimizer::kNotDeoptimizationEntry) {
       PrintF(out, " (deoptimization bailout %d)", id);
     }

@@ -450,7 +450,7 @@ class RelocInfo BASE_EMBEDDED {
 #ifdef ENABLE_DISASSEMBLER
   // Printing
   static const char* RelocModeName(Mode rmode);
-  void Print(FILE* out);
+  void Print(Isolate* isolate, FILE* out);
 #endif  // ENABLE_DISASSEMBLER
 #ifdef VERIFY_HEAP
   void Verify();
@@ -2000,7 +2000,7 @@ void Debug::PrepareForBreakPoints() {
     // If preparing for the first break point make sure to deoptimize all
     // functions as debugging does not work with optimized code.
     if (!has_break_points_) {
-      Deoptimizer::DeoptimizeAll();
+      Deoptimizer::DeoptimizeAll(isolate_);
 
       Handle<Code> lazy_compile =
           Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
@@ -40,36 +40,31 @@
 namespace v8 {
 namespace internal {
 
-DeoptimizerData::DeoptimizerData() {
-  eager_deoptimization_entry_code_entries_ = -1;
-  lazy_deoptimization_entry_code_entries_ = -1;
-  size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize();
-  MemoryAllocator* allocator = Isolate::Current()->memory_allocator();
-  size_t initial_commit_size = OS::CommitPageSize();
-  eager_deoptimization_entry_code_ =
-      allocator->AllocateChunk(deopt_table_size,
-                               initial_commit_size,
-                               EXECUTABLE,
-                               NULL);
-  lazy_deoptimization_entry_code_ =
-      allocator->AllocateChunk(deopt_table_size,
-                               initial_commit_size,
-                               EXECUTABLE,
-                               NULL);
-  current_ = NULL;
-  deoptimizing_code_list_ = NULL;
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  deoptimized_frame_info_ = NULL;
-#endif
+static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
+  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
+                                  OS::CommitPageSize(),
+                                  EXECUTABLE,
+                                  NULL);
 }
 
 
+DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
+    : allocator_(allocator),
+      eager_deoptimization_entry_code_entries_(-1),
+      lazy_deoptimization_entry_code_entries_(-1),
+      eager_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
+      lazy_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
+      current_(NULL),
+#ifdef ENABLE_DEBUGGER_SUPPORT
+      deoptimized_frame_info_(NULL),
+#endif
+      deoptimizing_code_list_(NULL) { }
+
+
 DeoptimizerData::~DeoptimizerData() {
-  Isolate::Current()->memory_allocator()->Free(
-      eager_deoptimization_entry_code_);
+  allocator_->Free(eager_deoptimization_entry_code_);
   eager_deoptimization_entry_code_ = NULL;
-  Isolate::Current()->memory_allocator()->Free(
-      lazy_deoptimization_entry_code_);
+  allocator_->Free(lazy_deoptimization_entry_code_);
   lazy_deoptimization_entry_code_ = NULL;
 
   DeoptimizingCodeListNode* current = deoptimizing_code_list_;
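The DeoptimizerData hunk above also swaps assignment-style construction with an Isolate::Current() lookup for an initializer list fed by an injected MemoryAllocator, which the destructor then reuses. A stripped-down sketch of that constructor-injection shape, again with hypothetical stand-in types rather than the real V8 classes:

// allocator_injection_sketch.cc -- hypothetical stand-ins, not the real V8 types.
struct MemoryChunk {};

class MemoryAllocator {
 public:
  MemoryChunk* AllocateChunk() { return new MemoryChunk; }
  void Free(MemoryChunk* chunk) { delete chunk; }
};

class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator)
      : allocator_(allocator),
        eager_code_(allocator->AllocateChunk()),
        lazy_code_(allocator->AllocateChunk()) {}

  ~DeoptimizerData() {
    allocator_->Free(eager_code_);  // no Isolate::Current() needed here
    allocator_->Free(lazy_code_);
  }

 private:
  MemoryAllocator* allocator_;  // remembered for the destructor
  MemoryChunk* eager_code_;
  MemoryChunk* lazy_code_;
};

int main() {
  MemoryAllocator allocator;
  DeoptimizerData data(&allocator);  // the caller decides which allocator
  return 0;
}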
@@ -129,7 +124,6 @@ Deoptimizer* Deoptimizer::New(JSFunction* function,
                               Address from,
                               int fp_to_sp_delta,
                               Isolate* isolate) {
-  ASSERT(isolate == Isolate::Current());
   Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                              function,
                                              type,

@@ -158,7 +152,6 @@ size_t Deoptimizer::GetMaxDeoptTableSize() {
 
 
 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
-  ASSERT(isolate == Isolate::Current());
   Deoptimizer* result = isolate->deoptimizer_data()->current_;
   ASSERT(result != NULL);
   result->DeleteFrameDescriptions();

@@ -188,7 +181,6 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
     JavaScriptFrame* frame,
     int jsframe_index,
     Isolate* isolate) {
-  ASSERT(isolate == Isolate::Current());
   ASSERT(frame->is_optimized());
   ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
 

@@ -274,7 +266,6 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
 
 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                                  Isolate* isolate) {
-  ASSERT(isolate == Isolate::Current());
   ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
   delete info;
   isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;

@@ -319,11 +310,12 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext(
 
 
 void Deoptimizer::VisitAllOptimizedFunctions(
+    Isolate* isolate,
     OptimizedFunctionVisitor* visitor) {
   AssertNoAllocation no_allocation;
 
   // Run through the list of all native contexts and deoptimize.
-  Object* context = Isolate::Current()->heap()->native_contexts_list();
+  Object* context = isolate->heap()->native_contexts_list();
   while (!context->IsUndefined()) {
     VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);

@@ -394,7 +386,7 @@ class DeoptimizeWithMatchingCodeFilter : public OptimizedFunctionFilter {
 };
 
 
-void Deoptimizer::DeoptimizeAll() {
+void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
   AssertNoAllocation no_allocation;
 
   if (FLAG_trace_deopt) {

@@ -402,7 +394,7 @@ void Deoptimizer::DeoptimizeAll() {
   }
 
   DeoptimizeAllFilter filter;
-  DeoptimizeAllFunctionsWith(&filter);
+  DeoptimizeAllFunctionsWith(isolate, &filter);
 }
 
 

@@ -456,11 +448,12 @@ void Deoptimizer::DeoptimizeAllFunctionsForContext(
 }
 
 
-void Deoptimizer::DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter) {
+void Deoptimizer::DeoptimizeAllFunctionsWith(Isolate* isolate,
+                                             OptimizedFunctionFilter* filter) {
   AssertNoAllocation no_allocation;
 
   // Run through the list of all native contexts and deoptimize.
-  Object* context = Isolate::Current()->heap()->native_contexts_list();
+  Object* context = isolate->heap()->native_contexts_list();
   while (!context->IsUndefined()) {
     DeoptimizeAllFunctionsForContext(Context::cast(context), filter);
     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);

@@ -640,30 +633,26 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                             GetEntryMode mode) {
   ASSERT(id >= 0);
   if (id >= kMaxNumberOfEntries) return NULL;
-  MemoryChunk* base = NULL;
   if (mode == ENSURE_ENTRY_CODE) {
     EnsureCodeForDeoptimizationEntry(isolate, type, id);
   } else {
     ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
   }
   DeoptimizerData* data = isolate->deoptimizer_data();
-  if (type == EAGER) {
-    base = data->eager_deoptimization_entry_code_;
-  } else {
-    base = data->lazy_deoptimization_entry_code_;
-  }
+  MemoryChunk* base = (type == EAGER)
+      ? data->eager_deoptimization_entry_code_
+      : data->lazy_deoptimization_entry_code_;
   return base->area_start() + (id * table_entry_size_);
 }
 
 
-int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
-  MemoryChunk* base = NULL;
-  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
-  if (type == EAGER) {
-    base = data->eager_deoptimization_entry_code_;
-  } else {
-    base = data->lazy_deoptimization_entry_code_;
-  }
+int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
+                                     Address addr,
+                                     BailoutType type) {
+  DeoptimizerData* data = isolate->deoptimizer_data();
+  MemoryChunk* base = (type == EAGER)
+      ? data->eager_deoptimization_entry_code_
+      : data->lazy_deoptimization_entry_code_;
   Address start = base->area_start();
   if (base == NULL ||
       addr < start ||

@@ -2135,7 +2124,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
   masm.GetCode(&desc);
   ASSERT(!RelocInfo::RequiresRelocation(desc));
 
-  MemoryChunk* chunk = type == EAGER
+  MemoryChunk* chunk = (type == EAGER)
       ? data->eager_deoptimization_entry_code_
       : data->lazy_deoptimization_entry_code_;
   ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=

@@ -2155,7 +2144,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
 void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
                                                  Code* code) {
   SharedFunctionInfo* shared = function->shared();
-  Object* undefined = Isolate::Current()->heap()->undefined_value();
+  Object* undefined = function->GetHeap()->undefined_value();
   Object* current = function;
 
   while (current != undefined) {

@@ -2281,10 +2270,9 @@ int32_t TranslationIterator::Next() {
 }
 
 
-Handle<ByteArray> TranslationBuffer::CreateByteArray() {
+Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
   int length = contents_.length();
-  Handle<ByteArray> result =
-      Isolate::Current()->factory()->NewByteArray(length, TENURED);
+  Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
   memcpy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
   return result;
 }

@@ -2479,7 +2467,7 @@ const char* Translation::StringFor(Opcode opcode) {
 
 
 DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+  GlobalHandles* global_handles = code->GetIsolate()->global_handles();
   // Globalize the code object and make it weak.
   code_ = Handle<Code>::cast(global_handles->Create(code));
   global_handles->MakeWeak(reinterpret_cast<Object**>(code_.location()),

@@ -2490,7 +2478,7 @@ DeoptimizingCodeListNode::DeoptimizingCodeListNode(Code* code): next_(NULL) {
 
 
 DeoptimizingCodeListNode::~DeoptimizingCodeListNode() {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
+  GlobalHandles* global_handles = code_->GetIsolate()->global_handles();
   global_handles->Destroy(reinterpret_cast<Object**>(code_.location()));
 }
 

@@ -2656,7 +2644,7 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
   expression_stack_ = new Object*[expression_count_];
   // Get the source position using the unoptimized code.
   Address pc = reinterpret_cast<Address>(output_frame->GetPc());
-  Code* code = Code::cast(Isolate::Current()->heap()->FindCodeObject(pc));
+  Code* code = Code::cast(deoptimizer->isolate()->heap()->FindCodeObject(pc));
   source_position_ = code->SourcePosition(pc);
 
   for (int i = 0; i < expression_count_; i++) {
@@ -100,7 +100,7 @@ class Deoptimizer;
 
 class DeoptimizerData {
  public:
-  DeoptimizerData();
+  explicit DeoptimizerData(MemoryAllocator* allocator);
   ~DeoptimizerData();
 
 #ifdef ENABLE_DEBUGGER_SUPPORT

@@ -111,6 +111,7 @@ class DeoptimizerData {
   void RemoveDeoptimizingCode(Code* code);
 
  private:
+  MemoryAllocator* allocator_;
   int eager_deoptimization_entry_code_entries_;
   int lazy_deoptimization_entry_code_entries_;
   MemoryChunk* eager_deoptimization_entry_code_;

@@ -190,11 +191,12 @@ class Deoptimizer : public Malloced {
   static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code);
 
   // Deoptimize all functions in the heap.
-  static void DeoptimizeAll();
+  static void DeoptimizeAll(Isolate* isolate);
 
   static void DeoptimizeGlobalObject(JSObject* object);
 
-  static void DeoptimizeAllFunctionsWith(OptimizedFunctionFilter* filter);
+  static void DeoptimizeAllFunctionsWith(Isolate* isolate,
+                                         OptimizedFunctionFilter* filter);
 
   static void DeoptimizeAllFunctionsForContext(
       Context* context, OptimizedFunctionFilter* filter);

@@ -202,7 +204,8 @@ class Deoptimizer : public Malloced {
   static void VisitAllOptimizedFunctionsForContext(
       Context* context, OptimizedFunctionVisitor* visitor);
 
-  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);
+  static void VisitAllOptimizedFunctions(Isolate* isolate,
+                                         OptimizedFunctionVisitor* visitor);
 
   // The size in bytes of the code required at a lazy deopt patch site.
   static int patch_size();

@@ -259,7 +262,9 @@ class Deoptimizer : public Malloced {
                                        int id,
                                        BailoutType type,
                                        GetEntryMode mode = ENSURE_ENTRY_CODE);
-  static int GetDeoptimizationId(Address addr, BailoutType type);
+  static int GetDeoptimizationId(Isolate* isolate,
+                                 Address addr,
+                                 BailoutType type);
   static int GetOutputInfo(DeoptimizationOutputData* data,
                            BailoutId node_id,
                            SharedFunctionInfo* shared);

@@ -321,6 +326,8 @@ class Deoptimizer : public Malloced {
                                      BailoutType type,
                                      int max_entry_id);
 
+  Isolate* isolate() const { return isolate_; }
+
  private:
   static const int kMinNumberOfEntries = 64;
   static const int kMaxNumberOfEntries = 16384;

@@ -604,7 +611,7 @@ class TranslationBuffer BASE_EMBEDDED {
   int CurrentIndex() const { return contents_.length(); }
   void Add(int32_t value, Zone* zone);
 
-  Handle<ByteArray> CreateByteArray();
+  Handle<ByteArray> CreateByteArray(Factory* factory);
 
  private:
  ZoneList<uint8_t> contents_;
@@ -286,9 +286,13 @@ static int DecodeIt(Isolate* isolate,
                isolate->deoptimizer_data() != NULL) {
       // A runtime entry reloinfo might be a deoptimization bailout.
       Address addr = relocinfo.target_address();
-      int id = Deoptimizer::GetDeoptimizationId(addr, Deoptimizer::EAGER);
+      int id = Deoptimizer::GetDeoptimizationId(isolate,
+                                                addr,
+                                                Deoptimizer::EAGER);
       if (id == Deoptimizer::kNotDeoptimizationEntry) {
-        id = Deoptimizer::GetDeoptimizationId(addr, Deoptimizer::LAZY);
+        id = Deoptimizer::GetDeoptimizationId(isolate,
+                                              addr,
+                                              Deoptimizer::LAZY);
         if (id == Deoptimizer::kNotDeoptimizationEntry) {
           out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
         } else {
@@ -383,7 +383,7 @@ bool LCodeGen::GenerateJumpTable() {
     bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
     Deoptimizer::BailoutType type =
         is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
-    int id = Deoptimizer::GetDeoptimizationId(entry, type);
+    int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {

@@ -953,7 +953,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
 
-  Handle<ByteArray> translations = translations_.CreateByteArray();
+  Handle<ByteArray> translations =
+      translations_.CreateByteArray(isolate()->factory());
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
@@ -2064,7 +2064,7 @@ bool Isolate::Init(Deserializer* des) {
     return false;
   }
 
-  deoptimizer_data_ = new DeoptimizerData;
+  deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
 
   const bool create_heap_objects = (des == NULL);
   if (create_heap_objects && !heap_.CreateHeapObjects()) {
@@ -1260,7 +1260,7 @@ static void DeoptimizeDependentFunctions(SharedFunctionInfo* function_info) {
   AssertNoAllocation no_allocation;
 
   DependentFunctionFilter filter(function_info);
-  Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
+  Deoptimizer::DeoptimizeAllFunctionsWith(function_info->GetIsolate(), &filter);
 }
 
 

@@ -1559,7 +1559,7 @@ static int EnumerateCompiledFunctions(Heap* heap,
   EnumerateOptimizedFunctionsVisitor visitor(sfis,
                                              code_objects,
                                              &compiled_funcs_count);
-  Deoptimizer::VisitAllOptimizedFunctions(&visitor);
+  Deoptimizer::VisitAllOptimizedFunctions(heap->isolate(), &visitor);
 
   return compiled_funcs_count;
 }
@@ -545,7 +545,7 @@ void MarkCompactCollector::ClearMarkbits() {
 void MarkCompactCollector::StartSweeperThreads() {
   sweeping_pending_ = true;
   for (int i = 0; i < FLAG_sweeper_threads; i++) {
-    heap()->isolate()->sweeper_threads()[i]->StartSweeping();
+    isolate()->sweeper_threads()[i]->StartSweeping();
   }
 }
 

@@ -553,7 +553,7 @@ void MarkCompactCollector::StartSweeperThreads() {
 void MarkCompactCollector::WaitUntilSweepingCompleted() {
   ASSERT(sweeping_pending_ == true);
   for (int i = 0; i < FLAG_sweeper_threads; i++) {
-    heap()->isolate()->sweeper_threads()[i]->WaitForSweeperThread();
+    isolate()->sweeper_threads()[i]->WaitForSweeperThread();
   }
   sweeping_pending_ = false;
   StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE));

@@ -567,7 +567,7 @@ intptr_t MarkCompactCollector::
     StealMemoryFromSweeperThreads(PagedSpace* space) {
   intptr_t freed_bytes = 0;
   for (int i = 0; i < FLAG_sweeper_threads; i++) {
-    freed_bytes += heap()->isolate()->sweeper_threads()[i]->StealMemory(space);
+    freed_bytes += isolate()->sweeper_threads()[i]->StealMemory(space);
   }
   space->AddToAccountingStats(freed_bytes);
   space->DecrementUnsweptFreeBytes(freed_bytes);

@@ -576,7 +576,7 @@ intptr_t MarkCompactCollector::
 
 
 bool MarkCompactCollector::AreSweeperThreadsActivated() {
-  return heap()->isolate()->sweeper_threads() != NULL;
+  return isolate()->sweeper_threads() != NULL;
 }
 
 

@@ -587,14 +587,14 @@ bool MarkCompactCollector::IsConcurrentSweepingInProgress() {
 
 void MarkCompactCollector::MarkInParallel() {
   for (int i = 0; i < FLAG_marking_threads; i++) {
-    heap()->isolate()->marking_threads()[i]->StartMarking();
+    isolate()->marking_threads()[i]->StartMarking();
   }
 }
 
 
 void MarkCompactCollector::WaitUntilMarkingCompleted() {
   for (int i = 0; i < FLAG_marking_threads; i++) {
-    heap()->isolate()->marking_threads()[i]->WaitForMarkingThread();
+    isolate()->marking_threads()[i]->WaitForMarkingThread();
   }
 }
 

@@ -952,10 +952,10 @@ void MarkCompactCollector::Finish() {
   // force lazy re-initialization of it. This must be done after the
   // GC, because it relies on the new address of certain old space
   // objects (empty string, illegal builtin).
-  heap()->isolate()->stub_cache()->Clear();
+  isolate()->stub_cache()->Clear();
 
   DeoptimizeMarkedCodeFilter filter;
-  Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
+  Deoptimizer::DeoptimizeAllFunctionsWith(isolate(), &filter);
 }
 
 

@@ -1932,7 +1932,7 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 
 void MarkCompactCollector::MarkImplicitRefGroups() {
   List<ImplicitRefGroup*>* ref_groups =
-      heap()->isolate()->global_handles()->implicit_ref_groups();
+      isolate()->global_handles()->implicit_ref_groups();
 
   int last = 0;
   for (int i = 0; i < ref_groups->length(); i++) {

@@ -2052,7 +2052,7 @@ void MarkCompactCollector::ProcessExternalMarking(RootMarkingVisitor* visitor) {
   bool work_to_do = true;
   ASSERT(marking_deque_.IsEmpty());
   while (work_to_do) {
-    heap()->isolate()->global_handles()->IterateObjectGroups(
+    isolate()->global_handles()->IterateObjectGroups(
         visitor, &IsUnmarkedHeapObjectWithHeap);
     MarkImplicitRefGroups();
     work_to_do = !marking_deque_.IsEmpty();

@@ -2066,7 +2066,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   // The recursive GC marker detects when it is nearing stack overflow,
   // and switches to a different marking system. JS interrupts interfere
   // with the C stack limit check.
-  PostponeInterruptsScope postpone(heap()->isolate());
+  PostponeInterruptsScope postpone(isolate());
 
   bool incremental_marking_overflowed = false;
   IncrementalMarking* incremental_marking = heap_->incremental_marking();

@@ -2520,7 +2520,7 @@ void MarkCompactCollector::MigrateObject(Address dst,
       }
     }
   } else if (dest == CODE_SPACE) {
-    PROFILE(heap()->isolate(), CodeMoveEvent(src, dst));
+    PROFILE(isolate(), CodeMoveEvent(src, dst));
     heap()->MoveBlock(dst, src, size);
     SlotsBuffer::AddTo(&slots_buffer_allocator_,
                        &migration_slots_buffer_,

@@ -3940,15 +3940,15 @@ void MarkCompactCollector::SweepSpaces() {
 
 void MarkCompactCollector::EnableCodeFlushing(bool enable) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  if (heap()->isolate()->debug()->IsLoaded() ||
-      heap()->isolate()->debug()->has_break_points()) {
+  if (isolate()->debug()->IsLoaded() ||
+      isolate()->debug()->has_break_points()) {
     enable = false;
   }
 #endif
 
   if (enable) {
     if (code_flusher_ != NULL) return;
-    code_flusher_ = new CodeFlusher(heap()->isolate());
+    code_flusher_ = new CodeFlusher(isolate());
   } else {
     if (code_flusher_ == NULL) return;
     code_flusher_->EvictAllCandidates();

@@ -3974,6 +3974,11 @@ void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
 }
 
 
+Isolate* MarkCompactCollector::isolate() const {
+  return heap_->isolate();
+}
+
+
 void MarkCompactCollector::Initialize() {
   MarkCompactMarkingVisitor::Initialize();
   IncrementalMarking::Initialize();

@@ -4055,7 +4060,7 @@ void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) {
 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
   ASSERT(heap()->gc_state() == Heap::MARK_COMPACT);
   if (is_compacting()) {
-    Code* host = heap()->isolate()->inner_pointer_to_code_cache()->
+    Code* host = isolate()->inner_pointer_to_code_cache()->
         GcSafeFindCodeForInnerPointer(pc);
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {

@@ -580,6 +580,7 @@ class MarkCompactCollector {
   static inline bool IsMarked(Object* obj);
 
   inline Heap* heap() const { return heap_; }
+  inline Isolate* isolate() const;
 
   CodeFlusher* code_flusher() { return code_flusher_; }
   inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
@@ -360,7 +360,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
     bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
     Deoptimizer::BailoutType type =
         is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
-    int id = Deoptimizer::GetDeoptimizationId(entry, type);
+    int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {

@@ -897,7 +897,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
 
-  Handle<ByteArray> translations = translations_.CreateByteArray();
+  Handle<ByteArray> translations =
+      translations_.CreateByteArray(isolate()->factory());
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
@@ -3479,6 +3479,7 @@ bool Map::is_observed() {
 
 void Map::NotifyLeafMapLayoutChange() {
   dependent_code()->DeoptimizeDependentCodeGroup(
+      GetIsolate(),
       DependentCode::kPrototypeCheckGroup);
 }
 

@@ -9107,7 +9107,7 @@ void Code::PrintDeoptLocation(int bailout_id) {
       last_comment = reinterpret_cast<const char*>(info->data());
     } else if (last_comment != NULL &&
                bailout_id == Deoptimizer::GetDeoptimizationId(
-                   info->target_address(), Deoptimizer::EAGER)) {
+                   GetIsolate(), info->target_address(), Deoptimizer::EAGER)) {
       CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
       PrintF(" %s\n", last_comment);
       return;

@@ -9466,7 +9466,9 @@ void Code::Disassemble(const char* name, FILE* out) {
   }
 
   PrintF("RelocInfo (size = %d)\n", relocation_size());
-  for (RelocIterator it(this); !it.done(); it.next()) it.rinfo()->Print(out);
+  for (RelocIterator it(this); !it.done(); it.next()) {
+    it.rinfo()->Print(GetIsolate(), out);
+  }
   PrintF(out, "\n");
 }
 #endif  // ENABLE_DISASSEMBLER

@@ -9839,6 +9841,7 @@ class DeoptimizeDependentCodeFilter : public OptimizedFunctionFilter {
 
 
 void DependentCode::DeoptimizeDependentCodeGroup(
+    Isolate* isolate,
     DependentCode::DependencyGroup group) {
   AssertNoAllocation no_allocation_scope;
   DependentCode::GroupStartIndexes starts(this);

@@ -9861,7 +9864,7 @@ void DependentCode::DeoptimizeDependentCodeGroup(
   }
   set_number_of_entries(group, 0);
   DeoptimizeDependentCodeFilter filter;
-  Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
+  Deoptimizer::DeoptimizeAllFunctionsWith(isolate, &filter);
 }
 
 

@@ -4860,7 +4860,8 @@ class DependentCode: public FixedArray {
   static Handle<DependentCode> Insert(Handle<DependentCode> entries,
                                       DependencyGroup group,
                                       Handle<Code> value);
-  void DeoptimizeDependentCodeGroup(DependentCode::DependencyGroup group);
+  void DeoptimizeDependentCodeGroup(Isolate* isolate,
+                                    DependentCode::DependencyGroup group);
 
   // The following low-level accessors should only be used by this class
   // and the mark compact collector.
@@ -308,7 +308,7 @@ bool LCodeGen::GenerateJumpTable() {
     bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
     Deoptimizer::BailoutType type =
         is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
-    int id = Deoptimizer::GetDeoptimizationId(entry, type);
+    int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
     if (id == Deoptimizer::kNotDeoptimizationEntry) {
       Comment(";;; jump table entry %d.", i);
     } else {

@@ -808,7 +808,8 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
 
-  Handle<ByteArray> translations = translations_.CreateByteArray();
+  Handle<ByteArray> translations =
+      translations_.CreateByteArray(isolate()->factory());
   data->SetTranslationByteArray(*translations);
   data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
 
@@ -656,7 +656,8 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
                                          v8::Handle<v8::Object> exec_state,
                                          v8::Handle<v8::Object> event_data,
                                          v8::Handle<v8::Value> data) {
-  Debug* debug = v8::internal::Isolate::Current()->debug();
+  v8::internal::Isolate* isolate = v8::internal::Isolate::Current();
+  Debug* debug = isolate->debug();
   // When hitting a debug event listener there must be a break set.
   CHECK_NE(debug->break_id(), 0);
 

@@ -732,7 +733,7 @@ static void DebugEventBreakPointHitCount(v8::DebugEvent event,
     // Perform a full deoptimization when the specified number of
     // breaks have been hit.
    if (break_point_hit_count == break_point_hit_count_deoptimize) {
-      i::Deoptimizer::DeoptimizeAll();
+      i::Deoptimizer::DeoptimizeAll(isolate);
    }
   } else if (event == v8::AfterCompile && !compiled_script_data.IsEmpty()) {
     const int argc = 1;

@@ -983,7 +984,8 @@ static void DebugEventBreakMax(v8::DebugEvent event,
                                v8::Handle<v8::Object> exec_state,
                                v8::Handle<v8::Object> event_data,
                                v8::Handle<v8::Value> data) {
-  v8::internal::Debug* debug = v8::internal::Isolate::Current()->debug();
+  v8::internal::Isolate* isolate = v8::internal::Isolate::Current();
+  v8::internal::Debug* debug = isolate->debug();
   // When hitting a debug event listener there must be a break set.
   CHECK_NE(debug->break_id(), 0);
 

@@ -1014,7 +1016,7 @@ static void DebugEventBreakMax(v8::DebugEvent event,
       // Perform a full deoptimization when the specified number of
      // breaks have been hit.
       if (break_point_hit_count == break_point_hit_count_deoptimize) {
-        i::Deoptimizer::DeoptimizeAll();
+        i::Deoptimizer::DeoptimizeAll(isolate);
       }
     }
   }

@@ -7167,7 +7169,7 @@ static void DebugEventBreakDeoptimize(v8::DebugEvent event,
         v8::Handle<v8::String> function_name(result->ToString());
         function_name->WriteAscii(fn);
         if (strcmp(fn, "bar") == 0) {
-          i::Deoptimizer::DeoptimizeAll();
+          i::Deoptimizer::DeoptimizeAll(v8::internal::Isolate::Current());
          debug_event_break_deoptimize_done = true;
         }
       }