2012-01-18 16:16:11 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2015-08-20 07:44:00 +00:00
|
|
|
#include "src/isolate.h"
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#include <stdlib.h>
|
|
|
|
|
2014-10-14 08:43:33 +00:00
|
|
|
#include <fstream> // NOLINT(readability/streams)
|
2014-10-23 11:56:26 +00:00
|
|
|
#include <sstream>
|
2014-10-14 08:43:33 +00:00
|
|
|
|
2015-11-26 16:22:34 +00:00
|
|
|
#include "src/ast/ast.h"
|
|
|
|
#include "src/ast/scopeinfo.h"
|
2014-06-30 13:25:46 +00:00
|
|
|
#include "src/base/platform/platform.h"
|
2014-08-27 08:29:22 +00:00
|
|
|
#include "src/base/sys-info.h"
|
2014-06-30 13:25:46 +00:00
|
|
|
#include "src/base/utils/random-number-generator.h"
|
2014-09-29 07:29:14 +00:00
|
|
|
#include "src/basic-block-profiler.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/bootstrapper.h"
|
|
|
|
#include "src/codegen.h"
|
|
|
|
#include "src/compilation-cache.h"
|
2014-10-23 09:14:35 +00:00
|
|
|
#include "src/compilation-statistics.h"
|
2015-10-20 13:25:47 +00:00
|
|
|
#include "src/crankshaft/hydrogen.h"
|
2015-07-31 11:07:50 +00:00
|
|
|
#include "src/debug/debug.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/deoptimizer.h"
|
2016-03-17 13:14:11 +00:00
|
|
|
#include "src/external-reference-table.h"
|
2015-08-12 10:28:34 +00:00
|
|
|
#include "src/frames-inl.h"
|
2014-08-22 11:43:39 +00:00
|
|
|
#include "src/ic/stub-cache.h"
|
2015-07-23 14:21:26 +00:00
|
|
|
#include "src/interpreter/interpreter.h"
|
2015-09-01 09:25:19 +00:00
|
|
|
#include "src/isolate-inl.h"
|
2016-07-08 06:44:46 +00:00
|
|
|
#include "src/libsampler/sampler.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/log.h"
|
|
|
|
#include "src/messages.h"
|
2015-09-28 19:34:08 +00:00
|
|
|
#include "src/profiler/cpu-profiler.h"
|
2014-07-14 07:19:49 +00:00
|
|
|
#include "src/prototype.h"
|
2015-08-13 06:55:21 +00:00
|
|
|
#include "src/regexp/regexp-stack.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/runtime-profiler.h"
|
|
|
|
#include "src/simulator.h"
|
2016-03-01 14:42:57 +00:00
|
|
|
#include "src/snapshot/deserializer.h"
|
2015-08-20 07:44:00 +00:00
|
|
|
#include "src/v8.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/version.h"
|
|
|
|
#include "src/vm-state-inl.h"
|
2016-05-19 07:52:35 +00:00
|
|
|
#include "src/wasm/wasm-module.h"
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
// Highest thread id handed out so far; bumped atomically by
// ThreadId::AllocateThreadId(), so valid ids start at 1 (0 means "unset").
base::Atomic32 ThreadId::highest_thread_id_ = 0;
|
2011-04-11 23:46:22 +00:00
|
|
|
|
|
|
|
int ThreadId::AllocateThreadId() {
|
2014-06-05 12:14:47 +00:00
|
|
|
int new_id = base::NoBarrier_AtomicIncrement(&highest_thread_id_, 1);
|
2011-04-11 23:46:22 +00:00
|
|
|
return new_id;
|
|
|
|
}
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
|
2011-04-11 23:46:22 +00:00
|
|
|
int ThreadId::GetCurrentThreadId() {
|
2014-06-30 13:25:46 +00:00
|
|
|
int thread_id = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
|
2011-04-11 23:46:22 +00:00
|
|
|
if (thread_id == 0) {
|
|
|
|
thread_id = AllocateThreadId();
|
2014-06-30 13:25:46 +00:00
|
|
|
base::Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
|
2011-04-11 23:46:22 +00:00
|
|
|
}
|
|
|
|
return thread_id;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
|
|
|
|
// Default constructor: clears all fields (see InitializeInternal()).
// Thread-specific state (simulator, thread id) is bound later by
// Initialize().
ThreadLocalTop::ThreadLocalTop() {
  InitializeInternal();
}
|
|
|
|
|
|
|
|
|
|
|
|
// Resets every field of the per-thread top-of-stack record to its neutral
// value. Shared by the constructor and Initialize().
void ThreadLocalTop::InitializeInternal() {
  // Entry-frame bookkeeping for calls crossing the C++/JS boundary.
  c_entry_fp_ = 0;
  c_function_ = 0;
  handler_ = 0;
#ifdef USE_SIMULATOR
  simulator_ = NULL;
#endif
  js_entry_sp_ = NULL;
  external_callback_scope_ = NULL;
  current_vm_state_ = EXTERNAL;
  try_catch_handler_ = NULL;
  context_ = NULL;
  thread_id_ = ThreadId::Invalid();
  external_caught_exception_ = false;
  failed_access_check_callback_ = NULL;
  save_context_ = NULL;
  promise_on_stack_ = NULL;

  // These members are re-initialized later after deserialization
  // is complete.
  pending_exception_ = NULL;
  rethrowing_message_ = false;
  pending_message_obj_ = NULL;
  scheduled_exception_ = NULL;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Fully initializes the record for the calling thread: clears all fields,
// then binds the thread-specific simulator (when simulating) and thread id.
void ThreadLocalTop::Initialize() {
  InitializeInternal();
#ifdef USE_SIMULATOR
  simulator_ = Simulator::current(isolate_);
#endif
  thread_id_ = ThreadId::Current();
}
|
|
|
|
|
|
|
|
|
2014-08-13 11:14:35 +00:00
|
|
|
// Releases per-thread resources before the record is discarded.
void ThreadLocalTop::Free() {
  // Match unmatched PopPromise calls.
  while (promise_on_stack_) isolate_->PopPromise();
}
|
|
|
|
|
|
|
|
|
2014-06-30 13:25:46 +00:00
|
|
|
// Process-wide TLS keys; created once in InitializeOncePerProcess() and used
// to locate the current Isolate, thread id, and per-isolate thread data.
base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::thread_id_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
// Guards all access to thread_data_table_ below.
base::LazyMutex Isolate::thread_data_table_mutex_ = LAZY_MUTEX_INITIALIZER;
// Global (isolate, thread) -> PerIsolateThreadData map; allocated lazily in
// InitializeOncePerProcess().
Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
base::Atomic32 Isolate::isolate_counter_ = 0;
#if DEBUG
// Debug-only flag recording that isolate_key_ has been created.
base::Atomic32 Isolate::isolate_key_created_ = 0;
#endif
|
2012-03-30 14:30:46 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Returns the PerIsolateThreadData for (this isolate, calling thread),
// creating and registering a new entry under the table lock if none exists.
Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* data = NULL;
  {
    base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
    data = thread_data_table_->Lookup(this, thread_id);
    if (data == NULL) {
      data = new PerIsolateThreadData(this, thread_id);
      thread_data_table_->Insert(data);
    }
    DCHECK(thread_data_table_->Lookup(this, thread_id) == data);
  }
  return data;
}
|
|
|
|
|
|
|
|
|
Add Isolate::DiscardThreadSpecificMetadata method to embedder API.
If many threads use the same Isolate (or many Isolates) and then
terminate, their PerIsolateThreadData objects are never cleaned
up, resulting in a slow memory leak and, worse, the
PerIsolateThreadData chain getting larger and larger, adversely
affecting performance.
In this situation, embedders will now be encouraged to apply
DiscardThreadSpecificMetadata against any Isolate a thread is
done with, especially if the thread is about to terminate.
Note that it is harmless to run DiscardThreadSpecificMetadata
against an Isolate for which a thread has no thread data and
per-Isolate thread data can be reestablished if a thread starts
using an Isolate again after running DiscardThreadSpecificMetadata
against it.
It is, however, an embedder error to run
DiscardThreadSpecificMetadata against an Isolate in a thread with a
Locker for that Isolate on the stack, or against an entered Isolate.
This change cannot cause any change in behavior in existing apps,
as the only added code can only be reached via the new
DiscardThreadSpecificMetadata method.
R=Jakob, jochen
BUG=
Review URL: https://codereview.chromium.org/1522703002
Cr-Commit-Position: refs/heads/master@{#32909}
2015-12-16 15:49:28 +00:00
|
|
|
// Removes the calling thread's PerIsolateThreadData for this isolate, if any.
// Safe to call when the thread never touched this isolate (no-op), and the
// data can be re-created later via FindOrAllocatePerThreadDataForThisThread().
void Isolate::DiscardPerThreadDataForThisThread() {
  int thread_id_int = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  // A zero TLS value means this thread was never assigned an id, so it can
  // have no per-isolate data to discard.
  if (thread_id_int) {
    ThreadId thread_id = ThreadId(thread_id_int);
    // Must not be called while this thread holds the Locker for this isolate.
    DCHECK(!thread_manager_->mutex_owner_.Equals(thread_id));
    base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
    PerIsolateThreadData* per_thread =
        thread_data_table_->Lookup(this, thread_id);
    if (per_thread) {
      DCHECK(!per_thread->thread_state_);
      thread_data_table_->Remove(per_thread);
    }
  }
}
|
|
|
|
|
|
|
|
|
2011-05-05 18:55:31 +00:00
|
|
|
// Convenience wrapper: looks up the calling thread's data for this isolate.
// Returns NULL when the thread has no data registered.
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
  return FindPerThreadDataForThread(ThreadId::Current());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Looks up the PerIsolateThreadData for (this isolate, thread_id) under the
// table lock. Returns NULL when no entry exists.
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
    ThreadId thread_id) {
  base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
  return thread_data_table_->Lookup(this, thread_id);
}
|
|
|
|
|
|
|
|
|
2014-09-19 08:01:35 +00:00
|
|
|
// One-time process setup: creates the TLS keys and the global thread-data
// table. Must run before any Isolate is used; calling it twice trips the
// CHECK below.
void Isolate::InitializeOncePerProcess() {
  base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
  CHECK(thread_data_table_ == NULL);
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  // Record (for debug assertions elsewhere) that the key now exists.
  base::NoBarrier_Store(&isolate_key_created_, 1);
#endif
  thread_id_key_ = base::Thread::CreateThreadLocalKey();
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
  thread_data_table_ = new Isolate::ThreadDataTable();
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
// Returns the pre-recorded isolate address registered under the given id.
Address Isolate::get_address_from_id(Isolate::AddressId id) {
  return isolate_addresses_[id];
}
|
|
|
|
|
|
|
|
|
|
|
|
// Visits the object roots of an archived thread state stored in raw memory,
// returning a pointer just past the consumed ThreadLocalTop record.
char* Isolate::Iterate(ObjectVisitor* v, char* thread_storage) {
  ThreadLocalTop* archived = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, archived);
  return thread_storage + sizeof(ThreadLocalTop);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Presents the raw archived thread state to a ThreadVisitor.
void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  v->VisitThread(this, reinterpret_cast<ThreadLocalTop*>(t));
}
|
|
|
|
|
|
|
|
|
|
|
|
// Visits all heap-object roots held by the given thread state: pending and
// scheduled exceptions, the current context, every live TryCatch block, and
// all object pointers on the thread's native execution stack.
void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitPointer(&thread->pending_exception_);
  v->VisitPointer(&(thread->pending_message_obj_));
  // context_ is stored as a typed pointer; reinterpret it for the visitor.
  v->VisitPointer(bit_cast<Object**>(&(thread->context_)));
  v->VisitPointer(&thread->scheduled_exception_);

  // Walk the chain of active v8::TryCatch handlers and visit the exception
  // and message objects each one retains.
  for (v8::TryCatch* block = thread->try_catch_handler();
       block != NULL;
       block = block->next_) {
    v->VisitPointer(bit_cast<Object**>(&(block->exception_)));
    v->VisitPointer(bit_cast<Object**>(&(block->message_obj_)));
  }

  // Iterate over pointers on native execution stack.
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Visits the roots reachable from this isolate's current thread state.
void Isolate::Iterate(ObjectVisitor* v) {
  Iterate(v, thread_local_top());
}
|
|
|
|
|
2013-07-05 09:52:11 +00:00
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
// Visits every object handle held in the isolate's list of deferred handle
// scopes (a singly-linked list headed at deferred_handles_head_).
void Isolate::IterateDeferredHandles(ObjectVisitor* visitor) {
  DeferredHandles* current = deferred_handles_head_;
  while (current != NULL) {
    current->Iterate(visitor);
    current = current->next_;
  }
}
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
|
2013-04-23 09:23:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
bool Isolate::IsDeferredHandle(Object** handle) {
|
|
|
|
// Each DeferredHandles instance keeps the handles to one job in the
|
2013-08-22 16:14:37 +00:00
|
|
|
// concurrent recompilation queue, containing a list of blocks. Each block
|
2013-04-23 09:23:07 +00:00
|
|
|
// contains kHandleBlockSize handles except for the first block, which may
|
|
|
|
// not be fully filled.
|
|
|
|
// We iterate through all the blocks to see whether the argument handle
|
|
|
|
// belongs to one of the blocks. If so, it is deferred.
|
|
|
|
for (DeferredHandles* deferred = deferred_handles_head_;
|
|
|
|
deferred != NULL;
|
|
|
|
deferred = deferred->next_) {
|
|
|
|
List<Object**>* blocks = &deferred->blocks_;
|
|
|
|
for (int i = 0; i < blocks->length(); i++) {
|
|
|
|
Object** block_limit = (i == 0) ? deferred->first_block_limit_
|
|
|
|
: blocks->at(i) + kHandleBlockSize;
|
|
|
|
if (blocks->at(i) <= handle && handle < block_limit) return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
#endif // DEBUG
|
|
|
|
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
// Pushes a v8::TryCatch onto this thread's handler chain (it becomes the
// innermost handler).
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->set_try_catch_handler(that);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Pops a v8::TryCatch from this thread's handler chain. Handlers must be
// unregistered in strict LIFO order (enforced by the DCHECK).
void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler() == that);
  thread_local_top()->set_try_catch_handler(that->next_);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Builds a stack-trace string for the current execution state.
// stack_trace_nesting_level_ guards against re-entry: if printing the stack
// itself faults and re-enters this function, the partial trace accumulated so
// far is flushed to stdout instead; a third entry aborts the process.
Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    // Remember the accumulator so a nested (faulting) call can dump it.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = NULL;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    // Re-entered once: a fault occurred while printing the stack.
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    // Re-entered twice: give up entirely.
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}
|
|
|
|
|
|
|
|
|
2015-05-12 15:48:04 +00:00
|
|
|
// Fatal-error helper: prints the current stack trace (truncated to 32 KB)
// together with the caller-supplied magic markers and pointers to stderr,
// then aborts. The magic values make the dump findable in crash reports.
void Isolate::PushStackTraceAndDie(unsigned int magic, void* ptr1, void* ptr2,
                                   unsigned int magic2) {
  const int kMaxStackTraceSize = 32 * KB;
  Handle<String> trace = StackTraceString();
  uint8_t buffer[kMaxStackTraceSize];
  // Reserve one byte for the terminating NUL.
  int length = Min(kMaxStackTraceSize - 1, trace->length());
  String::WriteToFlat(*trace, buffer, 0, length);
  buffer[length] = '\0';
  // TODO(dcarney): convert buffer to utf8?
  base::OS::PrintError("Stacktrace (%x-%x) %p %p: %s\n", magic, magic2, ptr1,
                       ptr2, reinterpret_cast<char*>(buffer));
  base::OS::Abort();
}
|
|
|
|
|
2016-02-23 19:38:50 +00:00
|
|
|
// Ensures |elements| can hold at least |new_size| entries, reallocating to a
// larger holey FixedArray and copying the first |cur_position| entries when
// needed. Returns the (possibly new) array.
static Handle<FixedArray> MaybeGrow(Isolate* isolate,
                                    Handle<FixedArray> elements,
                                    int cur_position, int new_size) {
  if (new_size > elements->length()) {
    int new_capacity = JSObject::NewElementsCapacity(elements->length());
    Handle<FixedArray> new_elements =
        isolate->factory()->NewFixedArrayWithHoles(new_capacity);
    for (int i = 0; i < cur_position; i++) {
      new_elements->set(i, elements->get(i));
    }
    elements = new_elements;
  }
  DCHECK(new_size <= elements->length());
  return elements;
}
|
2012-11-12 14:54:29 +00:00
|
|
|
|
2016-07-11 07:48:15 +00:00
|
|
|
// Helper that decides, frame by frame, which stack frames belong in a
// captured stack trace, and counts the leading run of sloppy-mode frames
// (which CaptureSimpleStackTrace stores in the result's first slot).
class StackTraceHelper {
 public:
  StackTraceHelper(Isolate* isolate, FrameSkipMode mode, Handle<Object> caller)
      : isolate_(isolate),
        mode_(mode),
        caller_(caller),
        skip_next_frame_(true) {
    // The caller parameter can be used to skip a specific set of frames in the
    // stack trace. It can be:
    // * null, when called from a standard error constructor. We unconditionally
    //   skip the top frame, which is always a builtin-exit frame for the error
    //   constructor builtin.
    // * a JSFunction, when called by the user from Error.captureStackTrace().
    //   We skip each frame until encountering the caller function.
    // * For any other value, all frames are included in the trace.
    switch (mode_) {
      case SKIP_FIRST:
        DCHECK(caller_.is_null());
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }
    encountered_strict_function_ = false;
    sloppy_frames_ = 0;
  }

  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function. The number of sloppy frames is stored as first element in
  // the result array.
  void CountSloppyFrames(JSFunction* fun) {
    if (!encountered_strict_function_) {
      if (is_strict(fun->shared()->language_mode())) {
        encountered_strict_function_ = true;
      } else {
        sloppy_frames_++;
      }
    }
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(JSFunction* fun) {
    return ShouldIncludeFrame(fun) && IsNotInNativeScript(fun) &&
           IsInSameSecurityContext(fun);
  }

  // Number of leading sloppy-mode frames counted so far.
  int sloppy_frames() const { return sloppy_frames_; }

 private:
  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(JSFunction* fun) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        // Frames are skipped until the user-provided caller function is seen;
        // the caller frame itself is also excluded.
        if (skip_next_frame_ && (fun == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
    return false;
  }

  bool IsNotInNativeScript(JSFunction* fun) {
    // Functions defined in native scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces && fun->shared()->IsBuiltin()) {
      return fun->shared()->native();
    }
    return true;
  }

  // Frames from a different security context are hidden from the trace.
  bool IsInSameSecurityContext(JSFunction* fun) {
    return isolate_->context()->HasSameSecurityTokenAs(fun->context());
  }

  Isolate* isolate_;

  const FrameSkipMode mode_;
  const Handle<Object> caller_;
  bool skip_next_frame_;

  int sloppy_frames_;
  bool encountered_strict_function_;
};
|
|
|
|
|
2016-04-04 08:35:12 +00:00
|
|
|
// Walks the current execution stack and records, for every visible JS, WASM
// or builtin-exit frame, a (receiver, function, code, offset) quadruple into
// a FixedArray. Slot 0 holds the number of leading sloppy-mode frames. The
// number of frames captured is bounded by Error.stackTraceLimit; returns
// undefined when that property is not a number.
Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  DisallowJavascriptExecution no_js(this);

  // Get stack trace limit.
  Handle<JSObject> error = error_function();
  Handle<String> stackTraceLimit =
      factory()->InternalizeUtf8String("stackTraceLimit");
  DCHECK(!stackTraceLimit.is_null());
  Handle<Object> stack_trace_limit =
      JSReceiver::GetDataProperty(error, stackTraceLimit);
  if (!stack_trace_limit->IsNumber()) return factory()->undefined_value();
  int limit = FastD2IChecked(stack_trace_limit->Number());
  limit = Max(limit, 0);  // Ensure that limit is not negative.

  // Start small; MaybeGrow() expands the array on demand (4 slots per frame
  // plus the sloppy-frame-count slot).
  int initial_size = Min(limit, 10);
  Handle<FixedArray> elements =
      factory()->NewFixedArrayWithHoles(initial_size * 4 + 1);

  StackTraceHelper helper(this, mode, caller);

  // First element is reserved to store the number of sloppy frames.
  int cursor = 1;
  int frames_seen = 0;
  for (StackFrameIterator iter(this); !iter.done() && frames_seen < limit;
       iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN: {
        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
        // Set initial size to the maximum inlining level + 1 for the outermost
        // function.
        List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
        js_frame->Summarize(&frames);
        // An optimized physical frame may summarize to several inlined
        // logical frames; record them innermost-last.
        for (int i = frames.length() - 1; i >= 0; i--) {
          Handle<JSFunction> fun = frames[i].function();

          // Filter out internal frames that we do not want to show.
          if (!helper.IsVisibleInStackTrace(*fun)) continue;
          helper.CountSloppyFrames(*fun);

          Handle<Object> recv = frames[i].receiver();
          Handle<AbstractCode> abstract_code = frames[i].abstract_code();
          if (frame->type() == StackFrame::BUILTIN) {
            // Help CallSite::IsConstructor correctly detect hand-written
            // construct stubs.
            Code* code = Code::cast(*abstract_code);
            if (code->is_construct_stub()) {
              recv = handle(heap()->call_site_constructor_symbol(), this);
            }
          }
          Handle<Smi> offset(Smi::FromInt(frames[i].code_offset()), this);

          elements = MaybeGrow(this, elements, cursor, cursor + 4);
          elements->set(cursor++, *recv);
          elements->set(cursor++, *fun);
          elements->set(cursor++, *abstract_code);
          elements->set(cursor++, *offset);
          frames_seen++;
        }
      } break;

      case StackFrame::BUILTIN_EXIT: {
        BuiltinExitFrame* exit_frame = BuiltinExitFrame::cast(frame);
        Handle<JSFunction> fun = handle(exit_frame->function(), this);

        // Filter out internal frames that we do not want to show.
        if (!helper.IsVisibleInStackTrace(*fun)) continue;
        helper.CountSloppyFrames(*fun);

        Handle<Code> code = handle(exit_frame->LookupCode(), this);
        int offset =
            static_cast<int>(exit_frame->pc() - code->instruction_start());

        // In order to help CallSite::IsConstructor detect builtin
        // constructors, we reuse the receiver field to pass along a special
        // symbol.
        Handle<Object> recv;
        if (exit_frame->IsConstructor()) {
          recv = handle(heap()->call_site_constructor_symbol(), this);
        } else {
          recv = handle(exit_frame->receiver(), this);
        }

        elements = MaybeGrow(this, elements, cursor, cursor + 4);
        elements->set(cursor++, *recv);
        elements->set(cursor++, *fun);
        elements->set(cursor++, *code);
        elements->set(cursor++, Smi::FromInt(offset));
        frames_seen++;
      } break;

      case StackFrame::WASM: {
        // WASM frames store the wasm object and function index in place of
        // the receiver and JSFunction.
        WasmFrame* wasm_frame = WasmFrame::cast(frame);
        Code* code = wasm_frame->unchecked_code();
        Handle<AbstractCode> abstract_code =
            Handle<AbstractCode>(AbstractCode::cast(code));
        int offset =
            static_cast<int>(wasm_frame->pc() - code->instruction_start());
        elements = MaybeGrow(this, elements, cursor, cursor + 4);
        elements->set(cursor++, wasm_frame->wasm_obj());
        elements->set(cursor++, Smi::FromInt(wasm_frame->function_index()));
        elements->set(cursor++, *abstract_code);
        elements->set(cursor++, Smi::FromInt(offset));
        frames_seen++;
      } break;

      default:
        break;
    }
  }
  elements->set(0, Smi::FromInt(helper.sloppy_frames()));
  elements->Shrink(cursor);
  Handle<JSArray> result = factory()->NewJSArrayWithElements(elements);
  result->set_length(Smi::FromInt(cursor));
  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  return result;
}
|
|
|
|
|
2016-04-04 08:35:12 +00:00
|
|
|
// If the embedder enabled stack-trace capture for uncaught exceptions, stores
// a detailed structured trace on |error_object| under a private symbol.
// Returns the error object, or an empty handle if the property set throws.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    // TODO(jgruber): Set back to STRICT once we have eagerly formatted traces.
    RETURN_ON_EXCEPTION(
        this, JSReceiver::SetProperty(error_object, key, stack_trace, SLOPPY),
        JSReceiver);
  }
  return error_object;
}
|
|
|
|
|
2016-04-04 08:35:12 +00:00
|
|
|
// Captures a simple (raw-frame) stack trace and stores it on |error_object|
// under a private symbol, for later string formatting. Returns the error
// object, or an empty handle if the property set throws.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  // TODO(jgruber): Set back to STRICT once we have eagerly formatted traces.
  RETURN_ON_EXCEPTION(
      this, JSReceiver::SetProperty(error_object, key, stack_trace, SLOPPY),
      JSReceiver);
  return error_object;
}
|
|
|
|
|
|
|
|
|
2014-10-28 13:24:36 +00:00
|
|
|
// Reads the detailed stack trace previously stored on |error_object| under
// the private detailed-stack-trace symbol. Returns an empty handle when the
// property is absent or not a JSArray.
Handle<JSArray> Isolate::GetDetailedStackTrace(Handle<JSObject> error_object) {
  Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(error_object, key_detailed);
  if (!property->IsJSArray()) return Handle<JSArray>();
  return Handle<JSArray>::cast(property);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Helper that builds the per-frame JSObjects returned by
// Isolate::CaptureCurrentStackTrace. Which properties each frame object
// carries is decided once in the constructor: for every requested
// StackTrace option the corresponding property-name string is interned;
// a key handle left null means "do not emit this property".
class CaptureStackTraceHelper {
 public:
  CaptureStackTraceHelper(Isolate* isolate,
                          StackTrace::StackTraceOptions options)
      : isolate_(isolate) {
    // Intern one property-name string per requested option so the same
    // internalized string can be reused for every frame object.
    if (options & StackTrace::kColumnOffset) {
      column_key_ =
          factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("column"));
    }
    if (options & StackTrace::kLineNumber) {
      line_key_ =
          factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("lineNumber"));
    }
    if (options & StackTrace::kScriptId) {
      script_id_key_ =
          factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("scriptId"));
    }
    if (options & StackTrace::kScriptName) {
      script_name_key_ =
          factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("scriptName"));
    }
    if (options & StackTrace::kScriptNameOrSourceURL) {
      script_name_or_source_url_key_ = factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("scriptNameOrSourceURL"));
    }
    if (options & StackTrace::kFunctionName) {
      function_key_ = factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("functionName"));
    }
    if (options & StackTrace::kIsEval) {
      eval_key_ =
          factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("isEval"));
    }
    if (options & StackTrace::kIsConstructor) {
      constructor_key_ = factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("isConstructor"));
    }
  }

  // Builds a frame object for a summarized JavaScript frame. The source
  // position is recovered from the frame's abstract code + code offset.
  Handle<JSObject> NewStackFrameObject(FrameSummary& summ) {
    int position = summ.abstract_code()->SourcePosition(summ.code_offset());
    return NewStackFrameObject(summ.function(), position,
                               summ.is_constructor());
  }

  // Builds a frame object for a JS function at the given source position.
  // Only properties whose key was interned in the constructor are added.
  Handle<JSObject> NewStackFrameObject(Handle<JSFunction> fun, int position,
                                       bool is_constructor) {
    Handle<JSObject> stack_frame =
        factory()->NewJSObject(isolate_->object_function());
    Handle<Script> script(Script::cast(fun->shared()->script()));

    if (!line_key_.is_null()) {
      Script::PositionInfo info;
      bool valid_pos =
          script->GetPositionInfo(position, &info, Script::WITH_OFFSET);

      // Column is only emitted when the position resolved; the line
      // property is emitted unconditionally (matching historic behavior).
      if (!column_key_.is_null() && valid_pos) {
        JSObject::AddProperty(stack_frame, column_key_,
                              handle(Smi::FromInt(info.column + 1), isolate_),
                              NONE);
      }
      JSObject::AddProperty(stack_frame, line_key_,
                            handle(Smi::FromInt(info.line + 1), isolate_),
                            NONE);
    }

    if (!script_id_key_.is_null()) {
      JSObject::AddProperty(stack_frame, script_id_key_,
                            handle(Smi::FromInt(script->id()), isolate_), NONE);
    }

    if (!script_name_key_.is_null()) {
      JSObject::AddProperty(stack_frame, script_name_key_,
                            handle(script->name(), isolate_), NONE);
    }

    if (!script_name_or_source_url_key_.is_null()) {
      Handle<Object> result = Script::GetNameOrSourceURL(script);
      JSObject::AddProperty(stack_frame, script_name_or_source_url_key_, result,
                            NONE);
    }

    if (!eval_key_.is_null()) {
      Handle<Object> is_eval = factory()->ToBoolean(
          script->compilation_type() == Script::COMPILATION_TYPE_EVAL);
      JSObject::AddProperty(stack_frame, eval_key_, is_eval, NONE);
    }

    if (!function_key_.is_null()) {
      Handle<Object> fun_name = JSFunction::GetDebugName(fun);
      JSObject::AddProperty(stack_frame, function_key_, fun_name, NONE);
    }

    if (!constructor_key_.is_null()) {
      Handle<Object> is_constructor_obj = factory()->ToBoolean(is_constructor);
      JSObject::AddProperty(stack_frame, constructor_key_, is_constructor_obj,
                            NONE);
    }
    return stack_frame;
  }

  // Builds a frame object for a builtin-exit frame. Such frames have no
  // backing script, so only the function name can be reported.
  Handle<JSObject> NewStackFrameObject(BuiltinExitFrame* frame) {
    Handle<JSObject> stack_frame =
        factory()->NewJSObject(isolate_->object_function());
    Handle<JSFunction> fun = handle(frame->function(), isolate_);
    if (!function_key_.is_null()) {
      Handle<Object> fun_name = JSFunction::GetDebugName(fun);
      JSObject::AddProperty(stack_frame, function_key_, fun_name, NONE);
    }

    // We don't have a script and hence cannot set line and col positions.
    DCHECK(!fun->shared()->script()->IsScript());

    return stack_frame;
  }

  // Builds a frame object for a WebAssembly frame. Wasm has no line/column
  // mapping, so the function index is encoded as the line number and the
  // byte offset within the code as the column.
  Handle<JSObject> NewStackFrameObject(WasmFrame* frame) {
    Handle<JSObject> stack_frame =
        factory()->NewJSObject(isolate_->object_function());

    if (!function_key_.is_null()) {
      Handle<String> name = wasm::GetWasmFunctionName(
          isolate_, handle(frame->wasm_obj(), isolate_),
          frame->function_index());
      JSObject::AddProperty(stack_frame, function_key_, name, NONE);
    }
    // Encode the function index as line number.
    if (!line_key_.is_null()) {
      JSObject::AddProperty(
          stack_frame, line_key_,
          isolate_->factory()->NewNumberFromInt(frame->function_index()), NONE);
    }
    // Encode the byte offset as column.
    if (!column_key_.is_null()) {
      Code* code = frame->LookupCode();
      int offset = static_cast<int>(frame->pc() - code->instruction_start());
      int position = AbstractCode::cast(code)->SourcePosition(offset);
      // Make position 1-based.
      if (position >= 0) ++position;
      JSObject::AddProperty(stack_frame, column_key_,
                            isolate_->factory()->NewNumberFromInt(position),
                            NONE);
    }
    if (!script_id_key_.is_null()) {
      int script_id = frame->script()->id();
      JSObject::AddProperty(stack_frame, script_id_key_,
                            handle(Smi::FromInt(script_id), isolate_), NONE);
    }

    return stack_frame;
  }

 private:
  inline Factory* factory() { return isolate_->factory(); }

  Isolate* isolate_;
  // Interned property-name keys; a null handle disables that property.
  Handle<String> column_key_;
  Handle<String> line_key_;
  Handle<String> script_id_key_;
  Handle<String> script_name_key_;
  Handle<String> script_name_or_source_url_key_;
  Handle<String> function_key_;
  Handle<String> eval_key_;
  Handle<String> constructor_key_;
};
|
|
|
|
|
|
|
|
|
2015-05-05 07:28:54 +00:00
|
|
|
// Returns the source position for the stack-trace element that starts at
// |index| in |elements|. Slot index+2 holds either a Smi (a position that
// was already resolved when the trace was captured) or an AbstractCode
// object; in the latter case the position is derived from the code offset
// stored in slot index+3.
int PositionFromStackTrace(Handle<FixedArray> elements, int index) {
  DisallowHeapAllocation no_gc;
  Object* const maybe_code = elements->get(index + 2);
  if (maybe_code->IsSmi()) {
    // The position was stored directly.
    return Smi::cast(maybe_code)->value();
  }
  // Otherwise resolve the position from the recorded code offset.
  AbstractCode* const abstract_code = AbstractCode::cast(maybe_code);
  int const code_offset = Smi::cast(elements->get(index + 3))->value();
  return abstract_code->SourcePosition(code_offset);
}
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
// Captures the current JS/wasm stack as a JSArray of frame objects, at most
// |frame_limit| entries, honoring the requested StackTrace options.
// Fix: the backing JSArray is now allocated with the clamped |limit| rather
// than the raw |frame_limit|, so a negative frame_limit can no longer be
// handed to NewJSArray (the loop below already used the clamped value).
Handle<JSArray> Isolate::CaptureCurrentStackTrace(
    int frame_limit, StackTrace::StackTraceOptions options) {
  DisallowJavascriptExecution no_js(this);
  CaptureStackTraceHelper helper(this, options);

  // Ensure no negative values.
  int limit = Max(frame_limit, 0);
  Handle<JSArray> stack_trace = factory()->NewJSArray(limit);
  Handle<FixedArray> stack_trace_elems(
      FixedArray::cast(stack_trace->elements()), this);

  int frames_seen = 0;
  for (StackTraceFrameIterator it(this); !it.done() && (frames_seen < limit);
       it.Advance()) {
    StandardFrame* frame = it.frame();
    if (frame->is_java_script()) {
      // Set initial size to the maximum inlining level + 1 for the outermost
      // function.
      List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
      JavaScriptFrame::cast(frame)->Summarize(&frames);
      // Walk inlined frames innermost-last so the outermost is emitted last.
      for (int i = frames.length() - 1; i >= 0 && frames_seen < limit; i--) {
        Handle<JSFunction> fun = frames[i].function();
        // Filter frames from other security contexts.
        if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
            !this->context()->HasSameSecurityTokenAs(fun->context()))
          continue;
        Handle<JSObject> new_frame_obj = helper.NewStackFrameObject(frames[i]);
        stack_trace_elems->set(frames_seen, *new_frame_obj);
        frames_seen++;
      }
    } else {
      DCHECK(frame->is_wasm());
      WasmFrame* wasm_frame = WasmFrame::cast(frame);
      Handle<JSObject> new_frame_obj = helper.NewStackFrameObject(wasm_frame);
      stack_trace_elems->set(frames_seen, *new_frame_obj);
      frames_seen++;
    }
  }

  // Trim the array length to the number of frames actually collected.
  stack_trace->set_length(Smi::FromInt(frames_seen));
  return stack_trace;
}
|
|
|
|
|
|
|
|
|
2015-04-30 12:14:34 +00:00
|
|
|
// Prints the current stack to |out|, guarding against re-entrant faults:
// stack_trace_nesting_level_ tracks whether we crashed while already
// printing a stack (a "double fault"), in which case only the partially
// accumulated message is flushed instead of walking the stack again.
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    StringStream::ClearMentionedObjectCache(this);
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    // Remember the accumulator so a nested fault can flush what we have.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator, mode);
    accumulator.OutputToFile(out);
    InitializeLoggingAndCounters();
    accumulator.Log(this);
    incomplete_message_ = NULL;
    stack_trace_nesting_level_ = 0;
  } else if (stack_trace_nesting_level_ == 1) {
    // Double fault: we faulted while printing. Emit the partial dump only;
    // bumping the level prevents a third attempt.
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToFile(out);
  }
}
|
|
|
|
|
|
|
|
|
2013-02-15 09:27:10 +00:00
|
|
|
static void PrintFrames(Isolate* isolate,
|
|
|
|
StringStream* accumulator,
|
2011-05-13 08:54:16 +00:00
|
|
|
StackFrame::PrintMode mode) {
|
2013-02-15 09:27:10 +00:00
|
|
|
StackFrameIterator it(isolate);
|
2011-05-13 08:54:16 +00:00
|
|
|
for (int i = 0; !it.done(); it.Advance()) {
|
|
|
|
it.frame()->Print(accumulator, mode, i++);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-04-30 12:14:34 +00:00
|
|
|
// Renders the JS stack into |accumulator|: an overview section always, plus
// a details section (and the mentioned-object cache) in verbose mode.
void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
  // The MentionedObjectCache is not GC-proof at the moment.
  DisallowHeapAllocation no_gc;
  HandleScope scope(this);
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}
|
|
|
|
|
|
|
|
|
|
|
|
// Installs the embedder callback invoked when a cross-context access check
// fails (see ReportFailedAccessCheck). Stored per-thread.
void Isolate::SetFailedAccessCheckCallback(
    v8::FailedAccessCheckCallback callback) {
  thread_local_top()->failed_access_check_callback_ = callback;
}
|
|
|
|
|
|
|
|
|
2015-02-26 10:34:44 +00:00
|
|
|
// Reports a failed access check on |receiver| to the embedder. If no
// embedder callback is installed (or no AccessCheckInfo exists) a TypeError
// is scheduled instead of invoking a callback.
void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
  if (!thread_local_top()->failed_access_check_callback_) {
    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(receiver->IsAccessCheckNeeded());
  DCHECK(context());

  // Get the data object from access check info.
  HandleScope scope(this);
  Handle<Object> data;
  { DisallowHeapAllocation no_gc;
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    if (!access_check_info) {
      // No interceptor info: fall back to scheduling a TypeError. Allocation
      // is fine again because we only leave the no-gc scope to throw.
      AllowHeapAllocation doesnt_matter_anymore;
      return ScheduleThrow(
          *factory()->NewTypeError(MessageTemplate::kNoAccess));
    }
    data = handle(access_check_info->data(), this);
  }

  // Leaving JavaScript.
  VMState<EXTERNAL> state(this);
  thread_local_top()->failed_access_check_callback_(
      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
}
|
|
|
|
|
|
|
|
|
2015-10-09 13:25:39 +00:00
|
|
|
// Decides whether code running in |accessing_context| may access |receiver|
// across security origins. Fast path: global proxies whose native context
// matches, or whose security token matches, are allowed without calling
// out. Slow path: defer to the embedder's AccessCheckCallback.
bool Isolate::MayAccess(Handle<Context> accessing_context,
                        Handle<JSObject> receiver) {
  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    DisallowHeapAllocation no_gc;

    if (receiver->IsJSGlobalProxy()) {
      Object* receiver_context =
          JSGlobalProxy::cast(*receiver)->native_context();
      // A detached global proxy has no context and is never accessible.
      if (!receiver_context->IsContext()) return false;

      // Get the native context of current top context.
      // avoid using Isolate::native_context() because it uses Handle.
      Context* native_context =
          accessing_context->global_object()->native_context();
      if (receiver_context == native_context) return true;

      if (Context::cast(receiver_context)->security_token() ==
          native_context->security_token())
        return true;
    }
  }

  HandleScope scope(this);
  Handle<Object> data;
  v8::AccessCheckCallback callback = nullptr;
  { DisallowHeapAllocation no_gc;
    AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
    // Without access-check info there is no way to grant access.
    if (!access_check_info) return false;
    Object* fun_obj = access_check_info->callback();
    callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
    data = handle(access_check_info->data(), this);
  }

  LOG(this, ApiSecurityCheck());

  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    return callback(v8::Utils::ToLocal(accessing_context),
                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
  }
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Throws a RangeError for stack overflow and returns the exception
// sentinel. JS execution is disallowed for the whole construction since
// the stack has already overflowed.
Object* Isolate::StackOverflow() {
  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  Handle<JSFunction> fun = range_error_function();
  Handle<Object> msg = factory()->NewStringFromAsciiChecked(
      MessageTemplate::TemplateString(MessageTemplate::kStackOverflow));
  Handle<Object> exception;
  // ConstructError builds the RangeError without running user JS.
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      this, exception, ConstructError(this, fun, fun, msg, SKIP_NONE, true));

  Throw(*exception, nullptr);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && FLAG_stress_compaction) {
    heap()->CollectAllGarbage(Heap::kNoGCFlags, "trigger compaction");
  }
#endif  // VERIFY_HEAP

  return heap()->exception();
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Throws the uncatchable termination-exception sentinel, unwinding JS
// execution; returns the exception sentinel like Throw does.
Object* Isolate::TerminateExecution() {
  return Throw(heap_.termination_exception(), nullptr);
}
|
|
|
|
|
|
|
|
|
2013-04-22 15:01:45 +00:00
|
|
|
// Clears any in-flight termination exception so execution may resume:
// resets the external TryCatch termination flag and drops the termination
// sentinel from both the pending and the scheduled exception slots.
void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
      pending_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
      scheduled_exception() == heap_.termination_exception()) {
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}
|
|
|
|
|
|
|
|
|
2014-12-20 07:54:03 +00:00
|
|
|
// Queues an API interrupt (callback plus opaque data) and asks the stack
// guard to deliver it; InvokeApiInterruptCallbacks later drains the queue.
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
  ExecutionAccess access(this);  // Serializes access to the interrupt queue.
  InterruptEntry entry(callback, data);
  api_interrupts_queue_.push(entry);
  stack_guard()->RequestApiInterrupt();
}
|
2014-05-09 09:13:12 +00:00
|
|
|
|
2014-12-20 07:54:03 +00:00
|
|
|
|
|
|
|
// Drains the API interrupt queue, invoking each callback. The queue is
// popped under the ExecutionAccess lock, but each callback runs outside
// the lock (and in EXTERNAL VM state) so it may re-enter the API.
void Isolate::InvokeApiInterruptCallbacks() {
  // Note: callback below should be called outside of execution access lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      api_interrupts_queue_.pop();
    }
    VMState<EXTERNAL> state(this);
    HandleScope handle_scope(this);
    // entry.first is the callback, entry.second its opaque data pointer.
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
  }
}
|
|
|
|
|
|
|
|
|
2015-04-30 17:33:13 +00:00
|
|
|
// Reports an exception thrown while the bootstrapper is active. Message
// objects cannot be created at this point, so the error (and, with
// OBJECT_PRINT, the failing script source with line numbers) is written
// directly to stderr/stdout.
void ReportBootstrappingException(Handle<Object> exception,
                                  MessageLocation* location) {
  base::OS::PrintError("Exception thrown during bootstrapping\n");
  if (location == NULL || location->script().is_null()) return;
  // We are bootstrapping and caught an error where the location is set
  // and we have a script for the location.
  // In this case we could have an extension (or an internal error
  // somewhere) and we print out the line number at which the error occured
  // to the console for easier debugging.
  int line_number =
      location->script()->GetLineNumber(location->start_pos()) + 1;
  if (exception->IsString() && location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error: %s in %s at line %d.\n",
        String::cast(*exception)->ToCString().get(),
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (location->script()->name()->IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error in %s at line %d.\n",
        String::cast(location->script()->name())->ToCString().get(),
        line_number);
  } else if (exception->IsString()) {
    base::OS::PrintError("Extension or internal compilation error: %s.\n",
                         String::cast(*exception)->ToCString().get());
  } else {
    base::OS::PrintError("Extension or internal compilation error.\n");
  }
#ifdef OBJECT_PRINT
  // Since comments and empty lines have been stripped from the source of
  // builtins, print the actual source here so that line numbers match.
  if (location->script()->source()->IsString()) {
    Handle<String> src(String::cast(location->script()->source()));
    PrintF("Failing script:");
    int len = src->length();
    if (len == 0) {
      PrintF(" <not available>\n");
    } else {
      PrintF("\n");
      int line_number = 1;
      PrintF("%5d: ", line_number);
      for (int i = 0; i < len; i++) {
        uint16_t character = src->Get(i);
        PrintF("%c", character);
        if (character == '\n' && i < len - 2) {
          PrintF("%5d: ", ++line_number);
        }
      }
      PrintF("\n");
    }
  }
#endif
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Core throw path: records |exception| as the pending exception, notifies
// the debugger, creates a pending message when needed, and optionally
// aborts the process for --abort-on-uncaught-exception. Returns the
// exception sentinel that callers propagate.
Object* Isolate::Throw(Object* exception, MessageLocation* location) {
  DCHECK(!has_pending_exception());

  HandleScope scope(this);
  Handle<Object> exception_handle(exception, this);

  // Determine whether a message needs to be created for the given exception
  // depending on the following criteria:
  // 1) External v8::TryCatch missing: Always create a message because any
  //    JavaScript handler for a finally-block might re-throw to top-level.
  // 2) External v8::TryCatch exists: Only create a message if the handler
  //    captures messages or is verbose (which reports despite the catch).
  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
  //    exists and we preserve it instead of creating a new message.
  bool requires_message = try_catch_handler() == nullptr ||
                          try_catch_handler()->is_verbose_ ||
                          try_catch_handler()->capture_message_;
  bool rethrowing_message = thread_local_top()->rethrowing_message_;

  // Consume the rethrow marker; it only applies to this single throw.
  thread_local_top()->rethrowing_message_ = false;

  // Notify debugger of exception.
  if (is_catchable_by_javascript(exception)) {
    debug()->OnThrow(exception_handle);
  }

  // Generate the message if required.
  if (requires_message && !rethrowing_message) {
    MessageLocation computed_location;
    // If no location was specified we try to use a computed one instead.
    if (location == NULL && ComputeLocation(&computed_location)) {
      location = &computed_location;
    }

    if (bootstrapper()->IsActive()) {
      // It's not safe to try to make message objects or collect stack traces
      // while the bootstrapper is active since the infrastructure may not have
      // been properly initialized.
      ReportBootstrappingException(exception_handle, location);
    } else {
      Handle<Object> message_obj = CreateMessage(exception_handle, location);
      thread_local_top()->pending_message_obj_ = *message_obj;

      // For any exception not caught by JavaScript, even when an external
      // handler is present:
      // If the abort-on-uncaught-exception flag is specified, and if the
      // embedder didn't specify a custom uncaught exception callback,
      // or if the custom callback determined that V8 should abort, then
      // abort.
      if (FLAG_abort_on_uncaught_exception &&
          PredictExceptionCatcher() != CAUGHT_BY_JAVASCRIPT &&
          (!abort_on_uncaught_exception_callback_ ||
           abort_on_uncaught_exception_callback_(
               reinterpret_cast<v8::Isolate*>(this)))) {
        // Prevent endless recursion.
        FLAG_abort_on_uncaught_exception = false;
        // This flag is intended for use by JavaScript developers, so
        // print a user-friendly stack trace (not an internal one).
        PrintF(stderr, "%s\n\nFROM\n",
               MessageHandler::GetLocalizedMessage(this, message_obj).get());
        PrintCurrentStackTrace(stderr);
        base::OS::Abort();
      }
    }
  }

  // Set the exception being thrown.
  set_pending_exception(*exception_handle);
  return heap()->exception();
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Re-throws an exception without re-running the message/debugger machinery
// in Throw (the original message is preserved by the rethrow protocol).
Object* Isolate::ReThrow(Object* exception) {
  DCHECK(!has_pending_exception());

  // Set the exception being re-thrown.
  set_pending_exception(exception);
  return heap()->exception();
}
|
|
|
|
|
|
|
|
|
2015-05-04 16:43:56 +00:00
|
|
|
// Walks the stack to find the handler for the pending exception, records
// (code, context, offset, fp, sp) for the CEntryStub to jump to, clears the
// pending exception, and returns it. Termination exceptions are uncatchable
// by JS, so for them only the top JS_ENTRY handler matches.
Object* Isolate::UnwindAndFindHandler() {
  Object* exception = pending_exception();

  Code* code = nullptr;
  Context* context = nullptr;
  intptr_t offset = 0;
  Address handler_sp = nullptr;
  Address handler_fp = nullptr;

  // Special handling of termination exceptions, uncatchable by JavaScript code,
  // we unwind the handlers until the top ENTRY handler is found.
  bool catchable_by_js = is_catchable_by_javascript(exception);

  // Compute handler and stack unwinding information by performing a full walk
  // over the stack and dispatching according to the frame type.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    // For JSEntryStub frames we always have a handler.
    if (frame->is_entry() || frame->is_entry_construct()) {
      StackHandler* handler = frame->top_handler();

      // Restore the next handler.
      thread_local_top()->handler_ = handler->next()->address();

      // Gather information from the handler.
      code = frame->LookupCode();
      handler_sp = handler->address() + StackHandlerConstants::kSize;
      offset = Smi::cast(code->handler_table()->get(0))->value();
      break;
    }

    // For optimized frames we perform a lookup in the handler table.
    if (frame->is_optimized() && catchable_by_js) {
      OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
      int stack_slots = 0;  // Will contain stack slot count of frame.
      offset = js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
      if (offset >= 0) {
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kPointerSize;

        // Gather information from the frame.
        code = frame->LookupCode();
        if (code->marked_for_deoptimization()) {
          // If the target code is lazy deoptimized, we jump to the original
          // return address, but we make a note that we are throwing, so that
          // the deoptimizer can do the right thing.
          offset = static_cast<int>(frame->pc() - code->entry());
          set_deoptimizer_lazy_throw(true);
        }
        handler_sp = return_sp;
        handler_fp = frame->fp();
        break;
      }
    }

    // For interpreted frame we perform a range lookup in the handler table.
    if (frame->is_interpreted() && catchable_by_js) {
      InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
      int context_reg = 0;  // Will contain register index holding context.
      offset = js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
      if (offset >= 0) {
        // Patch the bytecode offset in the interpreted frame to reflect the
        // position of the exception handler. The special builtin below will
        // take care of continuing to dispatch at that position. Also restore
        // the correct context for the handler from the interpreter register.
        context = Context::cast(js_frame->ReadInterpreterRegister(context_reg));
        js_frame->PatchBytecodeOffset(static_cast<int>(offset));
        // Offset is consumed by the patch above; the dispatch builtin
        // resumes from the patched bytecode offset instead.
        offset = 0;

        // Gather information from the frame.
        code = *builtins()->InterpreterEnterBytecodeDispatch();
        handler_sp = frame->sp();
        handler_fp = frame->fp();
        break;
      }
    }

    // For JavaScript frames we perform a range lookup in the handler table.
    if (frame->is_java_script() && catchable_by_js) {
      JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
      int stack_depth = 0;  // Will contain operand stack depth of handler.
      offset = js_frame->LookupExceptionHandlerInTable(&stack_depth, nullptr);
      if (offset >= 0) {
        // Compute the stack pointer from the frame pointer. This ensures that
        // operand stack slots are dropped for nested statements. Also restore
        // correct context for the handler which is pushed within the try-block.
        Address return_sp = frame->fp() -
                            StandardFrameConstants::kFixedFrameSizeFromFp -
                            stack_depth * kPointerSize;
        STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
        context = Context::cast(Memory::Object_at(return_sp - kPointerSize));

        // Gather information from the frame.
        code = frame->LookupCode();
        handler_sp = return_sp;
        handler_fp = frame->fp();
        break;
      }
    }

    // This frame does not handle the exception; drop any objects the
    // deoptimizer materialized for it before unwinding past it.
    RemoveMaterializedObjectsOnUnwind(frame);
  }

  // Handler must exist.
  CHECK(code != nullptr);

  // Store information to be consumed by the CEntryStub.
  thread_local_top()->pending_handler_context_ = context;
  thread_local_top()->pending_handler_code_ = code;
  thread_local_top()->pending_handler_offset_ = offset;
  thread_local_top()->pending_handler_fp_ = handler_fp;
  thread_local_top()->pending_handler_sp_ = handler_sp;

  // Return and clear pending exception.
  clear_pending_exception();
  return exception;
}
|
|
|
|
|
|
|
|
|
2015-03-25 13:13:51 +00:00
|
|
|
// Predicts which kind of handler would catch an exception thrown right now:
// a JavaScript try-handler (CAUGHT_BY_JAVASCRIPT), an external v8::TryCatch
// (CAUGHT_BY_EXTERNAL), or none (NOT_CAUGHT).
Isolate::CatchType Isolate::PredictExceptionCatcher() {
  Address external_handler = thread_local_top()->try_catch_handler_address();
  Address entry_handler = Isolate::handler(thread_local_top());
  // Fast path: an external handler above the top-most JS_ENTRY handler wins.
  if (IsExternalHandlerOnTop(nullptr)) return CAUGHT_BY_EXTERNAL;

  // Search for an exception handler by performing a full walk over the stack.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    // For JSEntryStub frames we update the JS_ENTRY handler.
    if (frame->is_entry() || frame->is_entry_construct()) {
      entry_handler = frame->top_handler()->next()->address();
    }

    // For JavaScript frames we perform a lookup in the handler table.
    if (frame->is_java_script()) {
      JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
      HandlerTable::CatchPrediction prediction;
      if (js_frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
        // We are conservative with our prediction: try-finally is considered
        // to always rethrow, to meet the expectation of the debugger.
        if (prediction == HandlerTable::CAUGHT) return CAUGHT_BY_JAVASCRIPT;
      }
    }

    // The exception has been externally caught if and only if there is an
    // external handler which is on top of the top-most JS_ENTRY handler.
    if (external_handler != nullptr && !try_catch_handler()->is_verbose_) {
      if (entry_handler == nullptr || entry_handler > external_handler) {
        return CAUGHT_BY_EXTERNAL;
      }
    }
  }

  // Handler not found.
  return NOT_CAUGHT;
}
|
|
|
|
|
|
|
|
|
2015-05-04 16:43:56 +00:00
|
|
|
// Drops any deoptimizer-materialized objects recorded for an optimized frame
// that is being unwound past during exception handling.
void Isolate::RemoveMaterializedObjectsOnUnwind(StackFrame* frame) {
  if (!frame->is_optimized()) return;
  const bool had_materialized =
      materialized_object_store_->Remove(frame->fp());
  USE(had_materialized);
  // If there were any materialized objects, the code should be
  // marked for deopt.
  DCHECK(!had_materialized ||
         frame->LookupCode()->marked_for_deoptimization());
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Throws the generic "illegal access" error used for illegal runtime
// operations; returns the exception sentinel produced by Throw().
Object* Isolate::ThrowIllegalOperation() {
  // Optionally dump the stack to aid debugging (--stack-trace-on-illegal).
  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
  return Throw(heap()->illegal_access_string());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Schedules |exception| to be re-thrown later, after first throwing it once
// so that uncaught-error reporting happens at this site.
void Isolate::ScheduleThrow(Object* exception) {
  // When scheduling a throw we first throw the exception to get the
  // error reporting if it is uncaught before rescheduling it.
  Throw(exception);
  PropagatePendingExceptionToExternalTryCatch();
  // If the exception is still pending after propagation, move it into the
  // scheduled-exception slot for a later re-throw.
  if (has_pending_exception()) {
    thread_local_top()->scheduled_exception_ = pending_exception();
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
}
|
|
|
|
|
|
|
|
|
2013-07-01 10:54:39 +00:00
|
|
|
// Re-installs the message captured by the given v8::TryCatch handler as the
// pending message, used when the handler re-throws its caught exception.
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
  DCHECK(handler == try_catch_handler());  // Must be the top-most handler.
  DCHECK(handler->HasCaught());
  DCHECK(handler->rethrow_);
  DCHECK(handler->capture_message_);
  Object* message = reinterpret_cast<Object*>(handler->message_obj_);
  // Either a real message object or the hole (meaning "no message").
  DCHECK(message->IsJSMessageObject() || message->IsTheHole(this));
  thread_local_top()->pending_message_obj_ = message;
}
|
|
|
|
|
|
|
|
|
2014-07-18 13:28:12 +00:00
|
|
|
// Clears the scheduled exception if (and only if) it is the one caught by the
// given TryCatch handler. Termination exceptions are never cancelled here.
void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
  DCHECK(has_scheduled_exception());
  if (scheduled_exception() != handler->exception_) return;
  DCHECK(scheduled_exception() != heap()->termination_exception());
  clear_scheduled_exception();
}
|
|
|
|
|
|
|
|
|
2014-04-22 12:50:58 +00:00
|
|
|
// Promotes the scheduled exception back to a pending exception by
// re-throwing it; returns the exception sentinel from ReThrow().
Object* Isolate::PromoteScheduledException() {
  Object* const to_rethrow = scheduled_exception();
  clear_scheduled_exception();
  // Re-throw the exception to avoid getting repeated error reporting.
  return ReThrow(to_rethrow);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prints a human-readable stack trace of the current JavaScript execution
// state to |out|, one generated line per frame.
void Isolate::PrintCurrentStackTrace(FILE* out) {
  StackTraceFrameIterator it(this);
  while (!it.done()) {
    HandleScope scope(this);
    // Find code position if recorded in relocation info.
    StandardFrame* frame = it.frame();
    AbstractCode* abstract_code;
    int code_offset;
    if (frame->is_interpreted()) {
      // Interpreted frames: the position comes from bytecode array + offset.
      InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
      abstract_code = AbstractCode::cast(iframe->GetBytecodeArray());
      code_offset = iframe->GetBytecodeOffset();
    } else {
      // Compiled frames: derive the offset from the current pc.
      DCHECK(frame->is_java_script() || frame->is_wasm());
      Code* code = frame->LookupCode();
      abstract_code = AbstractCode::cast(code);
      code_offset = static_cast<int>(frame->pc() - code->instruction_start());
    }
    int pos = abstract_code->SourcePosition(code_offset);
    JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
    Handle<Object> pos_obj(Smi::FromInt(pos), this);
    // Fetch function and receiver.
    Handle<JSFunction> fun(js_frame->function());
    Handle<Object> recv(js_frame->receiver(), this);
    // Advance to the next JavaScript frame and determine if the
    // current frame is the top-level frame.
    it.Advance();
    Handle<Object> is_top_level = factory()->ToBoolean(it.done());
    // Generate and print stack trace line.
    Handle<String> line =
        Execution::GetStackTraceLine(recv, fun, pos_obj, is_top_level);
    if (line->length() > 0) {
      line->PrintOn(out);
      PrintF(out, "\n");
    }
  }
}
|
|
|
|
|
2015-08-21 13:18:54 +00:00
|
|
|
// Computes the source location of the top-most JavaScript frame on the stack.
// Returns false when there is no suitable frame (empty stack, non-JS frame on
// top, or a function without an attached script source).
bool Isolate::ComputeLocation(MessageLocation* target) {
  StackTraceFrameIterator it(this);
  if (it.done()) return false;
  StandardFrame* frame = it.frame();
  // TODO(clemensh): handle wasm frames
  if (!frame->is_java_script()) return false;
  JSFunction* fun = JavaScriptFrame::cast(frame)->function();
  Object* script = fun->shared()->script();
  if (!script->IsScript() ||
      (Script::cast(script)->source()->IsUndefined(this))) {
    return false;
  }
  Handle<Script> casted_script(Script::cast(script));
  // Compute the location from the function and the relocation info of the
  // baseline code. For optimized code this will use the deoptimization
  // information to get canonical location information.
  List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
  JavaScriptFrame::cast(frame)->Summarize(&frames);
  // The last summary is the innermost (possibly inlined) function.
  FrameSummary& summary = frames.last();
  int pos = summary.abstract_code()->SourcePosition(summary.code_offset());
  *target = MessageLocation(casted_script, pos, pos + 1, handle(fun));
  return true;
}
|
|
|
|
|
2015-02-03 08:57:18 +00:00
|
|
|
// Derives a source location from the start/end positions and script recorded
// on an error object via private symbols. Returns false when the exception
// carries no such information.
bool Isolate::ComputeLocationFromException(MessageLocation* target,
                                           Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<JSObject> error_object = Handle<JSObject>::cast(exception);

  Handle<Object> start_pos = JSReceiver::GetDataProperty(
      error_object, factory()->error_start_pos_symbol());
  if (!start_pos->IsSmi()) return false;
  int start_pos_value = Handle<Smi>::cast(start_pos)->value();

  Handle<Object> end_pos = JSReceiver::GetDataProperty(
      error_object, factory()->error_end_pos_symbol());
  if (!end_pos->IsSmi()) return false;
  int end_pos_value = Handle<Smi>::cast(end_pos)->value();

  Handle<Object> script = JSReceiver::GetDataProperty(
      error_object, factory()->error_script_symbol());
  if (!script->IsScript()) return false;

  Handle<Script> cast_script(Script::cast(*script));
  *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
  return true;
}
|
|
|
|
|
|
|
|
|
2014-11-04 10:06:44 +00:00
|
|
|
// Derives a source location from the simple stack trace attached to an
// exception object (under the private stack_trace_symbol): the location of
// the top-most frame that is subject to debugging and has script source.
bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
                                            Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
  if (!property->IsJSArray()) return false;
  Handle<JSArray> simple_stack_trace = Handle<JSArray>::cast(property);

  Handle<FixedArray> elements(FixedArray::cast(simple_stack_trace->elements()));
  int elements_limit = Smi::cast(simple_stack_trace->length())->value();

  // The flat array stores one frame record per four elements; the function
  // lives at offset i + 1 of each record (see the capture code for layout).
  for (int i = 1; i < elements_limit; i += 4) {
    Handle<Object> fun_obj = handle(elements->get(i + 1), this);
    if (fun_obj->IsSmi()) {
      // TODO(clemensh): handle wasm frames
      return false;
    }
    Handle<JSFunction> fun = Handle<JSFunction>::cast(fun_obj);
    // Skip frames that are not subject to debugging.
    if (!fun->shared()->IsSubjectToDebugging()) continue;

    Object* script = fun->shared()->script();
    if (script->IsScript() &&
        !(Script::cast(script)->source()->IsUndefined(this))) {
      int pos = PositionFromStackTrace(elements, i);
      Handle<Script> casted_script(Script::cast(script));
      *target = MessageLocation(casted_script, pos, pos + 1);
      return true;
    }
  }
  return false;
}
|
|
|
|
|
|
|
|
|
2014-09-30 15:29:08 +00:00
|
|
|
// Creates the JSMessageObject for an uncaught exception, optionally attaching
// a detailed stack trace and computing a source location when the caller did
// not supply one.
Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
                                               MessageLocation* location) {
  Handle<JSArray> stack_trace_object;
  if (capture_stack_trace_for_uncaught_exceptions_) {
    if (exception->IsJSError()) {
      // We fetch the stack trace that corresponds to this error object.
      // If the lookup fails, the exception is probably not a valid Error
      // object. In that case, we fall through and capture the stack trace
      // at this throw site.
      stack_trace_object =
          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
    }
    if (stack_trace_object.is_null()) {
      // Not an error object, we capture stack and location at throw site.
      stack_trace_object = CaptureCurrentStackTrace(
          stack_trace_for_uncaught_exceptions_frame_limit_,
          stack_trace_for_uncaught_exceptions_options_);
    }
  }
  // Try increasingly generic strategies to find a source location.
  MessageLocation computed_location;
  if (location == NULL &&
      (ComputeLocationFromException(&computed_location, exception) ||
       ComputeLocationFromStackTrace(&computed_location, exception) ||
       ComputeLocation(&computed_location))) {
    location = &computed_location;
  }

  return MessageHandler::MakeMessageObject(
      this, MessageTemplate::kUncaughtException, location, exception,
      stack_trace_object);
}
|
|
|
|
|
|
|
|
|
2015-03-18 10:19:04 +00:00
|
|
|
// Returns true when the closest handler for |exception| is a JavaScript
// handler (via the top-most JS_ENTRY) rather than an external v8::TryCatch.
// Handler "height" is compared by stack address.
bool Isolate::IsJavaScriptHandlerOnTop(Object* exception) {
  DCHECK_NE(heap()->the_hole_value(), exception);

  // For uncatchable exceptions, the JavaScript handler cannot be on top.
  if (!is_catchable_by_javascript(exception)) return false;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == nullptr) return false;

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == nullptr) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler < external_handler);
}
|
2015-03-16 10:06:11 +00:00
|
|
|
|
2015-03-18 10:19:04 +00:00
|
|
|
|
|
|
|
// Returns true when the closest handler for |exception| is an external
// v8::TryCatch (mirror image of IsJavaScriptHandlerOnTop; uncatchable
// exceptions always go to the external handler).
bool Isolate::IsExternalHandlerOnTop(Object* exception) {
  DCHECK_NE(heap()->the_hole_value(), exception);

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == nullptr) return false;

  // For uncatchable exceptions, the external handler is always on top.
  if (!is_catchable_by_javascript(exception)) return true;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == nullptr) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler > external_handler);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Reports the pending message (if any) for the pending exception to all
// registered message handlers, unless the exception will be caught by a
// handler that suppresses reporting.
void Isolate::ReportPendingMessages() {
  Object* exception = pending_exception();

  // Try to propagate the exception to an external v8::TryCatch handler. If
  // propagation was unsuccessful, then we will get another chance at reporting
  // the pending message if the exception is re-thrown.
  bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
  if (!has_been_propagated) return;

  // Clear the pending message object early to avoid endless recursion.
  Object* message_obj = thread_local_top_.pending_message_obj_;
  clear_pending_message();

  // For uncatchable exceptions we do nothing. If needed, the exception and the
  // message have already been propagated to v8::TryCatch.
  if (!is_catchable_by_javascript(exception)) return;

  // Determine whether the message needs to be reported to all message handlers
  // depending on whether an external v8::TryCatch or an internal JavaScript
  // handler is on top.
  bool should_report_exception;
  if (IsExternalHandlerOnTop(exception)) {
    // Only report the exception if the external handler is verbose.
    should_report_exception = try_catch_handler()->is_verbose_;
  } else {
    // Report the exception if it isn't caught by JavaScript code.
    should_report_exception = !IsJavaScriptHandlerOnTop(exception);
  }

  // Actually report the pending message to all message handlers.
  if (!message_obj->IsTheHole(this) && should_report_exception) {
    HandleScope scope(this);
    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj));
    Handle<JSValue> script_wrapper(JSValue::cast(message->script()));
    Handle<Script> script(Script::cast(script_wrapper->value()));
    int start_pos = message->start_position();
    int end_pos = message->end_position();
    MessageLocation location(script, start_pos, end_pos);
    MessageHandler::ReportMessage(this, &location, message);
  }
}
|
|
|
|
|
|
|
|
|
2012-12-03 21:47:39 +00:00
|
|
|
// Returns the source location of the pending message, or an empty
// (default-constructed) location when the exception is a termination or no
// message object is attached.
MessageLocation Isolate::GetMessageLocation() {
  DCHECK(has_pending_exception());

  if (thread_local_top_.pending_exception_ != heap()->termination_exception() &&
      !thread_local_top_.pending_message_obj_->IsTheHole(this)) {
    Handle<JSMessageObject> message_obj(
        JSMessageObject::cast(thread_local_top_.pending_message_obj_));
    Handle<JSValue> script_wrapper(JSValue::cast(message_obj->script()));
    Handle<Script> script(Script::cast(script_wrapper->value()));
    int start_pos = message_obj->start_position();
    int end_pos = message_obj->end_position();
    return MessageLocation(script, start_pos, end_pos);
  }

  return MessageLocation();
}
|
|
|
|
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
// When returning to the embedder, decides whether the pending exception is
// cleared (returns false) or rescheduled for a later re-throw (returns true).
// |is_bottom_call| marks the outermost entry into V8.
bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
  DCHECK(has_pending_exception());
  PropagatePendingExceptionToExternalTryCatch();

  bool is_termination_exception =
      pending_exception() == heap_.termination_exception();

  // Do not reschedule the exception if this is the bottom call.
  bool clear_exception = is_bottom_call;

  if (is_termination_exception) {
    // Termination at the bottom call is fully consumed here; otherwise it
    // stays pending so outer frames unwind.
    if (is_bottom_call) {
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
  } else if (thread_local_top()->external_caught_exception_) {
    // If the exception is externally caught, clear it if there are no
    // JavaScript frames on the way to the C++ frame that has the
    // external handler.
    DCHECK(thread_local_top()->try_catch_handler_address() != NULL);
    Address external_handler_address =
        thread_local_top()->try_catch_handler_address();
    JavaScriptFrameIterator it(this);
    if (it.done() || (it.frame()->sp() > external_handler_address)) {
      clear_exception = true;
    }
  }

  // Clear the exception if needed.
  if (clear_exception) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
    return false;
  }

  // Reschedule the exception.
  thread_local_top()->scheduled_exception_ = pending_exception();
  clear_pending_exception();
  return true;
}
|
|
|
|
|
|
|
|
|
2015-03-25 13:13:51 +00:00
|
|
|
// Pushes |promise| together with its |function| onto the per-thread promise
// stack, keeping both alive via global handles.
void Isolate::PushPromise(Handle<JSObject> promise,
                          Handle<JSFunction> function) {
  ThreadLocalTop* tltop = thread_local_top();
  PromiseOnStack* previous_top = tltop->promise_on_stack_;
  Handle<JSObject> promise_global =
      Handle<JSObject>::cast(global_handles()->Create(*promise));
  Handle<JSFunction> function_global =
      Handle<JSFunction>::cast(global_handles()->Create(*function));
  tltop->promise_on_stack_ =
      new PromiseOnStack(function_global, promise_global, previous_top);
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::PopPromise() {
|
|
|
|
ThreadLocalTop* tltop = thread_local_top();
|
|
|
|
if (tltop->promise_on_stack_ == NULL) return;
|
|
|
|
PromiseOnStack* prev = tltop->promise_on_stack_->prev();
|
2015-03-25 13:13:51 +00:00
|
|
|
Handle<Object> global_function = tltop->promise_on_stack_->function();
|
|
|
|
Handle<Object> global_promise = tltop->promise_on_stack_->promise();
|
2014-08-13 11:14:35 +00:00
|
|
|
delete tltop->promise_on_stack_;
|
|
|
|
tltop->promise_on_stack_ = prev;
|
2015-03-25 13:13:51 +00:00
|
|
|
global_handles()->Destroy(global_function.location());
|
|
|
|
global_handles()->Destroy(global_promise.location());
|
2014-08-13 11:14:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Returns the promise on top of the per-thread promise stack if a throw at
// this point would reject it (i.e. the first JS handler found belongs to the
// promise's function); otherwise returns undefined.
Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
  Handle<Object> undefined = factory()->undefined_value();
  ThreadLocalTop* tltop = thread_local_top();
  // nullptr (not NULL) for consistency with the rest of this file.
  if (tltop->promise_on_stack_ == nullptr) return undefined;
  Handle<JSFunction> promise_function = tltop->promise_on_stack_->function();
  // Find the top-most try-catch or try-finally handler.
  if (PredictExceptionCatcher() != CAUGHT_BY_JAVASCRIPT) return undefined;
  for (JavaScriptFrameIterator it(this); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
      // Throwing inside a Promise only leads to a reject if not caught by an
      // inner try-catch or try-finally.
      if (frame->function() == *promise_function) {
        return tltop->promise_on_stack_->promise();
      }
      return undefined;
    }
  }
  return undefined;
}
|
|
|
|
|
|
|
|
|
2011-05-13 08:54:16 +00:00
|
|
|
// Configures whether, how deep, and with which options stack traces are
// captured for uncaught exceptions (consumed by CreateMessage).
void Isolate::SetCaptureStackTraceForUncaughtExceptions(
    bool capture,
    int frame_limit,
    StackTrace::StackTraceOptions options) {
  capture_stack_trace_for_uncaught_exceptions_ = capture;
  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
  stack_trace_for_uncaught_exceptions_options_ = options;
}
|
|
|
|
|
|
|
|
|
2015-10-05 18:55:05 +00:00
|
|
|
// Installs the embedder-provided callback consulted on uncaught exceptions
// (see v8::Isolate::AbortOnUncaughtExceptionCallback).
void Isolate::SetAbortOnUncaughtExceptionCallback(
    v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
  abort_on_uncaught_exception_callback_ = callback;
}
|
|
|
|
|
|
|
|
|
2012-08-17 09:03:08 +00:00
|
|
|
// Returns the native context of the currently entered context.
Handle<Context> Isolate::native_context() {
  return handle(context()->native_context());
}
|
|
|
|
|
|
|
|
|
2012-08-17 09:03:08 +00:00
|
|
|
// Returns the native context of the calling JavaScript frame. While a debug
// scope is active, frames running in the debug context are skipped. Returns
// a null handle when no suitable frame exists.
Handle<Context> Isolate::GetCallingNativeContext() {
  JavaScriptFrameIterator it(this);
  if (debug_->in_debug_scope()) {
    // Skip frames whose native context is the debugger's own context.
    while (!it.done()) {
      JavaScriptFrame* frame = it.frame();
      Context* context = Context::cast(frame->context());
      if (context->native_context() == *debug_->debug_context()) {
        it.Advance();
      } else {
        break;
      }
    }
  }
  if (it.done()) return Handle<Context>::null();
  JavaScriptFrame* frame = it.frame();
  Context* context = Context::cast(frame->context());
  return Handle<Context>(context->native_context());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Copies this thread's per-isolate state into the buffer at |to| and resets
// the live thread-local state so another thread can enter the isolate.
// Returns the position just past the archived data.
char* Isolate::ArchiveThread(char* to) {
  MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
          sizeof(ThreadLocalTop));
  InitializeThreadLocal();
  // The archived copy retains any in-flight exception state; the live state
  // starts clean.
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
  return to + sizeof(ThreadLocalTop);
}
|
|
|
|
|
|
|
|
|
|
|
|
char* Isolate::RestoreThread(char* from) {
|
2014-05-27 07:57:22 +00:00
|
|
|
MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
|
|
|
|
sizeof(ThreadLocalTop));
|
|
|
|
// This might be just paranoia, but it seems to be needed in case a
|
|
|
|
// thread_local_top_ is restored on a separate OS thread.
|
2011-05-13 08:54:16 +00:00
|
|
|
#ifdef USE_SIMULATOR
|
|
|
|
thread_local_top()->simulator_ = Simulator::current(this);
|
|
|
|
#endif
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(context() == NULL || context()->IsContext());
|
2011-05-13 08:54:16 +00:00
|
|
|
return from + sizeof(ThreadLocalTop);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Starts with an empty intrusive doubly-linked list of per-thread entries.
Isolate::ThreadDataTable::ThreadDataTable()
    : list_(nullptr) {  // nullptr (not NULL) for consistency with this file.
}
|
|
|
|
|
|
|
|
|
2013-02-12 11:57:51 +00:00
|
|
|
// Intentionally does not free remaining entries; see the note below.
Isolate::ThreadDataTable::~ThreadDataTable() {
  // TODO(svenpanne) The assertion below would fire if an embedder does not
  // cleanly dispose all Isolates before disposing v8, so we are conservative
  // and leave it out for now.
  // DCHECK_NULL(list_);
}
|
|
|
|
|
|
|
|
|
2014-02-28 10:55:47 +00:00
|
|
|
// Frees the per-thread simulator instance in simulator builds; there is
// nothing else to release.
Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
#if defined(USE_SIMULATOR)
  delete simulator_;
#endif
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Finds the entry matching |isolate| and |thread_id| via a linear walk of the
// intrusive list; returns nullptr when absent. (nullptr replaces NULL for
// consistency with the rest of this file.)
Isolate::PerIsolateThreadData*
    Isolate::ThreadDataTable::Lookup(Isolate* isolate,
                                     ThreadId thread_id) {
  for (PerIsolateThreadData* data = list_; data != nullptr;
       data = data->next_) {
    if (data->Matches(isolate, thread_id)) return data;
  }
  return nullptr;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prepends |data| to the intrusive doubly-linked list. (nullptr replaces
// NULL for consistency with the rest of this file.)
void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
  if (list_ != nullptr) list_->prev_ = data;
  data->next_ = list_;
  list_ = data;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Unlinks |data| from the intrusive doubly-linked list and deletes it.
// (nullptr replaces NULL for consistency with the rest of this file.)
void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
  if (list_ == data) list_ = data->next_;
  if (data->next_ != nullptr) data->next_->prev_ = data->prev_;
  if (data->prev_ != nullptr) data->prev_->next_ = data->next_;
  delete data;
}
|
|
|
|
|
|
|
|
|
2011-06-07 18:33:03 +00:00
|
|
|
// Removes (and deletes) every entry belonging to |isolate|. The next pointer
// is saved before Remove() because Remove() deletes the entry. (nullptr
// replaces NULL for consistency with the rest of this file.)
void Isolate::ThreadDataTable::RemoveAllThreads(Isolate* isolate) {
  PerIsolateThreadData* data = list_;
  while (data != nullptr) {
    PerIsolateThreadData* next = data->next_;
    if (data->isolate() == isolate) Remove(data);
    data = next;
  }
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
// Logs an isolate lifecycle event (with the given tag) when the
// --trace-isolates flag is enabled; compiles to nothing in release builds.
#define TRACE_ISOLATE(tag) \
  do { \
    if (FLAG_trace_isolates) { \
      PrintF("Isolate %p (id %d)" #tag "\n", \
             reinterpret_cast<void*>(this), id()); \
    } \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
|
|
|
|
|
2014-10-08 11:51:57 +00:00
|
|
|
// Constructs an Isolate. Most subsystem pointers start out NULL here and
// are allocated later in Isolate::Init(); only a handful of eagerly-needed
// helpers (thread manager, logging/counters, Debug, memcopy functions) are
// created in the constructor body.
//
// enable_serializer: recorded in serializer_enabled_; consulted later by
// code that must behave differently when building a snapshot.
Isolate::Isolate(bool enable_serializer)
    : embedder_data_(),
      entry_stack_(NULL),
      stack_trace_nesting_level_(0),
      incomplete_message_(NULL),
      bootstrapper_(NULL),
      runtime_profiler_(NULL),
      compilation_cache_(NULL),
      counters_(NULL),
      logger_(NULL),
      stats_table_(NULL),
      load_stub_cache_(NULL),
      store_stub_cache_(NULL),
      code_aging_helper_(NULL),
      deoptimizer_data_(NULL),
      deoptimizer_lazy_throw_(false),
      materialized_object_store_(NULL),
      capture_stack_trace_for_uncaught_exceptions_(false),
      stack_trace_for_uncaught_exceptions_frame_limit_(0),
      stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
      keyed_lookup_cache_(NULL),
      context_slot_cache_(NULL),
      descriptor_lookup_cache_(NULL),
      handle_scope_implementer_(NULL),
      unicode_cache_(NULL),
      runtime_zone_(&allocator_),
      interface_descriptor_zone_(&allocator_),
      inner_pointer_to_code_cache_(NULL),
      global_handles_(NULL),
      eternal_handles_(NULL),
      thread_manager_(NULL),
      has_installed_extensions_(false),
      regexp_stack_(NULL),
      date_cache_(NULL),
      call_descriptor_data_(NULL),
      // TODO(bmeurer) Initialized lazily because it depends on flags; can
      // be fixed once the default isolate cleanup is done.
      random_number_generator_(NULL),
      rail_mode_(PERFORMANCE_ANIMATION),
      serializer_enabled_(enable_serializer),
      has_fatal_error_(false),
      initialized_from_snapshot_(false),
      is_tail_call_elimination_enabled_(true),
      cpu_profiler_(NULL),
      heap_profiler_(NULL),
      code_event_dispatcher_(new CodeEventDispatcher()),
      function_entry_hook_(NULL),
      deferred_handles_head_(NULL),
      optimizing_compile_dispatcher_(NULL),
      stress_deopt_count_(0),
      virtual_handler_register_(NULL),
      virtual_slot_register_(NULL),
      next_optimization_id_(0),
      js_calls_from_api_counter_(0),
#if TRACE_MAPS
      next_unique_sfi_id_(0),
#endif
      is_running_microtasks_(false),
      use_counter_callback_(NULL),
      basic_block_profiler_(NULL),
      cancelable_task_manager_(new CancelableTaskManager()),
      abort_on_uncaught_exception_callback_(NULL) {
  {
    // The process-wide thread data table must already exist (it is created
    // by global initialization before any Isolate can be constructed).
    base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
    CHECK(thread_data_table_);
  }
  // Hand out a unique id from the process-wide atomic counter.
  id_ = base::NoBarrier_AtomicIncrement(&isolate_counter_, 1);
  TRACE_ISOLATE(constructor);

  memset(isolate_addresses_, 0,
         sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));

  // Embedded sub-objects need a back-pointer to their owning isolate.
  heap_.isolate_ = this;
  stack_guard_.isolate_ = this;

  // ThreadManager is initialized early to support locking an isolate
  // before it is entered.
  thread_manager_ = new ThreadManager();
  thread_manager_->isolate_ = this;

#ifdef DEBUG
  // heap_histograms_ initializes itself.
  memset(&js_spill_information_, 0, sizeof(js_spill_information_));
#endif

  handle_scope_data_.Initialize();

// Zero-/default-initialize all scalar fields declared via ISOLATE_INIT_LIST.
#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

// Zero-fill all array fields declared via ISOLATE_INIT_ARRAY_LIST.
#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE

  // Logger/Counters and the debugger are needed before Init() runs.
  InitializeLoggingAndCounters();
  debug_ = new Debug(this);

  init_memcopy_functions(this);
}
|
|
|
|
|
2013-01-30 12:19:32 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Shuts this isolate down and destroys it. After Deinit() has released all
// subsystems, the isolate unregisters its per-thread data and deletes
// itself; callers must not touch the Isolate afterwards.
void Isolate::TearDown() {
  TRACE_ISOLATE(tear_down);

  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
  DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
  Isolate* saved_isolate =
      reinterpret_cast<Isolate*>(base::Thread::GetThreadLocal(isolate_key_));
  SetIsolateThreadLocals(this, NULL);

  Deinit();

  {
    // Drop every thread's PerIsolateThreadData entry for this isolate from
    // the process-wide table, under the table's mutex.
    base::LockGuard<base::Mutex> lock_guard(thread_data_table_mutex_.Pointer());
    thread_data_table_->RemoveAllThreads(this);
  }

  delete this;

  // Restore the previous current isolate. Safe after `delete this` because
  // SetIsolateThreadLocals only writes process-wide TLS keys and reads no
  // Isolate members.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}
|
|
|
|
|
|
|
|
|
2013-02-12 11:57:51 +00:00
|
|
|
void Isolate::GlobalTearDown() {
|
|
|
|
delete thread_data_table_;
|
2014-08-26 13:07:18 +00:00
|
|
|
thread_data_table_ = NULL;
|
2013-02-12 11:57:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-03-20 11:17:09 +00:00
|
|
|
void Isolate::ClearSerializerData() {
|
|
|
|
delete external_reference_table_;
|
|
|
|
external_reference_table_ = NULL;
|
|
|
|
delete external_reference_map_;
|
|
|
|
external_reference_map_ = NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Releases the isolate's subsystems before the Isolate object itself is
// deleted (called from TearDown()). The statement order below is
// deliberate — e.g. the sampler is stopped before other components are
// torn down, and the heap is torn down before the logger.
void Isolate::Deinit() {
  TRACE_ISOLATE(deinit);

  debug()->Unload();

  FreeThreadResources();

  // Stop and delete the background recompilation dispatcher, if enabled.
  if (concurrent_recompilation_enabled()) {
    optimizing_compile_dispatcher_->Stop();
    delete optimizing_compile_dispatcher_;
    optimizing_compile_dispatcher_ = NULL;
  }

  // Let any in-flight concurrent sweeping finish before tearing down.
  if (heap_.mark_compact_collector()->sweeping_in_progress()) {
    heap_.mark_compact_collector()->EnsureSweepingCompleted();
  }

  DumpAndResetCompilationStats();

  if (FLAG_print_deopt_stress) {
    PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
  }

  if (cpu_profiler_) {
    cpu_profiler_->DeleteAllProfiles();
  }

  // We must stop the logger before we tear down other components.
  sampler::Sampler* sampler = logger_->sampler();
  if (sampler && sampler->IsActive()) sampler->Stop();

  delete deoptimizer_data_;
  deoptimizer_data_ = NULL;
  builtins_.TearDown();
  bootstrapper_->TearDown();

  if (runtime_profiler_ != NULL) {
    delete runtime_profiler_;
    runtime_profiler_ = NULL;
  }

  delete basic_block_profiler_;
  basic_block_profiler_ = NULL;

  delete heap_profiler_;
  heap_profiler_ = NULL;

  heap_.TearDown();
  logger_->TearDown();

  delete interpreter_;
  interpreter_ = NULL;

  // Cancel pending tasks and wait for running ones before deleting the
  // components they might reference.
  cancelable_task_manager()->CancelAndWait();

  delete cpu_profiler_;
  cpu_profiler_ = NULL;

  code_event_dispatcher_.reset();

  delete root_index_map_;
  root_index_map_ = NULL;

  ClearSerializerData();
}
|
|
|
|
|
|
|
|
|
|
|
|
// Publishes |isolate| and its per-thread |data| in the current thread's TLS
// slots. Passing NULLs clears the slots. Note this touches only the
// process-wide TLS keys and reads no Isolate members, which is why
// TearDown() may call it even after `delete this`.
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  base::Thread::SetThreadLocal(isolate_key_, isolate);
  base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Frees the heap-allocated members created in the constructor and Init().
// Reached via `delete this` in TearDown(), i.e. after Deinit() has already
// shut the subsystems down; the deletion order below is deliberate.
Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

  // Has to be called while counters_ are still alive
  runtime_zone_.DeleteKeptSegment();

  // The entry stack must be empty when we get here.
  DCHECK(entry_stack_ == NULL || entry_stack_->previous_item == NULL);

  delete entry_stack_;
  entry_stack_ = NULL;

  delete unicode_cache_;
  unicode_cache_ = NULL;

  delete date_cache_;
  date_cache_ = NULL;

  // Allocated with new[] in Init(), hence delete[].
  delete[] call_descriptor_data_;
  call_descriptor_data_ = NULL;

  delete regexp_stack_;
  regexp_stack_ = NULL;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = NULL;
  delete context_slot_cache_;
  context_slot_cache_ = NULL;
  delete keyed_lookup_cache_;
  keyed_lookup_cache_ = NULL;

  delete load_stub_cache_;
  load_stub_cache_ = NULL;
  delete store_stub_cache_;
  store_stub_cache_ = NULL;
  delete code_aging_helper_;
  code_aging_helper_ = NULL;
  delete stats_table_;
  stats_table_ = NULL;

  delete materialized_object_store_;
  materialized_object_store_ = NULL;

  delete logger_;
  logger_ = NULL;

  delete counters_;
  counters_ = NULL;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = NULL;

  delete code_tracer();
  set_code_tracer(NULL);

  delete compilation_cache_;
  compilation_cache_ = NULL;
  delete bootstrapper_;
  bootstrapper_ = NULL;
  delete inner_pointer_to_code_cache_;
  inner_pointer_to_code_cache_ = NULL;

  delete thread_manager_;
  thread_manager_ = NULL;

  delete global_handles_;
  global_handles_ = NULL;
  delete eternal_handles_;
  eternal_handles_ = NULL;

  delete string_stream_debug_object_cache_;
  string_stream_debug_object_cache_ = NULL;

  delete random_number_generator_;
  random_number_generator_ = NULL;

  delete debug_;
  debug_ = NULL;

  delete cancelable_task_manager_;
  cancelable_task_manager_ = nullptr;

#if USE_SIMULATOR
  // On simulator builds, release the shared instruction cache and the
  // runtime-call redirection list.
  Simulator::TearDown(simulator_i_cache_, simulator_redirection_);
  simulator_i_cache_ = nullptr;
  simulator_redirection_ = nullptr;
#endif
}
|
|
|
|
|
|
|
|
|
|
|
|
// Points the embedded ThreadLocalTop at this isolate and resets its
// per-thread state via ThreadLocalTop::Initialize().
void Isolate::InitializeThreadLocal() {
  thread_local_top_.isolate_ = this;
  thread_local_top_.Initialize();
}
|
|
|
|
|
|
|
|
|
2015-03-19 13:22:42 +00:00
|
|
|
bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
|
|
|
|
Object* exception = pending_exception();
|
|
|
|
|
|
|
|
if (IsJavaScriptHandlerOnTop(exception)) {
|
|
|
|
thread_local_top_.external_caught_exception_ = false;
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!IsExternalHandlerOnTop(exception)) {
|
|
|
|
thread_local_top_.external_caught_exception_ = false;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
thread_local_top_.external_caught_exception_ = true;
|
|
|
|
if (!is_catchable_by_javascript(exception)) {
|
|
|
|
try_catch_handler()->can_continue_ = false;
|
|
|
|
try_catch_handler()->has_terminated_ = true;
|
|
|
|
try_catch_handler()->exception_ = heap()->null_value();
|
|
|
|
} else {
|
|
|
|
v8::TryCatch* handler = try_catch_handler();
|
|
|
|
DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
|
2016-06-06 12:58:10 +00:00
|
|
|
thread_local_top_.pending_message_obj_->IsTheHole(this));
|
2015-03-19 13:22:42 +00:00
|
|
|
handler->can_continue_ = true;
|
|
|
|
handler->has_terminated_ = false;
|
|
|
|
handler->exception_ = pending_exception();
|
|
|
|
// Propagate to the external try-catch only if we got an actual message.
|
2016-06-06 12:58:10 +00:00
|
|
|
if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;
|
2015-03-19 13:22:42 +00:00
|
|
|
|
|
|
|
handler->message_obj_ = thread_local_top_.pending_message_obj_;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
void Isolate::InitializeLoggingAndCounters() {
|
|
|
|
if (logger_ == NULL) {
|
2013-02-15 09:27:10 +00:00
|
|
|
logger_ = new Logger(this);
|
2011-08-04 15:18:18 +00:00
|
|
|
}
|
|
|
|
if (counters_ == NULL) {
|
2013-04-24 13:52:26 +00:00
|
|
|
counters_ = new Counters(this);
|
2011-08-04 15:18:18 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool Isolate::Init(Deserializer* des) {
|
|
|
|
TRACE_ISOLATE(init);
|
|
|
|
|
2013-07-18 08:12:01 +00:00
|
|
|
stress_deopt_count_ = FLAG_deopt_every_n_times;
|
|
|
|
|
2013-09-03 09:35:26 +00:00
|
|
|
has_fatal_error_ = false;
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
if (function_entry_hook() != NULL) {
|
|
|
|
// When function entry hooking is in effect, we have to create the code
|
|
|
|
// stubs from scratch to get entry hooks, rather than loading the previously
|
|
|
|
// generated stubs from disk.
|
|
|
|
// If this assert fires, the initialization path has regressed.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(des == NULL);
|
2013-06-28 13:40:41 +00:00
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// The initialization process does not handle memory exhaustion.
|
2015-09-08 14:42:27 +00:00
|
|
|
AlwaysAllocateScope always_allocate(this);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-09-05 11:27:22 +00:00
|
|
|
// Safe after setting Heap::isolate_, and initializing StackGuard
|
2011-08-04 15:18:18 +00:00
|
|
|
heap_.SetStackLimits();
|
|
|
|
|
2011-09-08 16:29:57 +00:00
|
|
|
#define ASSIGN_ELEMENT(CamelName, hacker_name) \
|
|
|
|
isolate_addresses_[Isolate::k##CamelName##Address] = \
|
|
|
|
reinterpret_cast<Address>(hacker_name##_address());
|
|
|
|
FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
|
2013-06-20 06:13:03 +00:00
|
|
|
#undef ASSIGN_ELEMENT
|
2011-08-04 15:18:18 +00:00
|
|
|
|
|
|
|
compilation_cache_ = new CompilationCache(this);
|
|
|
|
keyed_lookup_cache_ = new KeyedLookupCache();
|
|
|
|
context_slot_cache_ = new ContextSlotCache();
|
|
|
|
descriptor_lookup_cache_ = new DescriptorLookupCache();
|
|
|
|
unicode_cache_ = new UnicodeCache();
|
2011-09-20 10:08:39 +00:00
|
|
|
inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
|
2011-08-04 15:18:18 +00:00
|
|
|
global_handles_ = new GlobalHandles(this);
|
2013-08-05 09:46:23 +00:00
|
|
|
eternal_handles_ = new EternalHandles();
|
2013-02-15 09:27:10 +00:00
|
|
|
bootstrapper_ = new Bootstrapper(this);
|
2011-08-04 15:18:18 +00:00
|
|
|
handle_scope_implementer_ = new HandleScopeImplementer(this);
|
2016-07-13 10:24:55 +00:00
|
|
|
load_stub_cache_ = new StubCache(this, Code::LOAD_IC);
|
|
|
|
store_stub_cache_ = new StubCache(this, Code::STORE_IC);
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
materialized_object_store_ = new MaterializedObjectStore(this);
|
2011-08-04 15:18:18 +00:00
|
|
|
regexp_stack_ = new RegExpStack();
|
|
|
|
regexp_stack_->isolate_ = this;
|
2012-03-09 12:07:29 +00:00
|
|
|
date_cache_ = new DateCache();
|
2014-09-03 10:51:51 +00:00
|
|
|
call_descriptor_data_ =
|
|
|
|
new CallInterfaceDescriptorData[CallDescriptors::NUMBER_OF_DESCRIPTORS];
|
2013-04-16 08:54:33 +00:00
|
|
|
cpu_profiler_ = new CpuProfiler(this);
|
|
|
|
heap_profiler_ = new HeapProfiler(heap());
|
2015-07-23 14:21:26 +00:00
|
|
|
interpreter_ = new interpreter::Interpreter(this);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Enable logging before setting up the heap
|
2013-04-24 14:44:08 +00:00
|
|
|
logger_->SetUp(this);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Initialize other runtime facilities
|
|
|
|
#if defined(USE_SIMULATOR)
|
2015-01-16 07:42:00 +00:00
|
|
|
#if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_MIPS || \
|
2016-03-10 14:02:50 +00:00
|
|
|
V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_S390
|
2011-05-05 18:55:31 +00:00
|
|
|
Simulator::Initialize(this);
|
2011-03-18 20:35:07 +00:00
|
|
|
#endif
|
|
|
|
#endif
|
|
|
|
|
2015-11-25 15:24:51 +00:00
|
|
|
code_aging_helper_ = new CodeAgingHelper(this);
|
2014-05-06 11:25:37 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
{ // NOLINT
|
|
|
|
// Ensure that the thread has a valid stack guard. The v8::Locker object
|
|
|
|
// will ensure this too, but we don't have to use lockers if we are only
|
|
|
|
// using one thread.
|
|
|
|
ExecutionAccess lock(this);
|
|
|
|
stack_guard_.InitThread(lock);
|
|
|
|
}
|
|
|
|
|
2012-01-13 13:09:52 +00:00
|
|
|
// SetUp the object heap.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!heap_.HasBeenSetUp());
|
2013-02-25 14:03:09 +00:00
|
|
|
if (!heap_.SetUp()) {
|
2013-01-10 15:53:11 +00:00
|
|
|
V8::FatalProcessOutOfMemory("heap setup");
|
2011-03-18 20:35:07 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2016-04-06 11:38:31 +00:00
|
|
|
deoptimizer_data_ = new DeoptimizerData(heap()->memory_allocator());
|
2013-02-25 14:03:09 +00:00
|
|
|
|
|
|
|
const bool create_heap_objects = (des == NULL);
|
|
|
|
if (create_heap_objects && !heap_.CreateHeapObjects()) {
|
|
|
|
V8::FatalProcessOutOfMemory("heap object creation");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2012-06-19 18:38:03 +00:00
|
|
|
if (create_heap_objects) {
|
2016-03-17 10:32:36 +00:00
|
|
|
// Terminate the partial snapshot cache so we can iterate.
|
2015-02-24 11:31:12 +00:00
|
|
|
partial_snapshot_cache_.Add(heap_.undefined_value());
|
2012-06-19 18:38:03 +00:00
|
|
|
}
|
|
|
|
|
2011-07-05 15:49:39 +00:00
|
|
|
InitializeThreadLocal();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bootstrapper_->Initialize(create_heap_objects);
|
2013-09-03 06:57:16 +00:00
|
|
|
builtins_.SetUp(this, create_heap_objects);
|
2016-07-21 09:10:42 +00:00
|
|
|
if (create_heap_objects) {
|
|
|
|
heap_.CreateFixedStubs();
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-10-15 11:16:42 +00:00
|
|
|
if (FLAG_log_internal_timer_events) {
|
|
|
|
set_event_logger(Logger::DefaultEventLoggerSentinel);
|
|
|
|
}
|
|
|
|
|
2016-05-10 07:56:36 +00:00
|
|
|
if (FLAG_trace_hydrogen || FLAG_trace_hydrogen_stubs || FLAG_trace_turbo ||
|
|
|
|
FLAG_trace_turbo_graph) {
|
2013-11-19 11:52:47 +00:00
|
|
|
PrintF("Concurrent recompilation has been disabled for tracing.\n");
|
2015-04-15 07:15:52 +00:00
|
|
|
} else if (OptimizingCompileDispatcher::Enabled()) {
|
2015-04-14 13:57:35 +00:00
|
|
|
optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
|
2013-11-19 11:52:47 +00:00
|
|
|
}
|
|
|
|
|
2014-10-20 11:42:56 +00:00
|
|
|
// Initialize runtime profiler before deserialization, because collections may
|
|
|
|
// occur, clearing/updating ICs.
|
|
|
|
runtime_profiler_ = new RuntimeProfiler(this);
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// If we are deserializing, read the state into the now-empty heap.
|
2012-06-19 18:38:03 +00:00
|
|
|
if (!create_heap_objects) {
|
2013-09-03 11:54:08 +00:00
|
|
|
des->Deserialize(this);
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
2016-07-13 10:24:55 +00:00
|
|
|
load_stub_cache_->Initialize();
|
|
|
|
store_stub_cache_->Initialize();
|
2016-04-08 13:24:30 +00:00
|
|
|
if (FLAG_ignition || serializer_enabled()) {
|
2016-03-24 17:12:16 +00:00
|
|
|
interpreter_->Initialize();
|
|
|
|
}
|
2015-08-17 11:24:34 +00:00
|
|
|
|
2011-10-10 09:21:48 +00:00
|
|
|
// Finish initialization of ThreadLocal after deserialization is done.
|
|
|
|
clear_pending_exception();
|
|
|
|
clear_pending_message();
|
|
|
|
clear_scheduled_exception();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Deserializing may put strange things in the root array's copy of the
|
|
|
|
// stack guard.
|
|
|
|
heap_.SetStackLimits();
|
|
|
|
|
2012-04-20 14:12:49 +00:00
|
|
|
// Quiet the heap NaN if needed on target platform.
|
2012-07-13 07:22:11 +00:00
|
|
|
if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());
|
2012-04-20 14:12:49 +00:00
|
|
|
|
2014-10-14 08:43:33 +00:00
|
|
|
if (FLAG_trace_turbo) {
|
2014-10-23 11:56:26 +00:00
|
|
|
// Create an empty file.
|
|
|
|
std::ofstream(GetTurboCfgFileName().c_str(), std::ios_base::trunc);
|
2014-10-14 08:43:33 +00:00
|
|
|
}
|
|
|
|
|
2012-04-23 16:42:34 +00:00
|
|
|
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
|
|
|
|
Internals::kIsolateEmbedderDataOffset);
|
|
|
|
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
|
|
|
|
Internals::kIsolateRootsOffset);
|
2016-06-21 09:25:08 +00:00
|
|
|
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_)),
|
|
|
|
Internals::kExternalMemoryOffset);
|
|
|
|
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_limit_)),
|
|
|
|
Internals::kExternalMemoryLimitOffset);
|
2012-04-23 15:09:59 +00:00
|
|
|
|
2015-11-12 13:42:49 +00:00
|
|
|
time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
|
2012-12-18 16:25:45 +00:00
|
|
|
|
2014-10-08 11:51:57 +00:00
|
|
|
heap_.NotifyDeserializationComplete();
|
|
|
|
|
2012-12-18 16:25:45 +00:00
|
|
|
if (!create_heap_objects) {
|
|
|
|
// Now that the heap is consistent, it's OK to generate the code for the
|
|
|
|
// deopt entry table that might have been referred to by optimized code in
|
|
|
|
// the snapshot.
|
|
|
|
HandleScope scope(this);
|
|
|
|
Deoptimizer::EnsureCodeForDeoptimizationEntry(
|
2016-03-01 14:42:57 +00:00
|
|
|
this, Deoptimizer::LAZY,
|
|
|
|
ExternalReferenceTable::kDeoptTableSerializeEntryCount - 1);
|
2013-01-29 12:50:42 +00:00
|
|
|
}
|
2013-01-29 09:12:20 +00:00
|
|
|
|
2014-05-22 09:36:20 +00:00
|
|
|
if (!serializer_enabled()) {
|
2013-03-06 16:15:01 +00:00
|
|
|
// Ensure that all stubs which need to be generated ahead of time, but
|
|
|
|
// cannot be serialized into the snapshot have been generated.
|
2013-01-29 12:50:42 +00:00
|
|
|
HandleScope scope(this);
|
2013-02-27 12:33:24 +00:00
|
|
|
CodeStub::GenerateFPStubs(this);
|
2013-04-04 17:55:43 +00:00
|
|
|
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
|
2013-02-27 12:33:24 +00:00
|
|
|
StubFailureTrampolineStub::GenerateAheadOfTime(this);
|
2012-12-18 16:25:45 +00:00
|
|
|
}
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
initialized_from_snapshot_ = (des != NULL);
|
|
|
|
|
2014-12-15 14:01:57 +00:00
|
|
|
if (!FLAG_inline_new) heap_.DisableInlineAllocation();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
// Initialized lazily to allow early
|
|
|
|
// v8::V8::SetAddHistogramSampleFunction calls.
|
|
|
|
StatsTable* Isolate::stats_table() {
|
|
|
|
if (stats_table_ == NULL) {
|
|
|
|
stats_table_ = new StatsTable;
|
|
|
|
}
|
|
|
|
return stats_table_;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Makes this isolate the active isolate for the calling thread. Pushes an
// EntryStackItem recording the previously-active isolate/thread data so
// nested Enter/Exit pairs — including cross-isolate re-entry — unwind
// correctly in Exit().
void Isolate::Enter() {
  Isolate* current_isolate = NULL;
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
  if (current_data != NULL) {
    current_isolate = current_data->isolate_;
    DCHECK(current_isolate != NULL);
    if (current_isolate == this) {
      DCHECK(Current() == this);
      DCHECK(entry_stack_ != NULL);
      DCHECK(entry_stack_->previous_thread_data == NULL ||
             entry_stack_->previous_thread_data->thread_id().Equals(
                 ThreadId::Current()));
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
  DCHECK(data != NULL);
  DCHECK(data->isolate_ == this);

  // Remember what was active before so Exit() can restore it.
  EntryStackItem* item = new EntryStackItem(current_data,
                                            current_isolate,
                                            entry_stack_);
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}
|
|
|
|
|
|
|
|
|
|
|
|
// Undoes the most recent Enter() on this thread. Only when the outermost
// Enter() is matched does this pop the entry stack and restore the
// previously-active isolate's thread-local state.
void Isolate::Exit() {
  DCHECK(entry_stack_ != NULL);
  DCHECK(entry_stack_->previous_thread_data == NULL ||
         entry_stack_->previous_thread_data->thread_id().Equals(
             ThreadId::Current()));

  // Nested Enter()s on the same thread just decrement the count.
  if (--entry_stack_->entry_count > 0) return;

  DCHECK(CurrentPerIsolateThreadData() != NULL);
  DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}
|
|
|
|
|
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
// Pushes |deferred| onto the front of the isolate's doubly-linked list of
// DeferredHandles blocks. Counterpart of UnlinkDeferredHandles().
void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
  DeferredHandles* old_head = deferred_handles_head_;
  deferred->next_ = old_head;
  if (old_head != NULL) old_head->previous_ = deferred;
  deferred_handles_head_ = deferred;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Removes |deferred| from the isolate's doubly-linked list of
// DeferredHandles blocks. Counterpart of LinkDeferredHandles().
void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
#ifdef DEBUG
  // In debug mode assert that the linked list is well-formed: walking the
  // previous_ links from |deferred| must reach the list head.
  DeferredHandles* deferred_iterator = deferred;
  while (deferred_iterator->previous_ != NULL) {
    deferred_iterator = deferred_iterator->previous_;
  }
  DCHECK(deferred_handles_head_ == deferred_iterator);
#endif
  // Standard doubly-linked-list unlink: fix the head, then splice the
  // neighbors around |deferred|.
  if (deferred_handles_head_ == deferred) {
    deferred_handles_head_ = deferred_handles_head_->next_;
  }
  if (deferred->next_ != NULL) {
    deferred->next_->previous_ = deferred->previous_;
  }
  if (deferred->previous_ != NULL) {
    deferred->previous_->next_ = deferred->next_;
  }
}
|
|
|
|
|
|
|
|
|
2014-11-21 09:53:04 +00:00
|
|
|
// Prints any accumulated TurboFan/Crankshaft compilation statistics and
// runtime-call statistics to stdout, then deletes and clears the
// statistics objects so a fresh collection can begin.
void Isolate::DumpAndResetCompilationStats() {
  if (turbo_statistics() != nullptr) {
    // Statistics are only collected when one of these flags is on.
    DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);

    OFStream os(stdout);
    if (FLAG_turbo_stats) {
      // Human-readable table form.
      AsPrintableStatistics ps = {*turbo_statistics(), false};
      os << ps << std::endl;
    }
    if (FLAG_turbo_stats_nvp) {
      // Machine-readable name=value-pair form.
      AsPrintableStatistics ps = {*turbo_statistics(), true};
      os << ps << std::endl;
    }
  }
  if (hstatistics() != nullptr) hstatistics()->Print();
  // Reset both collectors; the isolate owns them.
  delete turbo_statistics_;
  turbo_statistics_ = nullptr;
  delete hstatistics_;
  hstatistics_ = nullptr;
  if (FLAG_runtime_call_stats) {
    OFStream os(stdout);
    counters()->runtime_call_stats()->Print(os);
    counters()->runtime_call_stats()->Reset();
  }
}
|
|
|
|
|
|
|
|
|
2013-03-06 10:49:34 +00:00
|
|
|
// Lazily creates and returns the Crankshaft (Hydrogen) compilation
// statistics collector for this isolate.
HStatistics* Isolate::GetHStatistics() {
  HStatistics* stats = hstatistics();
  if (stats == NULL) {
    stats = new HStatistics();
    set_hstatistics(stats);
  }
  return stats;
}
|
|
|
|
|
|
|
|
|
2014-10-23 09:14:35 +00:00
|
|
|
// Lazily creates and returns the TurboFan compilation statistics
// collector for this isolate.
CompilationStatistics* Isolate::GetTurboStatistics() {
  CompilationStatistics* stats = turbo_statistics();
  if (stats == NULL) {
    stats = new CompilationStatistics();
    set_turbo_statistics(stats);
  }
  return stats;
}
|
|
|
|
|
|
|
|
|
2013-03-06 07:25:46 +00:00
|
|
|
// Lazily creates and returns the Hydrogen tracer for this isolate.
HTracer* Isolate::GetHTracer() {
  HTracer* tracer = htracer();
  if (tracer == NULL) {
    tracer = new HTracer(id());
    set_htracer(tracer);
  }
  return tracer;
}
|
|
|
|
|
|
|
|
|
2013-11-07 16:35:27 +00:00
|
|
|
// Lazily creates and returns the code tracer for this isolate.
CodeTracer* Isolate::GetCodeTracer() {
  CodeTracer* tracer = code_tracer();
  if (tracer == NULL) {
    tracer = new CodeTracer(id());
    set_code_tracer(tracer);
  }
  return tracer;
}
|
|
|
|
|
2016-03-01 16:05:38 +00:00
|
|
|
// Returns the current native context's initial JSArray map for |kind|,
// or nullptr if |kind| is not a fast elements kind or the map has not
// been installed yet (e.g. during bootstrapping).
Map* Isolate::get_initial_js_array_map(ElementsKind kind) {
  if (IsFastElementsKind(kind)) {
    DisallowHeapAllocation no_gc;
    Object* const initial_js_array_map =
        context()->native_context()->get(Context::ArrayMapIndex(kind));
    if (!initial_js_array_map->IsUndefined(this)) {
      return Map::cast(initial_js_array_map);
    }
  }
  return nullptr;
}
|
|
|
|
|
|
|
|
|
2014-05-22 09:36:20 +00:00
|
|
|
bool Isolate::use_crankshaft() const {
|
|
|
|
return FLAG_crankshaft &&
|
|
|
|
!serializer_enabled_ &&
|
|
|
|
CpuFeatures::SupportsCrankshaft();
|
|
|
|
}
|
|
|
|
|
2016-05-12 08:50:18 +00:00
|
|
|
// Returns true if |object| is the initial Object.prototype or the initial
// Array.prototype of any native context in this isolate.
bool Isolate::IsArrayOrObjectPrototype(Object* object) {
  Object* link = heap()->native_contexts_list();
  while (!link->IsUndefined(this)) {
    Context* native_context = Context::cast(link);
    bool is_initial_prototype =
        native_context->initial_object_prototype() == object ||
        native_context->initial_array_prototype() == object;
    if (is_initial_prototype) return true;
    link = native_context->next_context_link();
  }
  return false;
}
|
|
|
|
|
|
|
|
// Returns true if any native context in this isolate stores |object| at
// context slot |index|.
bool Isolate::IsInAnyContext(Object* object, uint32_t index) {
  DisallowHeapAllocation no_gc;
  Object* link = heap()->native_contexts_list();
  while (!link->IsUndefined(this)) {
    Context* native_context = Context::cast(link);
    if (native_context->get(index) == object) return true;
    link = native_context->next_context_link();
  }
  return false;
}
|
2014-05-22 09:36:20 +00:00
|
|
|
|
2013-05-13 07:35:26 +00:00
|
|
|
// Returns whether the "array protector" cell still reports the fast-array
// prototype chain as intact (no elements added to Array.prototype or
// Object.prototype). In debug builds, cross-checks the cell against the
// actual state of the prototype chain.
bool Isolate::IsFastArrayConstructorPrototypeChainIntact() {
  PropertyCell* no_elements_cell = heap()->array_protector();
  bool cell_reports_intact =
      no_elements_cell->value()->IsSmi() &&
      Smi::cast(no_elements_cell->value())->value() == kArrayProtectorValid;

#ifdef DEBUG
  Map* root_array_map =
      get_initial_js_array_map(GetInitialFastElementsKind());
  Context* native_context = context()->native_context();
  JSObject* initial_array_proto = JSObject::cast(
      native_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  JSObject* initial_object_proto = JSObject::cast(
      native_context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));

  if (root_array_map == NULL || initial_array_proto == initial_object_proto) {
    // We are in the bootstrapping process, and the entire check sequence
    // shouldn't be performed.
    return cell_reports_intact;
  }

  // Check that the array prototype hasn't been altered WRT empty elements.
  if (root_array_map->prototype() != initial_array_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Array.prototype itself must still have no own elements.
  FixedArrayBase* elements = initial_array_proto->elements();
  if (elements != heap()->empty_fixed_array() &&
      elements != heap()->empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the object prototype hasn't been altered WRT empty elements.
  PrototypeIterator iter(this, initial_array_proto);
  if (iter.IsAtEnd() || iter.GetCurrent() != initial_object_proto) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  elements = initial_object_proto->elements();
  if (elements != heap()->empty_fixed_array() &&
      elements != heap()->empty_slow_element_dictionary()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Object.prototype must be the end of the chain.
  iter.Advance();
  if (!iter.IsAtEnd()) {
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }
#endif

  return cell_reports_intact;
}
|
|
|
|
|
2016-05-12 08:50:18 +00:00
|
|
|
// Returns whether the @@isConcatSpreadable protector cell still reports
// that no object has had @@isConcatSpreadable set, so Array.prototype.concat
// may take its fast path. In debug builds, cross-checks the cell against
// an actual lookup on Array.prototype.
bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
  Cell* is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
  bool is_is_concat_spreadable_set =
      Smi::cast(is_concat_spreadable_cell->value())->value() ==
      kArrayProtectorInvalid;
#ifdef DEBUG
  Map* root_array_map = get_initial_js_array_map(GetInitialFastElementsKind());
  if (root_array_map == NULL) {
    // Ignore the value of is_concat_spreadable during bootstrap.
    return !is_is_concat_spreadable_set;
  }
  Handle<Object> array_prototype(array_function()->prototype(), this);
  Handle<Symbol> key = factory()->is_concat_spreadable_symbol();
  Handle<Object> value;
  LookupIterator it(array_prototype, key);
  if (it.IsFound() && !JSReceiver::GetDataProperty(&it)->IsUndefined(this)) {
    // TODO(cbruni): Currently we do not revert if we unset the
    // @@isConcatSpreadable property on Array.prototype or Object.prototype
    // hence the reverse implication doesn't hold.
    DCHECK(is_is_concat_spreadable_set);
    return false;
  }
#endif  // DEBUG

  return !is_is_concat_spreadable_set;
}
|
2015-04-22 10:35:23 +00:00
|
|
|
|
|
|
|
// Invalidates the array protector cell when an element is added to
// Array.prototype or Object.prototype, disabling fast-path array
// operations that assume those prototypes have no elements.
void Isolate::UpdateArrayProtectorOnSetElement(Handle<JSObject> object) {
  DisallowHeapAllocation no_gc;
  // Only prototype objects can affect the protector.
  if (!object->map()->is_prototype_map()) return;
  // Already invalidated — nothing to do.
  if (!IsFastArrayConstructorPrototypeChainIntact()) return;
  // Only the initial Array.prototype / Object.prototype matter.
  if (!IsArrayOrObjectPrototype(*object)) return;
  PropertyCell::SetValueWithInvalidation(
      factory()->array_protector(),
      handle(Smi::FromInt(kArrayProtectorInvalid), this));
}
|
|
|
|
|
2016-05-17 11:23:59 +00:00
|
|
|
void Isolate::InvalidateHasInstanceProtector() {
|
|
|
|
DCHECK(factory()->has_instance_protector()->value()->IsSmi());
|
|
|
|
DCHECK(IsHasInstanceLookupChainIntact());
|
|
|
|
PropertyCell::SetValueWithInvalidation(
|
|
|
|
factory()->has_instance_protector(),
|
|
|
|
handle(Smi::FromInt(kArrayProtectorInvalid), this));
|
|
|
|
DCHECK(!IsHasInstanceLookupChainIntact());
|
|
|
|
}
|
|
|
|
|
2016-05-12 08:50:18 +00:00
|
|
|
void Isolate::InvalidateIsConcatSpreadableProtector() {
|
|
|
|
DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
|
|
|
|
DCHECK(IsIsConcatSpreadableLookupChainIntact());
|
|
|
|
factory()->is_concat_spreadable_protector()->set_value(
|
|
|
|
Smi::FromInt(kArrayProtectorInvalid));
|
|
|
|
DCHECK(!IsIsConcatSpreadableLookupChainIntact());
|
|
|
|
}
|
|
|
|
|
|
|
|
void Isolate::InvalidateArraySpeciesProtector() {
|
|
|
|
DCHECK(factory()->species_protector()->value()->IsSmi());
|
|
|
|
DCHECK(IsArraySpeciesLookupChainIntact());
|
|
|
|
factory()->species_protector()->set_value(
|
|
|
|
Smi::FromInt(kArrayProtectorInvalid));
|
|
|
|
DCHECK(!IsArraySpeciesLookupChainIntact());
|
|
|
|
}
|
2015-04-22 10:35:23 +00:00
|
|
|
|
|
|
|
// Returns true if |array| is the initial Array.prototype of any native
// context in this isolate.
bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
  DisallowHeapAllocation no_gc;
  return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
}
|
|
|
|
|
|
|
|
|
2014-09-03 10:51:51 +00:00
|
|
|
// Returns the calling-convention descriptor data stored at |index| in
// the isolate's table; |index| must be a valid CallDescriptors entry.
CallInterfaceDescriptorData* Isolate::call_descriptor_data(int index) {
  DCHECK(0 <= index && index < CallDescriptors::NUMBER_OF_DESCRIPTORS);
  return &call_descriptor_data_[index];
}
|
|
|
|
|
|
|
|
|
2015-04-21 10:21:50 +00:00
|
|
|
// Lazily creates the isolate's random number generator, seeding it from
// --random_seed when that flag is non-zero.
base::RandomNumberGenerator* Isolate::random_number_generator() {
  if (random_number_generator_ == NULL) {
    random_number_generator_ =
        (FLAG_random_seed != 0)
            ? new base::RandomNumberGenerator(FLAG_random_seed)
            : new base::RandomNumberGenerator();
  }
  return random_number_generator_;
}
|
|
|
|
|
|
|
|
|
2013-07-19 09:39:01 +00:00
|
|
|
// Looks up the Code object containing address |a| via the GC-safe
// inner-pointer-to-code cache.
Object* Isolate::FindCodeObject(Address a) {
  return inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(a);
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
|
|
|
|
const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
|
|
|
|
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
#undef ISOLATE_FIELD_OFFSET
|
|
|
|
#endif
|
|
|
|
|
2014-03-24 16:34:06 +00:00
|
|
|
|
Move hash code from hidden string to a private symbol
* Hash code is now just done with a private own symbol instead of the hidden string, which predates symbols.
* In the long run we should do all hidden properties this way and get rid of the
hidden magic 0-length string with the zero hash code. The advantages include
less complexity and being able to do things from JS in a natural way.
* Initially, the performance of weak set regressed, because it's a little harder
to do the lookup in C++. Instead of heroics in C++ to make things faster I
moved some functionality into JS and got the performance back. JS is supposed to be good at looking up named properties on objects.
* This also changes hash codes of Smis so that they are always Smis.
Performance figures are in the comments to the code review. Summary: Most of js-perf-test/Collections is neutral. Set and Map with object keys are 40-50% better. WeakMap is -5% and WeakSet is +9%. After the measurements, I fixed global proxies, which cost 1% on most tests and 5% on the weak ones :-(.
In the code review comments is a patch with an example of the heroics we could do in C++ to make lookup faster (I hope we don't have to do this. Instead of checking for the property, then doing a new lookup to insert it, we could do one lookup and handle the addition immediately). With the current benchmarks above this buys us nothing, but if we go back to doing more lookups in C++ instead of in stubs and JS then it's a win.
In a similar vein we could give the magic zero hash code to the hash code
symbol. Then when we look up the hash code we would sometimes see the table
with all the hidden properties. This dual use of the field for either the hash
code or the table with all hidden properties and the hash code is rather ugly,
and this CL gets rid of it. I'd be loath to bring it back. On the benchmarks quoted above it's slightly slower than moving the hash code lookup to JS like in this CL.
One worry is that the benchmark results above are more monomorphic than real
world code, so may be overstating the performance benefits of moving to JS. I
think this is part of a general issue we have with handling polymorphic code in
JS and any solutions there will benefit this solution, which boils down to
regular property access. Any improvement there will lift all boats.
R=adamk@chromium.org, verwaest@chromium.org
BUG=
Review URL: https://codereview.chromium.org/1149863005
Cr-Commit-Position: refs/heads/master@{#28622}
2015-05-26 11:26:26 +00:00
|
|
|
// Creates a dictionary-mode JSObject from |map|, installs it on
// |registry| under the internalized name |cname|, and returns it. Helper
// for GetSymbolRegistry().
Handle<JSObject> Isolate::SetUpSubregistry(Handle<JSObject> registry,
                                           Handle<Map> map, const char* cname) {
  Handle<String> name = factory()->InternalizeUtf8String(cname);
  Handle<JSObject> obj = factory()->NewJSObjectFromMap(map);
  // Normalize so registry entries are stored in a property dictionary.
  JSObject::NormalizeProperties(obj, CLEAR_INOBJECT_PROPERTIES, 0,
                                "SetupSymbolRegistry");
  JSObject::AddProperty(registry, name, obj, NONE);
  return obj;
}
|
|
|
|
|
|
|
|
|
2014-03-24 16:34:06 +00:00
|
|
|
// Returns the isolate-wide symbol registry, creating it (with its
// sub-registries) on first access. An uninitialized registry slot holds
// a Smi sentinel.
Handle<JSObject> Isolate::GetSymbolRegistry() {
  if (heap()->symbol_registry()->IsSmi()) {
    Handle<Map> map = factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    Handle<JSObject> registry = factory()->NewJSObjectFromMap(map);
    heap()->set_symbol_registry(*registry);

    // One sub-registry per lookup direction / API surface.
    SetUpSubregistry(registry, map, "for");
    SetUpSubregistry(registry, map, "for_api");
    SetUpSubregistry(registry, map, "keyFor");
    SetUpSubregistry(registry, map, "private_api");
  }
  return Handle<JSObject>::cast(factory()->symbol_registry());
}
|
|
|
|
|
|
|
|
|
2016-02-19 18:48:09 +00:00
|
|
|
// Registers |callback| to run before a call is entered; duplicate
// registrations are ignored.
void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
  int count = before_call_entered_callbacks_.length();
  for (int i = 0; i < count; i++) {
    // Already registered — keep the list duplicate-free.
    if (before_call_entered_callbacks_.at(i) == callback) return;
  }
  before_call_entered_callbacks_.Add(callback);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Unregisters |callback|. At most one entry can match because
// AddBeforeCallEnteredCallback() rejects duplicates.
void Isolate::RemoveBeforeCallEnteredCallback(
    BeforeCallEnteredCallback callback) {
  for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
    if (callback == before_call_entered_callbacks_.at(i)) {
      before_call_entered_callbacks_.Remove(i);
      // Stop here: Remove() shifts later elements left, so continuing the
      // loop would skip the element now at index i and scan needlessly.
      return;
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::FireBeforeCallEnteredCallback() {
|
|
|
|
for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
|
|
|
|
before_call_entered_callbacks_.at(i)(reinterpret_cast<v8::Isolate*>(this));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-25 13:49:22 +00:00
|
|
|
// Registers |callback| to run after a call completes; duplicate
// registrations are ignored.
void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
  int count = call_completed_callbacks_.length();
  for (int i = 0; i < count; i++) {
    // Already registered — keep the list duplicate-free.
    if (call_completed_callbacks_.at(i) == callback) return;
  }
  call_completed_callbacks_.Add(callback);
}
|
|
|
|
|
|
|
|
|
|
|
|
// Unregisters |callback|. At most one entry can match because
// AddCallCompletedCallback() rejects duplicates.
void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
  for (int i = 0; i < call_completed_callbacks_.length(); i++) {
    if (callback == call_completed_callbacks_.at(i)) {
      call_completed_callbacks_.Remove(i);
      // Stop here: Remove() shifts later elements left, so continuing the
      // loop would skip the element now at index i and scan needlessly.
      return;
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
// Called when the embedder's outermost call into V8 completes.  Runs pending
// microtasks (under the kAuto policy), resets debugger stepping state, and
// then notifies all registered CallCompletedCallbacks.
void Isolate::FireCallCompletedCallback() {
  // Only act when the outermost call unwinds; nested calls return here.
  if (!handle_scope_implementer()->CallDepthIsZero()) return;

  // Microtasks are flushed automatically only if there are pending tasks,
  // no active SuppressMicrotaskExecutionScope, and the embedder left the
  // policy at kAuto (explicit/scoped policies flush elsewhere).
  bool run_microtasks =
      pending_microtask_count() &&
      !handle_scope_implementer()->HasMicrotasksSuppressions() &&
      handle_scope_implementer()->microtasks_policy() ==
          v8::MicrotasksPolicy::kAuto;

  if (run_microtasks) RunMicrotasks();
  // Prevent stepping from spilling into the next call made by the embedder.
  if (debug()->is_active()) debug()->ClearStepping();

  if (call_completed_callbacks_.is_empty()) return;
  // Fire callbacks.  Suppress microtask execution so that callbacks cannot
  // recursively trigger another flush from inside this notification.
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
  v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
  for (int i = 0; i < call_completed_callbacks_.length(); i++) {
    call_completed_callbacks_.at(i)(isolate);
  }
}
|
|
|
|
|
|
|
|
|
2014-09-30 15:29:08 +00:00
|
|
|
// Installs the embedder's promise-reject callback; a single callback is
// stored, so a later call replaces any earlier one.
void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
  promise_reject_callback_ = callback;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Reports a promise rejection event to the embedder-registered callback.
// No-op when no callback is installed.  For unhandled rejections with an
// object value, a detailed stack trace is attached to the message (it stays
// a null handle otherwise).
void Isolate::ReportPromiseReject(Handle<JSObject> promise,
                                  Handle<Object> value,
                                  v8::PromiseRejectEvent event) {
  if (promise_reject_callback_ == NULL) return;
  Handle<JSArray> stack_trace;
  if (event == v8::kPromiseRejectWithNoHandler && value->IsJSObject()) {
    stack_trace = GetDetailedStackTrace(Handle<JSObject>::cast(value));
  }
  promise_reject_callback_(v8::PromiseRejectMessage(
      v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value),
      v8::Utils::StackTraceToLocal(stack_trace)));
}
|
|
|
|
|
|
|
|
|
2014-06-03 20:12:19 +00:00
|
|
|
// Appends a microtask to the heap-resident queue.  A microtask is either a
// JSFunction (script callback) or a CallHandlerInfo (C++ callback + data).
// The backing FixedArray starts at 8 slots and doubles when full; the count
// of live entries is tracked separately in pending_microtask_count().
void Isolate::EnqueueMicrotask(Handle<Object> microtask) {
  DCHECK(microtask->IsJSFunction() || microtask->IsCallHandlerInfo());
  Handle<FixedArray> queue(heap()->microtask_queue(), this);
  int num_tasks = pending_microtask_count();
  DCHECK(num_tasks <= queue->length());
  if (num_tasks == 0) {
    // First task after a flush: allocate a fresh queue.
    queue = factory()->NewFixedArray(8);
    heap()->set_microtask_queue(*queue);
  } else if (num_tasks == queue->length()) {
    // Queue full: grow by num_tasks slots, i.e. double the capacity.
    queue = factory()->CopyFixedArrayAndGrow(queue, num_tasks);
    heap()->set_microtask_queue(*queue);
  }
  DCHECK(queue->get(num_tasks)->IsUndefined(this));
  queue->set(num_tasks, *microtask);
  set_pending_microtask_count(num_tasks + 1);
}
|
|
|
|
|
2014-05-02 19:30:54 +00:00
|
|
|
|
2014-05-19 07:57:04 +00:00
|
|
|
// Drains the microtask queue and then fires the microtasks-completed
// callbacks.  is_running_microtasks_ brackets the actual draining so
// callers can detect re-entrancy.
void Isolate::RunMicrotasks() {
  // Increase call depth to prevent recursive callbacks.
  v8::Isolate::SuppressMicrotaskExecutionScope suppress(
      reinterpret_cast<v8::Isolate*>(this));
  is_running_microtasks_ = true;
  RunMicrotasksInternal();
  is_running_microtasks_ = false;
  FireMicrotasksCompletedCallback();
}
|
|
|
|
|
2014-05-19 07:57:04 +00:00
|
|
|
|
2016-02-25 19:48:31 +00:00
|
|
|
void Isolate::RunMicrotasksInternal() {
|
2014-05-19 07:57:04 +00:00
|
|
|
while (pending_microtask_count() > 0) {
|
|
|
|
HandleScope scope(this);
|
|
|
|
int num_tasks = pending_microtask_count();
|
|
|
|
Handle<FixedArray> queue(heap()->microtask_queue(), this);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(num_tasks <= queue->length());
|
2014-05-19 07:57:04 +00:00
|
|
|
set_pending_microtask_count(0);
|
|
|
|
heap()->set_microtask_queue(heap()->empty_fixed_array());
|
|
|
|
|
2016-03-17 12:41:38 +00:00
|
|
|
Isolate* isolate = this;
|
|
|
|
FOR_WITH_HANDLE_SCOPE(isolate, int, i = 0, i, i < num_tasks, i++, {
|
2014-06-03 20:12:19 +00:00
|
|
|
Handle<Object> microtask(queue->get(i), this);
|
|
|
|
if (microtask->IsJSFunction()) {
|
|
|
|
Handle<JSFunction> microtask_function =
|
|
|
|
Handle<JSFunction>::cast(microtask);
|
2014-06-23 11:47:20 +00:00
|
|
|
SaveContext save(this);
|
|
|
|
set_context(microtask_function->context()->native_context());
|
2016-07-18 15:34:25 +00:00
|
|
|
handle_scope_implementer_->EnterMicrotaskContext(
|
|
|
|
handle(microtask_function->context(), this));
|
2014-09-01 09:11:44 +00:00
|
|
|
MaybeHandle<Object> maybe_exception;
|
2015-10-23 12:26:49 +00:00
|
|
|
MaybeHandle<Object> result = Execution::TryCall(
|
|
|
|
this, microtask_function, factory()->undefined_value(), 0, NULL,
|
|
|
|
&maybe_exception);
|
2016-07-18 15:34:25 +00:00
|
|
|
handle_scope_implementer_->LeaveMicrotaskContext();
|
2014-06-03 20:12:19 +00:00
|
|
|
// If execution is terminating, just bail out.
|
2014-09-01 09:11:44 +00:00
|
|
|
Handle<Object> exception;
|
|
|
|
if (result.is_null() && maybe_exception.is_null()) {
|
2014-06-03 20:12:19 +00:00
|
|
|
// Clear out any remaining callbacks in the queue.
|
|
|
|
heap()->set_microtask_queue(heap()->empty_fixed_array());
|
|
|
|
set_pending_microtask_count(0);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Handle<CallHandlerInfo> callback_info =
|
|
|
|
Handle<CallHandlerInfo>::cast(microtask);
|
|
|
|
v8::MicrotaskCallback callback =
|
|
|
|
v8::ToCData<v8::MicrotaskCallback>(callback_info->callback());
|
|
|
|
void* data = v8::ToCData<void*>(callback_info->data());
|
|
|
|
callback(data);
|
2014-05-28 18:40:04 +00:00
|
|
|
}
|
2016-03-17 12:41:38 +00:00
|
|
|
});
|
2014-05-19 07:57:04 +00:00
|
|
|
}
|
2014-05-02 19:30:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-02-25 19:48:31 +00:00
|
|
|
void Isolate::AddMicrotasksCompletedCallback(
    MicrotasksCompletedCallback callback) {
  // Registration is idempotent: skip callbacks that are already present.
  int count = microtasks_completed_callbacks_.length();
  for (int i = 0; i < count; i++) {
    if (microtasks_completed_callbacks_.at(i) == callback) return;
  }
  microtasks_completed_callbacks_.Add(callback);
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::RemoveMicrotasksCompletedCallback(
    MicrotasksCompletedCallback callback) {
  // AddMicrotasksCompletedCallback never registers duplicates, so at most one
  // entry can match; return as soon as it has been removed instead of
  // re-scanning the shifted tail of the list.
  for (int i = 0; i < microtasks_completed_callbacks_.length(); i++) {
    if (callback == microtasks_completed_callbacks_.at(i)) {
      microtasks_completed_callbacks_.Remove(i);
      return;
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::FireMicrotasksCompletedCallback() {
|
|
|
|
for (int i = 0; i < microtasks_completed_callbacks_.length(); i++) {
|
|
|
|
microtasks_completed_callbacks_.at(i)(reinterpret_cast<v8::Isolate*>(this));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-23 09:46:58 +00:00
|
|
|
// Installs the embedder's use-counter callback.  The DCHECK enforces that it
// is set at most once per isolate.
void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
  DCHECK(!use_counter_callback_);
  use_counter_callback_ = callback;
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
  // The counter callback may cause the embedder to call into V8, which is
  // not generally possible during GC: defer the count in that case and
  // report it later.
  if (heap_.gc_state() != Heap::NOT_IN_GC) {
    heap_.IncrementDeferredCount(feature);
    return;
  }
  if (use_counter_callback_) {
    HandleScope handle_scope(this);
    use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
  }
}
|
|
|
|
|
|
|
|
|
2014-09-29 07:29:14 +00:00
|
|
|
BasicBlockProfiler* Isolate::GetOrCreateBasicBlockProfiler() {
  // Lazily allocate the profiler on first use; subsequent calls return the
  // same instance.
  if (!basic_block_profiler_) basic_block_profiler_ = new BasicBlockProfiler();
  return basic_block_profiler_;
}
|
|
|
|
|
|
|
|
|
2014-10-23 11:56:26 +00:00
|
|
|
std::string Isolate::GetTurboCfgFileName() {
|
2014-10-23 07:36:39 +00:00
|
|
|
if (FLAG_trace_turbo_cfg_file == NULL) {
|
2014-10-23 11:56:26 +00:00
|
|
|
std::ostringstream os;
|
|
|
|
os << "turbo-" << base::OS::GetCurrentProcessId() << "-" << id() << ".cfg";
|
|
|
|
return os.str();
|
2014-10-23 07:36:39 +00:00
|
|
|
} else {
|
2014-10-23 11:56:26 +00:00
|
|
|
return FLAG_trace_turbo_cfg_file;
|
2014-10-23 07:36:39 +00:00
|
|
|
}
|
2014-10-14 08:43:33 +00:00
|
|
|
}
|
|
|
|
|
2016-03-30 11:04:17 +00:00
|
|
|
// Toggles tail-call elimination for this isolate.  Changing the flag
// invalidates optimized code, so all optimized functions are deoptimized;
// the early return avoids that cost when the value is unchanged.
void Isolate::SetTailCallEliminationEnabled(bool enabled) {
  if (is_tail_call_elimination_enabled_ == enabled) return;
  is_tail_call_elimination_enabled_ = enabled;
  // TODO(ishell): Introduce DependencyGroup::kTailCallChangedGroup to
  // deoptimize only those functions that are affected by the change of this
  // flag.
  internal::Deoptimizer::DeoptimizeAll(this);
}
|
2014-10-14 08:43:33 +00:00
|
|
|
|
2015-02-05 09:35:47 +00:00
|
|
|
// Heap::detached_contexts tracks detached contexts as pairs
// (number of GC since the context was detached, the context).
// Appends a new (0, WeakCell(context)) pair; the weak cell lets the GC clear
// the entry once the context dies, and the counter is aged in
// CheckDetachedContextsAfterGC.
void Isolate::AddDetachedContext(Handle<Context> context) {
  HandleScope scope(this);
  Handle<WeakCell> cell = factory()->NewWeakCell(context);
  Handle<FixedArray> detached_contexts(heap()->detached_contexts());
  int length = detached_contexts->length();
  // Grow by two slots for the (age, cell) pair.
  detached_contexts = factory()->CopyFixedArrayAndGrow(detached_contexts, 2);
  detached_contexts->set(length, Smi::FromInt(0));
  detached_contexts->set(length + 1, *cell);
  heap()->set_detached_contexts(*detached_contexts);
}
|
|
|
|
|
|
|
|
|
|
|
|
void Isolate::CheckDetachedContextsAfterGC() {
|
|
|
|
HandleScope scope(this);
|
|
|
|
Handle<FixedArray> detached_contexts(heap()->detached_contexts());
|
|
|
|
int length = detached_contexts->length();
|
|
|
|
if (length == 0) return;
|
|
|
|
int new_length = 0;
|
|
|
|
for (int i = 0; i < length; i += 2) {
|
|
|
|
int mark_sweeps = Smi::cast(detached_contexts->get(i))->value();
|
2015-03-19 14:22:28 +00:00
|
|
|
DCHECK(detached_contexts->get(i + 1)->IsWeakCell());
|
2015-02-05 09:35:47 +00:00
|
|
|
WeakCell* cell = WeakCell::cast(detached_contexts->get(i + 1));
|
|
|
|
if (!cell->cleared()) {
|
|
|
|
detached_contexts->set(new_length, Smi::FromInt(mark_sweeps + 1));
|
|
|
|
detached_contexts->set(new_length + 1, cell);
|
|
|
|
new_length += 2;
|
|
|
|
}
|
2015-02-18 13:50:41 +00:00
|
|
|
counters()->detached_context_age_in_gc()->AddSample(mark_sweeps + 1);
|
2015-02-05 09:35:47 +00:00
|
|
|
}
|
2015-02-17 13:21:50 +00:00
|
|
|
if (FLAG_trace_detached_contexts) {
|
|
|
|
PrintF("%d detached contexts are collected out of %d\n",
|
|
|
|
length - new_length, length);
|
|
|
|
for (int i = 0; i < new_length; i += 2) {
|
|
|
|
int mark_sweeps = Smi::cast(detached_contexts->get(i))->value();
|
2015-03-19 14:22:28 +00:00
|
|
|
DCHECK(detached_contexts->get(i + 1)->IsWeakCell());
|
2015-02-17 13:21:50 +00:00
|
|
|
WeakCell* cell = WeakCell::cast(detached_contexts->get(i + 1));
|
|
|
|
if (mark_sweeps > 3) {
|
2016-04-12 16:13:08 +00:00
|
|
|
PrintF("detached context %p\n survived %d GCs (leak?)\n",
|
2015-02-17 13:21:50 +00:00
|
|
|
static_cast<void*>(cell->value()), mark_sweeps);
|
|
|
|
}
|
2015-02-05 09:35:47 +00:00
|
|
|
}
|
|
|
|
}
|
2015-02-17 13:21:50 +00:00
|
|
|
if (new_length == 0) {
|
2015-02-05 09:35:47 +00:00
|
|
|
heap()->set_detached_contexts(heap()->empty_fixed_array());
|
2015-02-17 13:21:50 +00:00
|
|
|
} else if (new_length < length) {
|
2015-04-16 08:39:19 +00:00
|
|
|
heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
|
|
|
|
*detached_contexts, length - new_length);
|
2015-02-05 09:35:47 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-05-20 13:36:02 +00:00
|
|
|
// Records the embedder-reported RAIL mode (atomically, since readers may be
// on other threads) and optionally traces the transition.
void Isolate::SetRAILMode(RAILMode rail_mode) {
  rail_mode_.SetValue(rail_mode);
  if (FLAG_trace_rail) {
    PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
  }
}
|
2015-02-05 09:35:47 +00:00
|
|
|
|
2015-06-24 14:57:39 +00:00
|
|
|
// Returns true if JavaScript execution is within |gap| bytes of the real
// (non-interrupt) stack limit.  Checks both the simulator's JS stack (when
// running under a simulator) and the C stack.
bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
  StackGuard* stack_guard = isolate_->stack_guard();
#ifdef USE_SIMULATOR
  // The simulator uses a separate JS stack.
  Address jssp_address = Simulator::current(isolate_)->get_sp();
  uintptr_t jssp = reinterpret_cast<uintptr_t>(jssp_address);
  if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
}
|
|
|
|
|
|
|
|
|
2015-04-21 10:21:50 +00:00
|
|
|
// RAII scope that snapshots the isolate's current context (if any) and the
// C-entry frame pointer, and links itself into the isolate's save-context
// chain so nested scopes restore in LIFO order.
SaveContext::SaveContext(Isolate* isolate)
    : isolate_(isolate), prev_(isolate->save_context()) {
  // context_ stays a null handle when no context is entered; the destructor
  // restores NULL in that case.
  if (isolate->context() != NULL) {
    context_ = Handle<Context>(isolate->context());
  }
  isolate->set_save_context(this);

  // Remember the C entry FP so frames above this scope can be identified.
  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
}
|
|
|
|
|
|
|
|
|
2015-09-01 09:25:19 +00:00
|
|
|
SaveContext::~SaveContext() {
  // Restore the context captured at construction (NULL if none was entered)
  // and unlink this scope from the isolate's save-context chain.
  if (context_.is_null()) {
    isolate_->set_context(NULL);
  } else {
    isolate_->set_context(*context_);
  }
  isolate_->set_save_context(prev_);
}
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef DEBUG
|
|
|
|
AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
|
|
|
|
: isolate_(isolate), context_(isolate->context(), isolate) {}
|
|
|
|
#endif // DEBUG
|
|
|
|
|
|
|
|
|
2014-07-02 08:05:40 +00:00
|
|
|
bool PostponeInterruptsScope::Intercept(StackGuard::InterruptFlag flag) {
  // Outer (earlier) scopes get the first chance to intercept the interrupt.
  if (prev_ && prev_->Intercept(flag)) return true;
  // This scope only intercepts interrupts covered by its mask.
  if ((flag & intercept_mask_) == 0) return false;
  // Record the interrupt so it can be re-requested when the scope exits.
  intercepted_flags_ |= flag;
  return true;
}
|
|
|
|
|
2015-06-01 22:46:54 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|