2012-01-04 14:45:29 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
#ifndef V8_ISOLATE_H_
|
|
|
|
#define V8_ISOLATE_H_
|
|
|
|
|
2014-12-20 07:54:03 +00:00
|
|
|
#include <queue>
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "include/v8-debug.h"
|
|
|
|
#include "src/allocation.h"
|
|
|
|
#include "src/assert-scope.h"
|
2014-06-05 12:14:47 +00:00
|
|
|
#include "src/base/atomicops.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/builtins.h"
|
|
|
|
#include "src/contexts.h"
|
2014-06-20 08:40:11 +00:00
|
|
|
#include "src/date.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/execution.h"
|
|
|
|
#include "src/frames.h"
|
|
|
|
#include "src/global-handles.h"
|
|
|
|
#include "src/handles.h"
|
|
|
|
#include "src/hashmap.h"
|
2014-08-05 08:18:22 +00:00
|
|
|
#include "src/heap/heap.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/optimizing-compiler-thread.h"
|
|
|
|
#include "src/regexp-stack.h"
|
2014-09-25 07:16:15 +00:00
|
|
|
#include "src/runtime/runtime.h"
|
2014-06-20 08:40:11 +00:00
|
|
|
#include "src/runtime-profiler.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/zone.h"
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
namespace v8 {
|
2014-06-30 13:25:46 +00:00
|
|
|
|
|
|
|
namespace base {
|
|
|
|
class RandomNumberGenerator;
|
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
namespace internal {
|
|
|
|
|
2014-09-29 07:29:14 +00:00
|
|
|
class BasicBlockProfiler;
|
2011-03-18 20:35:07 +00:00
|
|
|
class Bootstrapper;
|
2014-09-03 10:51:51 +00:00
|
|
|
class CallInterfaceDescriptorData;
|
2011-03-18 20:35:07 +00:00
|
|
|
class CodeGenerator;
|
|
|
|
class CodeRange;
|
2014-09-08 15:18:54 +00:00
|
|
|
class CodeStubDescriptor;
|
2013-11-07 16:35:27 +00:00
|
|
|
class CodeTracer;
|
2011-03-18 20:35:07 +00:00
|
|
|
class CompilationCache;
|
2014-10-23 09:14:35 +00:00
|
|
|
class CompilationStatistics;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ContextSlotCache;
|
|
|
|
class Counters;
|
|
|
|
class CpuFeatures;
|
|
|
|
class CpuProfiler;
|
|
|
|
class DeoptimizerData;
|
|
|
|
class Deserializer;
|
|
|
|
class EmptyStatement;
|
2013-07-23 15:01:38 +00:00
|
|
|
class ExternalCallbackScope;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ExternalReferenceTable;
|
|
|
|
class Factory;
|
|
|
|
class FunctionInfoListener;
|
|
|
|
class HandleScopeImplementer;
|
|
|
|
class HeapProfiler;
|
2013-03-06 10:49:34 +00:00
|
|
|
class HStatistics;
|
2013-03-06 07:25:46 +00:00
|
|
|
class HTracer;
|
2011-03-18 20:35:07 +00:00
|
|
|
class InlineRuntimeFunctionsTable;
|
2014-01-29 15:49:48 +00:00
|
|
|
class InnerPointerToCodeCache;
|
The current
version is passing all the existing tests + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterializedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to look up its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
class MaterializedObjectStore;
|
2014-05-06 11:25:37 +00:00
|
|
|
class CodeAgingHelper;
|
2011-03-18 20:35:07 +00:00
|
|
|
class RegExpStack;
|
|
|
|
class SaveContext;
|
|
|
|
class StringTracker;
|
|
|
|
class StubCache;
|
2013-01-30 12:19:32 +00:00
|
|
|
class SweeperThread;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ThreadManager;
|
|
|
|
class ThreadState;
|
|
|
|
class ThreadVisitor; // Defined in v8threads.h
|
The current
version is passing all the existing tests + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterializedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to look up its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
class UnicodeCache;
|
2013-04-24 14:44:08 +00:00
|
|
|
template <StateTag Tag> class VMState;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// 'void function pointer', used to roundtrip the
|
|
|
|
// ExternalReference::ExternalReferenceRedirector since we can not include
|
|
|
|
// assembler.h, where it is defined, here.
|
|
|
|
typedef void* ExternalReferenceRedirectorPointer();
|
|
|
|
|
|
|
|
|
|
|
|
class Debug;
|
|
|
|
class Debugger;
|
2014-08-13 11:14:35 +00:00
|
|
|
class PromiseOnStack;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2015-01-16 07:42:00 +00:00
|
|
|
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
|
2014-03-21 09:28:26 +00:00
|
|
|
!defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
|
2015-01-16 07:42:00 +00:00
|
|
|
!defined(__PPC__) && V8_TARGET_ARCH_PPC || \
|
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS || \
|
2014-07-09 11:08:26 +00:00
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS64
|
2011-03-18 20:35:07 +00:00
|
|
|
class Redirection;
|
|
|
|
class Simulator;
|
|
|
|
#endif
|
|
|
|
|
2011-03-28 13:05:36 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Static indirection table for handles to constants. If a frame
|
|
|
|
// element represents a constant, the data contains an index into
|
|
|
|
// this table of handles to the actual constants.
|
|
|
|
// Static indirection table for handles to constants. If a Result
|
|
|
|
// represents a constant, the data contains an index into this table
|
|
|
|
// of handles to the actual constants.
|
|
|
|
typedef ZoneList<Handle<Object> > ZoneObjectList;
|
|
|
|
|
2014-04-17 08:33:18 +00:00
|
|
|
#define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \
|
2012-01-04 14:45:29 +00:00
|
|
|
do { \
|
|
|
|
Isolate* __isolate__ = (isolate); \
|
|
|
|
if (__isolate__->has_scheduled_exception()) { \
|
|
|
|
return __isolate__->PromoteScheduledException(); \
|
|
|
|
} \
|
|
|
|
} while (false)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-04-03 05:57:43 +00:00
|
|
|
// Macros for MaybeHandle.
|
|
|
|
|
2014-07-25 18:32:11 +00:00
|
|
|
#define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
|
|
|
|
do { \
|
|
|
|
Isolate* __isolate__ = (isolate); \
|
|
|
|
if (__isolate__->has_scheduled_exception()) { \
|
|
|
|
__isolate__->PromoteScheduledException(); \
|
|
|
|
return value; \
|
|
|
|
} \
|
2014-04-03 05:57:43 +00:00
|
|
|
} while (false)
|
|
|
|
|
2014-07-25 18:32:11 +00:00
|
|
|
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
|
|
|
|
RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())
|
|
|
|
|
2014-04-03 05:57:43 +00:00
|
|
|
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
|
|
|
|
do { \
|
|
|
|
if (!(call).ToHandle(&dst)) { \
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((isolate)->has_pending_exception()); \
|
2014-04-03 05:57:43 +00:00
|
|
|
return value; \
|
|
|
|
} \
|
|
|
|
} while (false)
|
|
|
|
|
|
|
|
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call) \
|
2014-04-22 12:50:58 +00:00
|
|
|
ASSIGN_RETURN_ON_EXCEPTION_VALUE( \
|
|
|
|
isolate, dst, call, isolate->heap()->exception())
|
2014-04-03 05:57:43 +00:00
|
|
|
|
|
|
|
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T) \
|
|
|
|
ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
|
|
|
|
|
2015-02-24 09:01:33 +00:00
|
|
|
#define THROW_NEW_ERROR(isolate, call, T) \
|
|
|
|
do { \
|
|
|
|
return isolate->Throw<T>(isolate->factory()->call); \
|
2014-09-01 09:11:44 +00:00
|
|
|
} while (false)
|
|
|
|
|
2015-02-24 09:01:33 +00:00
|
|
|
#define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call) \
|
|
|
|
do { \
|
|
|
|
return isolate->Throw(*isolate->factory()->call); \
|
2014-09-01 09:11:44 +00:00
|
|
|
} while (false)
|
|
|
|
|
2014-04-04 12:06:11 +00:00
|
|
|
#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
|
2014-04-03 05:57:43 +00:00
|
|
|
do { \
|
2014-04-03 12:30:37 +00:00
|
|
|
if ((call).is_null()) { \
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((isolate)->has_pending_exception()); \
|
2014-04-03 05:57:43 +00:00
|
|
|
return value; \
|
|
|
|
} \
|
|
|
|
} while (false)
|
|
|
|
|
|
|
|
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call) \
|
2014-04-22 12:50:58 +00:00
|
|
|
RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
|
2014-04-03 05:57:43 +00:00
|
|
|
|
|
|
|
#define RETURN_ON_EXCEPTION(isolate, call, T) \
|
2014-04-04 12:06:11 +00:00
|
|
|
RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
|
2014-04-03 05:57:43 +00:00
|
|
|
|
|
|
|
|
2011-09-08 16:29:57 +00:00
|
|
|
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C) \
|
|
|
|
C(Handler, handler) \
|
|
|
|
C(CEntryFP, c_entry_fp) \
|
2014-10-17 15:44:02 +00:00
|
|
|
C(CFunction, c_function) \
|
2011-09-08 16:29:57 +00:00
|
|
|
C(Context, context) \
|
|
|
|
C(PendingException, pending_exception) \
|
2015-03-03 12:03:04 +00:00
|
|
|
C(PendingHandlerContext, pending_handler_context) \
|
|
|
|
C(PendingHandlerCode, pending_handler_code) \
|
|
|
|
C(PendingHandlerOffset, pending_handler_offset) \
|
|
|
|
C(PendingHandlerFP, pending_handler_fp) \
|
|
|
|
C(PendingHandlerSP, pending_handler_sp) \
|
2011-09-08 16:29:57 +00:00
|
|
|
C(ExternalCaughtException, external_caught_exception) \
|
|
|
|
C(JSEntrySP, js_entry_sp)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
|
2011-04-11 23:46:22 +00:00
|
|
|
// Platform-independent, reliable thread identifier.
|
|
|
|
// Platform-independent, reliable thread identifier.
//
// The id is held in a base::Atomic32 and accessed only via NoBarrier
// loads/stores, so ThreadId values can be copied and compared across
// threads without locking.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }

  // Copies the id of |other| using relaxed (NoBarrier) atomic accesses.
  ThreadId& operator=(const ThreadId& other) {
    base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
    return *this;
  }

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return base::NoBarrier_Load(&id_) != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }

  // Hands out a fresh process-wide id.
  // NOTE(review): implementation not visible here — presumably bumps
  // highest_thread_id_ below; confirm in isolate.cc.
  static int AllocateThreadId();

  // Returns the id assigned to the calling OS thread.
  // NOTE(review): implementation not visible here — confirm whether it
  // allocates on first use.
  static int GetCurrentThreadId();

  base::Atomic32 id_;

  // Highest id handed out so far (used by AllocateThreadId()).
  static base::Atomic32 highest_thread_id_;

  friend class Isolate;
};
|
|
|
|
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
#define FIELD_ACCESSOR(type, name) \
|
|
|
|
inline void set_##name(type v) { name##_ = v; } \
|
|
|
|
inline type name() const { return name##_; }
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Per-thread execution state of an isolate: the current context, pending
// and scheduled exceptions, entry-frame stack pointers, and the chain of
// C++ TryCatch handlers.
// NOTE(review): member order looks layout-sensitive (fields are exposed via
// the isolate address-id machinery) — do not reorder without checking users.
class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack. If such an
  // address is needed, use try_catch_handler_address.
  FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack. When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer. When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  Address try_catch_handler_address() {
    return reinterpret_cast<Address>(
        v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
  }

  // Releases resources owned by this thread state.
  // NOTE(review): definition not visible here — confirm what is freed.
  void Free();

  // The isolate this per-thread state belongs to.
  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  // Id of the thread owning this state.
  ThreadId thread_id_;
  // The exception currently being propagated, if any.
  Object* pending_exception_;

  // Communication channel between Isolate::FindHandler and the CEntryStub.
  Context* pending_handler_context_;
  Code* pending_handler_code_;
  intptr_t pending_handler_offset_;
  Address pending_handler_fp_;
  Address pending_handler_sp_;

  // Communication channel between Isolate::Throw and message consumers.
  bool rethrowing_message_;
  Object* pending_message_obj_;

  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception. We may want to
  // unify them later.
  Object* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;     // try-blocks are chained through the stack
  Address c_function_;  // C function that was called at c entry.

  // Throwing an exception may cause a Promise rejection. For this purpose
  // we keep track of a stack of nested promises and the corresponding
  // try-catch handlers.
  PromiseOnStack* promise_on_stack_;

#ifdef USE_SIMULATOR
  Simulator* simulator_;
#endif

  Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
  // the external callback we're currently in
  ExternalCallbackScope* external_callback_scope_;
  // Current VM state tag (see the VMState<StateTag> template above).
  StateTag current_vm_state_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

 private:
  void InitializeInternal();

  v8::TryCatch* try_catch_handler_;
};
|
|
|
|
|
|
|
|
|
2015-01-16 07:42:00 +00:00
|
|
|
#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
|
2014-03-21 09:28:26 +00:00
|
|
|
V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
|
2015-01-16 07:42:00 +00:00
|
|
|
V8_TARGET_ARCH_PPC && !defined(__PPC__) || \
|
|
|
|
V8_TARGET_ARCH_MIPS && !defined(__mips__) || \
|
2014-07-09 11:08:26 +00:00
|
|
|
V8_TARGET_ARCH_MIPS64 && !defined(__mips__)
|
2014-03-14 15:06:17 +00:00
|
|
|
|
|
|
|
#define ISOLATE_INIT_SIMULATOR_LIST(V) \
|
|
|
|
V(bool, simulator_initialized, false) \
|
|
|
|
V(HashMap*, simulator_i_cache, NULL) \
|
|
|
|
V(Redirection*, simulator_redirection, NULL)
|
|
|
|
#else
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_SIMULATOR_LIST(V)
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
|
|
|
|
V(CommentStatistic, paged_space_comments_statistics, \
|
2014-03-14 15:06:17 +00:00
|
|
|
CommentStatistic::kMaxComments + 1) \
|
|
|
|
V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
|
2011-03-18 20:35:07 +00:00
|
|
|
#else
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_ARRAY_LIST(V) \
|
|
|
|
/* SerializerDeserializer state. */ \
|
2012-08-28 09:37:41 +00:00
|
|
|
V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
|
2011-03-18 20:35:07 +00:00
|
|
|
V(int, bad_char_shift_table, kUC16AlphabetSize) \
|
|
|
|
V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \
|
|
|
|
V(int, suffix_table, (kBMMaxShift + 1)) \
|
2011-07-04 11:34:29 +00:00
|
|
|
V(uint32_t, private_random_seed, 2) \
|
2011-03-18 20:35:07 +00:00
|
|
|
ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
|
|
|
|
|
2013-11-20 12:35:58 +00:00
|
|
|
typedef List<HeapObject*> DebugObjectCache;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
#define ISOLATE_INIT_LIST(V) \
|
|
|
|
/* Assembler state. */ \
|
|
|
|
V(FatalErrorCallback, exception_behavior, NULL) \
|
2014-03-10 08:56:48 +00:00
|
|
|
V(LogEventCallback, event_logger, NULL) \
|
2011-05-03 05:40:47 +00:00
|
|
|
V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL) \
|
2011-03-18 20:35:07 +00:00
|
|
|
/* To distinguish the function templates, so that we can find them in the */ \
|
2012-08-17 09:03:08 +00:00
|
|
|
/* function cache of the native context. */ \
|
2011-03-18 20:35:07 +00:00
|
|
|
V(int, next_serial_number, 0) \
|
|
|
|
V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL) \
|
|
|
|
/* Part of the state of liveedit. */ \
|
|
|
|
V(FunctionInfoListener*, active_function_info_listener, NULL) \
|
|
|
|
/* State for Relocatable. */ \
|
|
|
|
V(Relocatable*, relocatable_top, NULL) \
|
|
|
|
V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \
|
|
|
|
V(Object*, string_stream_current_security_token, NULL) \
|
|
|
|
V(ExternalReferenceTable*, external_reference_table, NULL) \
|
2015-03-20 11:17:09 +00:00
|
|
|
V(HashMap*, external_reference_map, NULL) \
|
|
|
|
V(HashMap*, root_index_map, NULL) \
|
2014-05-19 07:57:04 +00:00
|
|
|
V(int, pending_microtask_count, 0) \
|
2014-02-12 22:04:19 +00:00
|
|
|
V(bool, autorun_microtasks, true) \
|
2013-03-06 10:49:34 +00:00
|
|
|
V(HStatistics*, hstatistics, NULL) \
|
2014-10-23 09:14:35 +00:00
|
|
|
V(CompilationStatistics*, turbo_statistics, NULL) \
|
2013-03-06 07:25:46 +00:00
|
|
|
V(HTracer*, htracer, NULL) \
|
2013-11-07 16:35:27 +00:00
|
|
|
V(CodeTracer*, code_tracer, NULL) \
|
2014-03-14 15:06:17 +00:00
|
|
|
V(bool, fp_stubs_generated, false) \
|
|
|
|
V(int, max_available_threads, 0) \
|
2014-03-19 11:31:43 +00:00
|
|
|
V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu) \
|
2014-09-30 15:29:08 +00:00
|
|
|
V(PromiseRejectCallback, promise_reject_callback, NULL) \
|
2015-02-25 11:14:40 +00:00
|
|
|
V(const v8::StartupData*, snapshot_blob, NULL) \
|
2014-04-25 11:00:37 +00:00
|
|
|
ISOLATE_INIT_SIMULATOR_LIST(V)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
#define THREAD_LOCAL_TOP_ACCESSOR(type, name) \
|
|
|
|
inline void set_##name(type v) { thread_local_top_.name##_ = v; } \
|
|
|
|
inline type name() const { return thread_local_top_.name##_; }
|
|
|
|
|
2015-03-03 12:03:04 +00:00
|
|
|
#define THREAD_LOCAL_TOP_ADDRESS(type, name) \
|
|
|
|
type* name##_address() { return &thread_local_top_.name##_; }
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
class Isolate {
|
|
|
|
// These forward declarations are required to make the friend declarations in
|
|
|
|
// PerIsolateThreadData work on some older versions of gcc.
|
|
|
|
class ThreadDataTable;
|
|
|
|
class EntryStackItem;
|
|
|
|
public:
|
|
|
|
~Isolate();
|
|
|
|
|
|
|
|
  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    ~PerIsolateThreadData();
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }

    FIELD_ACCESSOR(uintptr_t, stack_limit)
    FIELD_ACCESSOR(ThreadState*, thread_state)

#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
    // Simulator instance when the target architecture is simulated on
    // this host (target arch macros set but host arch macros absent).
    FIELD_ACCESSOR(Simulator*, simulator)
#endif

    // True if this entry belongs to the given (isolate, thread) pair.
    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
    Simulator* simulator_;
#endif

    // Intrusive doubly-linked-list links.
    // NOTE(review): presumably maintained by ThreadDataTable (a friend
    // below) — confirm in isolate.cc.
    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };
|
|
|
|
|
|
|
|
|
|
|
|
enum AddressId {
|
2011-09-08 16:29:57 +00:00
|
|
|
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
|
|
|
|
FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
|
2012-04-23 15:09:59 +00:00
|
|
|
#undef DECLARE_ENUM
|
2011-09-08 16:29:57 +00:00
|
|
|
kIsolateAddressCount
|
2011-03-18 20:35:07 +00:00
|
|
|
};
|
|
|
|
|
2014-09-19 08:01:35 +00:00
|
|
|
static void InitializeOncePerProcess();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    // Stored in a process-wide TLS slot keyed by per_isolate_thread_data_key_.
    return reinterpret_cast<PerIsolateThreadData*>(
        base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }
|
|
|
|
|
|
|
|
  // Returns the isolate inside which the current thread is running.
  // It is a bug to call this when no isolate is entered on this thread.
  INLINE(static Isolate* Current()) {
    // Catches use before process-wide initialization created the TLS key.
    DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
    Isolate* isolate = reinterpret_cast<Isolate*>(
        base::Thread::GetExistingThreadLocal(isolate_key_));
    DCHECK(isolate != NULL);
    return isolate;
  }
|
|
|
|
|
|
|
|
  // Like Current(), but returns NULL instead of asserting when no isolate
  // is set on this thread. Still requires |isolate_key_| to exist.
  INLINE(static Isolate* UncheckedCurrent()) {
    DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
    return reinterpret_cast<Isolate*>(
        base::Thread::GetThreadLocal(isolate_key_));
  }
|
|
|
|
|
|
|
|
  // Like UncheckedCurrent, but skips the check that |isolate_key_| was
  // initialized. Callers have to ensure that themselves.
  // May return NULL.
  INLINE(static Isolate* UnsafeCurrent()) {
    return reinterpret_cast<Isolate*>(
        base::Thread::GetThreadLocal(isolate_key_));
  }
|
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
// Usually called by Init(), but can be called early e.g. to allow
|
|
|
|
// testing components that require logging but not the whole
|
|
|
|
// isolate.
|
|
|
|
//
|
|
|
|
// Safe to call more than once.
|
|
|
|
void InitializeLoggingAndCounters();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool Init(Deserializer* des);
|
|
|
|
|
|
|
|
// True if at least one thread Enter'ed this isolate.
|
|
|
|
bool IsInUse() { return entry_stack_ != NULL; }
|
|
|
|
|
|
|
|
// Destroys the non-default isolates.
|
|
|
|
// Sets default isolate into "has_been_disposed" state rather then destroying,
|
|
|
|
// for legacy API reasons.
|
|
|
|
void TearDown();
|
|
|
|
|
2013-02-12 11:57:51 +00:00
|
|
|
static void GlobalTearDown();
|
|
|
|
|
2015-03-20 11:17:09 +00:00
|
|
|
void ClearSerializerData();
|
|
|
|
|
2011-05-05 18:55:31 +00:00
|
|
|
// Find the PerThread for this particular (isolate, thread) combination
|
|
|
|
// If one does not yet exist, return null.
|
|
|
|
PerIsolateThreadData* FindPerThreadDataForThisThread();
|
|
|
|
|
2013-04-11 14:22:04 +00:00
|
|
|
// Find the PerThread for given (isolate, thread) combination
|
|
|
|
// If one does not yet exist, return null.
|
|
|
|
PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Returns the key used to store the pointer to the current isolate.
|
|
|
|
// Used internally for V8 threads that do not execute JavaScript but still
|
|
|
|
// are part of the domain of an isolate (like the context switcher).
|
2014-06-30 13:25:46 +00:00
|
|
|
static base::Thread::LocalStorageKey isolate_key() {
|
2012-03-30 14:30:46 +00:00
|
|
|
return isolate_key_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Returns the key used to store process-wide thread IDs.
|
2014-06-30 13:25:46 +00:00
|
|
|
static base::Thread::LocalStorageKey thread_id_key() {
|
2012-03-30 14:30:46 +00:00
|
|
|
return thread_id_key_;
|
|
|
|
}
|
2012-03-12 13:56:56 +00:00
|
|
|
|
2014-06-30 13:25:46 +00:00
|
|
|
static base::Thread::LocalStorageKey per_isolate_thread_data_key();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Mutex for serializing access to break control structures.
|
2014-06-30 13:25:46 +00:00
|
|
|
base::RecursiveMutex* break_access() { return &break_access_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
Address get_address_from_id(AddressId id);
|
|
|
|
|
|
|
|
// Access to top context (where the current function object was created).
|
|
|
|
Context* context() { return thread_local_top_.context_; }
|
|
|
|
// Installs |context| as this thread's current context. NULL is a
// permitted value (no context currently active).
void set_context(Context* context) {
  DCHECK(context == NULL || context->IsContext());
  thread_local_top_.context_ = context;
}
|
|
|
|
Context** context_address() { return &thread_local_top_.context_; }
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Access to current thread id.
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Interface to pending exception.
|
2014-04-08 09:44:24 +00:00
|
|
|
// Returns the pending exception. Must only be called while an
// exception is actually pending (checked in debug builds).
Object* pending_exception() {
  DCHECK(has_pending_exception());
  DCHECK(!thread_local_top_.pending_exception_->IsException());
  return thread_local_top_.pending_exception_;
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2014-04-22 07:33:20 +00:00
|
|
|
// Records |exception_obj| as the pending exception. Exception
// sentinel values themselves may never be stored here.
void set_pending_exception(Object* exception_obj) {
  DCHECK(!exception_obj->IsException());
  thread_local_top_.pending_exception_ = exception_obj;
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void clear_pending_exception() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!thread_local_top_.pending_exception_->IsException());
|
2011-03-18 20:35:07 +00:00
|
|
|
thread_local_top_.pending_exception_ = heap_.the_hole_value();
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2015-03-03 12:03:04 +00:00
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception)
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool has_pending_exception() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!thread_local_top_.pending_exception_->IsException());
|
2011-03-18 20:35:07 +00:00
|
|
|
return !thread_local_top_.pending_exception_->IsTheHole();
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2015-03-03 12:03:04 +00:00
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Context*, pending_handler_context)
|
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Code*, pending_handler_code)
|
|
|
|
THREAD_LOCAL_TOP_ADDRESS(intptr_t, pending_handler_offset)
|
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
|
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Clears the pending message object (the hole marks "no message").
void clear_pending_message() {
  thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
}
|
|
|
|
// Returns the innermost registered v8::TryCatch handler, if any.
v8::TryCatch* try_catch_handler() {
  return thread_local_top_.try_catch_handler();
}
|
|
|
|
bool* external_caught_exception_address() {
|
|
|
|
return &thread_local_top_.external_caught_exception_;
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2015-03-03 12:03:04 +00:00
|
|
|
THREAD_LOCAL_TOP_ADDRESS(Object*, scheduled_exception)
|
2012-06-11 13:18:05 +00:00
|
|
|
|
|
|
|
Address pending_message_obj_address() {
|
|
|
|
return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
|
|
|
|
}
|
|
|
|
|
2014-04-08 09:44:24 +00:00
|
|
|
// Returns the scheduled exception. Must only be called while one is
// actually scheduled (checked in debug builds).
Object* scheduled_exception() {
  DCHECK(has_scheduled_exception());
  DCHECK(!thread_local_top_.scheduled_exception_->IsException());
  return thread_local_top_.scheduled_exception_;
}
|
|
|
|
bool has_scheduled_exception() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!thread_local_top_.scheduled_exception_->IsException());
|
2011-06-10 09:54:04 +00:00
|
|
|
return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
void clear_scheduled_exception() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!thread_local_top_.scheduled_exception_->IsException());
|
2011-03-18 20:35:07 +00:00
|
|
|
thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
|
|
|
|
}
|
|
|
|
|
2015-03-18 10:19:04 +00:00
|
|
|
bool IsJavaScriptHandlerOnTop(Object* exception);
|
|
|
|
bool IsExternalHandlerOnTop(Object* exception);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-04-08 09:44:24 +00:00
|
|
|
// Every exception is catchable from JavaScript except the dedicated
// termination exception.
bool is_catchable_by_javascript(Object* exception) {
  return exception != heap()->termination_exception();
}
|
|
|
|
|
|
|
|
// JS execution stack (see frames.h).
|
|
|
|
static Address c_entry_fp(ThreadLocalTop* thread) {
|
|
|
|
return thread->c_entry_fp_;
|
|
|
|
}
|
|
|
|
static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
|
2014-10-17 15:44:02 +00:00
|
|
|
Address c_function() { return thread_local_top_.c_function_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
inline Address* c_entry_fp_address() {
|
|
|
|
return &thread_local_top_.c_entry_fp_;
|
|
|
|
}
|
|
|
|
inline Address* handler_address() { return &thread_local_top_.handler_; }
|
2014-10-17 15:44:02 +00:00
|
|
|
inline Address* c_function_address() {
|
|
|
|
return &thread_local_top_.c_function_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-08-07 17:04:27 +00:00
|
|
|
// Bottom JS entry.
|
|
|
|
Address js_entry_sp() {
|
|
|
|
return thread_local_top_.js_entry_sp_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
inline Address* js_entry_sp_address() {
|
|
|
|
return &thread_local_top_.js_entry_sp_;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Returns the global object of the current context. It could be
|
2012-01-16 12:38:59 +00:00
|
|
|
// a builtin object, or a JS global object.
|
2012-08-17 12:59:00 +00:00
|
|
|
// Returns the global object of the current context. It could be a
// builtin object or a JS global object.
Handle<GlobalObject> global_object() {
  return Handle<GlobalObject>(context()->global_object());
}
|
|
|
|
|
|
|
|
// Returns the global proxy object of the current context.
|
2014-07-01 12:12:34 +00:00
|
|
|
JSObject* global_proxy() {
|
2011-03-18 20:35:07 +00:00
|
|
|
return context()->global_proxy();
|
|
|
|
}
|
|
|
|
|
|
|
|
Handle<JSBuiltinsObject> js_builtins_object() {
|
|
|
|
return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
|
|
|
|
}
|
|
|
|
|
|
|
|
static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
|
|
|
|
void FreeThreadResources() { thread_local_top_.Free(); }
|
|
|
|
|
|
|
|
// This method is called by the api after operations that may throw
|
|
|
|
// exceptions. If an exception was thrown and not handled by an external
|
|
|
|
// handler the exception is scheduled to be rethrown when we return to running
|
|
|
|
// JavaScript code. If an exception is scheduled true is returned.
|
|
|
|
bool OptionalRescheduleException(bool is_bottom_call);
|
|
|
|
|
2014-08-13 11:14:35 +00:00
|
|
|
// Push and pop a promise and the current try-catch handler.
|
2015-03-25 13:13:51 +00:00
|
|
|
void PushPromise(Handle<JSObject> promise, Handle<JSFunction> function);
|
2014-08-13 11:14:35 +00:00
|
|
|
void PopPromise();
|
|
|
|
Handle<Object> GetPromiseOnStackOnThrow();
|
|
|
|
|
2011-04-07 19:52:24 +00:00
|
|
|
class ExceptionScope {
|
|
|
|
public:
|
2015-03-11 13:36:08 +00:00
|
|
|
// Scope currently can only be used for regular exceptions,
|
|
|
|
// not termination exception.
|
|
|
|
explicit ExceptionScope(Isolate* isolate)
|
|
|
|
: isolate_(isolate),
|
|
|
|
pending_exception_(isolate_->pending_exception(), isolate_) {}
|
2011-04-07 19:52:24 +00:00
|
|
|
|
|
|
|
~ExceptionScope() {
|
|
|
|
isolate_->set_pending_exception(*pending_exception_);
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
Isolate* isolate_;
|
|
|
|
Handle<Object> pending_exception_;
|
|
|
|
};
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void SetCaptureStackTraceForUncaughtExceptions(
|
|
|
|
bool capture,
|
|
|
|
int frame_limit,
|
|
|
|
StackTrace::StackTraceOptions options);
|
|
|
|
|
|
|
|
void PrintCurrentStackTrace(FILE* out);
|
|
|
|
void PrintStack(StringStream* accumulator);
|
2013-05-21 09:25:57 +00:00
|
|
|
void PrintStack(FILE* out);
|
2011-03-18 20:35:07 +00:00
|
|
|
Handle<String> StackTraceString();
|
2012-09-05 12:30:49 +00:00
|
|
|
NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
|
|
|
|
Object* object,
|
|
|
|
Map* map,
|
|
|
|
unsigned int magic2));
|
2011-03-18 20:35:07 +00:00
|
|
|
Handle<JSArray> CaptureCurrentStackTrace(
|
|
|
|
int frame_limit,
|
|
|
|
StackTrace::StackTraceOptions options);
|
2014-07-02 14:18:10 +00:00
|
|
|
Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
|
|
|
|
Handle<Object> caller);
|
2012-11-12 14:54:29 +00:00
|
|
|
void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
|
2014-07-02 14:18:10 +00:00
|
|
|
void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
|
|
|
|
Handle<Object> caller);
|
2014-10-28 13:24:36 +00:00
|
|
|
Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
|
|
|
|
Handle<JSArray> GetDetailedFromSimpleStackTrace(
|
|
|
|
Handle<JSObject> error_object);
|
2012-02-07 09:31:06 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Returns if the top context may access the given global object. If
|
|
|
|
// the result is false, the pending exception is guaranteed to be
|
|
|
|
// set.
|
2013-10-02 08:29:34 +00:00
|
|
|
|
2015-02-26 10:34:44 +00:00
|
|
|
bool MayAccess(Handle<JSObject> receiver);
|
2014-12-15 19:57:37 +00:00
|
|
|
bool IsInternallyUsedPropertyName(Handle<Object> name);
|
|
|
|
bool IsInternallyUsedPropertyName(Object* name);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
|
2015-02-26 10:34:44 +00:00
|
|
|
void ReportFailedAccessCheck(Handle<JSObject> receiver);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Exception throwing support. The caller should use the result
|
|
|
|
// of Throw() as its return value.
|
2014-04-22 12:50:58 +00:00
|
|
|
Object* Throw(Object* exception, MessageLocation* location = NULL);
|
2015-03-18 10:19:04 +00:00
|
|
|
Object* ThrowIllegalOperation();
|
2014-04-03 05:57:43 +00:00
|
|
|
|
|
|
|
template <typename T>
|
|
|
|
MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
|
|
|
|
MessageLocation* location = NULL) {
|
|
|
|
Throw(*exception, location);
|
|
|
|
return MaybeHandle<T>();
|
|
|
|
}
|
|
|
|
|
2015-03-18 10:19:04 +00:00
|
|
|
// Re-throw an exception. This involves no error reporting since error
|
|
|
|
// reporting was handled when the exception was thrown originally.
|
2014-04-22 12:50:58 +00:00
|
|
|
Object* ReThrow(Object* exception);
|
2015-03-03 12:03:04 +00:00
|
|
|
|
|
|
|
// Find the correct handler for the current pending exception. This also
|
|
|
|
// clears and returns the current pending exception.
|
|
|
|
Object* FindHandler();
|
|
|
|
|
2015-03-25 13:13:51 +00:00
|
|
|
// Tries to predict whether an exception will be caught. Note that this can
|
2015-03-18 10:19:04 +00:00
|
|
|
// only produce an estimate, because it is undecidable whether a finally
|
|
|
|
// clause will consume or re-throw an exception. We conservatively assume any
|
|
|
|
// finally clause will behave as if the exception were consumed.
|
2015-03-25 13:13:51 +00:00
|
|
|
enum CatchType { NOT_CAUGHT, CAUGHT_BY_JAVASCRIPT, CAUGHT_BY_EXTERNAL };
|
|
|
|
CatchType PredictExceptionCatcher();
|
2015-03-18 10:19:04 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void ScheduleThrow(Object* exception);
|
2013-07-01 10:54:39 +00:00
|
|
|
// Re-set pending message, script and positions reported to the TryCatch
|
|
|
|
// back to the TLS for re-use when rethrowing.
|
|
|
|
void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
|
2014-07-18 13:28:12 +00:00
|
|
|
// Un-schedule an exception that was caught by a TryCatch handler.
|
|
|
|
void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
|
2015-03-19 13:22:42 +00:00
|
|
|
void ReportPendingMessages();
|
2012-12-03 21:47:39 +00:00
|
|
|
// Return pending location if any or unfilled structure.
|
|
|
|
MessageLocation GetMessageLocation();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
|
2014-04-22 12:50:58 +00:00
|
|
|
Object* PromoteScheduledException();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Attempts to compute the current source location, storing the
|
|
|
|
// result in the target out parameter.
|
|
|
|
void ComputeLocation(MessageLocation* target);
|
2015-02-03 08:57:18 +00:00
|
|
|
bool ComputeLocationFromException(MessageLocation* target,
|
|
|
|
Handle<Object> exception);
|
2014-11-04 10:06:44 +00:00
|
|
|
bool ComputeLocationFromStackTrace(MessageLocation* target,
|
2014-10-30 14:51:17 +00:00
|
|
|
Handle<Object> exception);
|
|
|
|
|
|
|
|
Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
|
|
|
|
MessageLocation* location);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Out of resource exception helpers.
|
2014-04-22 12:50:58 +00:00
|
|
|
Object* StackOverflow();
|
|
|
|
Object* TerminateExecution();
|
2013-04-22 15:01:45 +00:00
|
|
|
void CancelTerminateExecution();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-12-20 07:54:03 +00:00
|
|
|
void RequestInterrupt(InterruptCallback callback, void* data);
|
|
|
|
void InvokeApiInterruptCallbacks();
|
2014-05-09 09:13:12 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Administration
|
|
|
|
void Iterate(ObjectVisitor* v);
|
|
|
|
void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
|
|
|
|
char* Iterate(ObjectVisitor* v, char* t);
|
|
|
|
void IterateThread(ThreadVisitor* v, char* t);
|
|
|
|
|
2014-11-12 11:34:09 +00:00
|
|
|
// Returns the current native context.
|
2012-08-17 09:03:08 +00:00
|
|
|
Handle<Context> native_context();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2012-08-17 09:03:08 +00:00
|
|
|
// Returns the native context of the calling JavaScript code. That
|
|
|
|
// is, the native context of the top-most JavaScript frame.
|
|
|
|
Handle<Context> GetCallingNativeContext();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
void RegisterTryCatchHandler(v8::TryCatch* that);
|
|
|
|
void UnregisterTryCatchHandler(v8::TryCatch* that);
|
|
|
|
|
|
|
|
char* ArchiveThread(char* to);
|
|
|
|
char* RestoreThread(char* from);
|
|
|
|
|
|
|
|
static const char* const kStackOverflowMessage;
|
|
|
|
|
|
|
|
static const int kUC16AlphabetSize = 256; // See StringSearchBase.
|
|
|
|
static const int kBMMaxShift = 250; // See StringSearchBase.
|
|
|
|
|
|
|
|
// Accessors.
|
|
|
|
#define GLOBAL_ACCESSOR(type, name, initialvalue) \
|
|
|
|
inline type name() const { \
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
2011-03-18 20:35:07 +00:00
|
|
|
return name##_; \
|
|
|
|
} \
|
|
|
|
inline void set_##name(type value) { \
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
2011-03-18 20:35:07 +00:00
|
|
|
name##_ = value; \
|
|
|
|
}
|
|
|
|
ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
|
|
|
|
#undef GLOBAL_ACCESSOR
|
|
|
|
|
|
|
|
#define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
|
|
|
|
inline type* name() { \
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
2011-03-18 20:35:07 +00:00
|
|
|
return &(name##_)[0]; \
|
|
|
|
}
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
|
|
|
|
#undef GLOBAL_ARRAY_ACCESSOR
|
|
|
|
|
2013-02-25 14:46:09 +00:00
|
|
|
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
|
|
|
|
Handle<type> name() { \
|
2014-07-01 12:12:34 +00:00
|
|
|
return Handle<type>(native_context()->name(), this); \
|
2013-05-13 07:35:26 +00:00
|
|
|
} \
|
|
|
|
bool is_##name(type* value) { \
|
2014-07-01 12:12:34 +00:00
|
|
|
return native_context()->is_##name(value); \
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
2012-08-17 09:03:08 +00:00
|
|
|
NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
|
|
|
|
#undef NATIVE_CONTEXT_FIELD_ACCESSOR
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
Bootstrapper* bootstrapper() { return bootstrapper_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
// Accessor for the counters object. Call InitializeLoggingAndCounters()
// first if counters are needed before the isolate is fully initialized.
Counters* counters() {
  DCHECK(counters_ != NULL);
  return counters_;
}
|
2011-03-18 20:35:07 +00:00
|
|
|
CodeRange* code_range() { return code_range_; }
|
|
|
|
RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
|
|
|
|
CompilationCache* compilation_cache() { return compilation_cache_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
// Accessor for the logger. Call InitializeLoggingAndCounters() first
// if logging is needed before the isolate is fully initialized.
Logger* logger() {
  DCHECK(logger_ != NULL);
  return logger_;
}
|
2011-03-18 20:35:07 +00:00
|
|
|
StackGuard* stack_guard() { return &stack_guard_; }
|
|
|
|
Heap* heap() { return &heap_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
StatsTable* stats_table();
|
2011-03-18 20:35:07 +00:00
|
|
|
StubCache* stub_cache() { return stub_cache_; }
|
2014-05-06 11:25:37 +00:00
|
|
|
CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
|
|
|
|
ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterializedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
MaterializedObjectStore* materialized_object_store() {
|
|
|
|
return materialized_object_store_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
MemoryAllocator* memory_allocator() {
|
|
|
|
return memory_allocator_;
|
|
|
|
}
|
|
|
|
|
|
|
|
KeyedLookupCache* keyed_lookup_cache() {
|
|
|
|
return keyed_lookup_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
ContextSlotCache* context_slot_cache() {
|
|
|
|
return context_slot_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
DescriptorLookupCache* descriptor_lookup_cache() {
|
|
|
|
return descriptor_lookup_cache_;
|
|
|
|
}
|
|
|
|
|
2014-01-16 08:17:40 +00:00
|
|
|
HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
HandleScopeImplementer* handle_scope_implementer() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(handle_scope_implementer_);
|
2011-03-18 20:35:07 +00:00
|
|
|
return handle_scope_implementer_;
|
|
|
|
}
|
2013-07-03 11:40:30 +00:00
|
|
|
Zone* runtime_zone() { return &runtime_zone_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-04-12 08:27:38 +00:00
|
|
|
UnicodeCache* unicode_cache() {
|
|
|
|
return unicode_cache_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2011-09-20 10:08:39 +00:00
|
|
|
InnerPointerToCodeCache* inner_pointer_to_code_cache() {
|
|
|
|
return inner_pointer_to_code_cache_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
GlobalHandles* global_handles() { return global_handles_; }
|
|
|
|
|
2013-08-05 09:46:23 +00:00
|
|
|
EternalHandles* eternal_handles() { return eternal_handles_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadManager* thread_manager() { return thread_manager_; }
|
|
|
|
|
|
|
|
StringTracker* string_tracker() { return string_tracker_; }
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
|
|
|
|
return &jsregexp_uncanonicalize_;
|
|
|
|
}
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
|
|
|
|
return &jsregexp_canonrange_;
|
|
|
|
}
|
|
|
|
|
|
|
|
RuntimeState* runtime_state() { return &runtime_state_; }
|
|
|
|
|
|
|
|
Builtins* builtins() { return &builtins_; }
|
|
|
|
|
2011-11-15 22:48:55 +00:00
|
|
|
void NotifyExtensionInstalled() {
|
|
|
|
has_installed_extensions_ = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool has_installed_extensions() { return has_installed_extensions_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>*
|
|
|
|
regexp_macro_assembler_canonicalize() {
|
|
|
|
return ®exp_macro_assembler_canonicalize_;
|
|
|
|
}
|
|
|
|
|
|
|
|
RegExpStack* regexp_stack() { return regexp_stack_; }
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>*
|
|
|
|
interp_canonicalize_mapping() {
|
|
|
|
return &interp_canonicalize_mapping_;
|
|
|
|
}
|
|
|
|
|
2014-05-05 07:10:38 +00:00
|
|
|
Debug* debug() { return debug_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-04-02 07:53:50 +00:00
|
|
|
CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
|
2013-04-02 08:03:01 +00:00
|
|
|
HeapProfiler* heap_profiler() const { return heap_profiler_; }
|
2013-04-02 07:53:50 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
HistogramInfo* heap_histograms() { return heap_histograms_; }
|
|
|
|
|
|
|
|
JSObject::SpillInformation* js_spill_information() {
|
|
|
|
return &js_spill_information_;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
// NOTE(review): reinterprets the isolate itself as its Factory —
// presumably Factory adds no state of its own on top of Isolate;
// confirm against factory.h before relying on this elsewhere.
Factory* factory() { return reinterpret_cast<Factory*>(this); }
|
|
|
|
|
2012-05-22 14:05:44 +00:00
|
|
|
static const int kJSRegexpStaticOffsetsVectorSize = 128;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-11-20 15:16:18 +00:00
|
|
|
// Stores an embedder-supplied pointer in the given data slot.
// |slot| must be below Internals::kNumIsolateDataSlots.
void SetData(uint32_t slot, void* data) {
  DCHECK(slot < Internals::kNumIsolateDataSlots);
  embedder_data_[slot] = data;
}
|
|
|
|
// Retrieves the embedder-supplied pointer stored in the given slot.
void* GetData(uint32_t slot) {
  DCHECK(slot < Internals::kNumIsolateDataSlots);
  return embedder_data_[slot];
}
|
2011-05-18 23:26:38 +00:00
|
|
|
|
2014-05-22 09:36:20 +00:00
|
|
|
bool serializer_enabled() const { return serializer_enabled_; }
|
2015-02-25 11:14:40 +00:00
|
|
|
bool snapshot_available() const { return snapshot_blob_ != NULL; }
|
2014-05-22 09:36:20 +00:00
|
|
|
|
2013-09-03 09:35:26 +00:00
|
|
|
bool IsDead() { return has_fatal_error_; }
|
|
|
|
void SignalFatalError() { has_fatal_error_ = true; }
|
|
|
|
|
2014-05-22 09:36:20 +00:00
|
|
|
bool use_crankshaft() const;
|
2013-09-03 08:49:44 +00:00
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
bool initialized_from_snapshot() { return initialized_from_snapshot_; }
|
|
|
|
|
2012-02-06 08:59:43 +00:00
|
|
|
double time_millis_since_init() {
|
2014-06-30 13:25:46 +00:00
|
|
|
return base::OS::TimeCurrentMillis() - time_millis_at_init_;
|
2012-02-06 08:59:43 +00:00
|
|
|
}
|
|
|
|
|
2012-03-09 12:07:29 +00:00
|
|
|
DateCache* date_cache() {
|
|
|
|
return date_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Installs a new date cache, deleting the previous one unless the
// same object is being re-installed (guards against self-delete).
void set_date_cache(DateCache* date_cache) {
  if (date_cache != date_cache_) {
    delete date_cache_;
  }
  date_cache_ = date_cache;
}
|
|
|
|
|
2013-05-13 07:35:26 +00:00
|
|
|
Map* get_initial_js_array_map(ElementsKind kind);
|
|
|
|
|
|
|
|
bool IsFastArrayConstructorPrototypeChainIntact();
|
|
|
|
|
2014-09-03 10:51:51 +00:00
|
|
|
CallInterfaceDescriptorData* call_descriptor_data(int index);
|
This is a preview of a first step towards unification of the hydrogen
call machinery. The change replaces CallNamed, CallKeyed,
CallConstantFunction and CallKnownGlobal hydrogen instructions with two
new instructions with a more lower level semantics:
1. CallJSFunction for direct calls of JSFunction objects (no
argument adaptation)
2. CallWithDescriptor for calls of a given Code object according to
the supplied calling convention.
Details:
CallJSFunction should be straightforward, the main difference from the
existing InvokeFunction instruction is the absence of argument adaptor
handling. (As a next step, we will replace InvokeFunction with an
equivalent hydrogen code.)
For CallWithDescriptor, the calling conventions are represented by a
tweaked version of CallStubInterfaceDescriptor. In addition to the
parameter-register mapping, we also define parameter-representation
mapping there. The CallWithDescriptor instruction has variable number of
parameters now - this required some simple tweaks in Lithium, which
assumed fixed number of arguments in some places.
The calling conventions used in the calls are initialized in the
CallDescriptors class (code-stubs.h, <arch>/code-stubs-<arch>.cc), and
they live in a new table in the Isolate class. I should say I am not
quite sure about Representation::Integer32() representation for some of
the params of ArgumentAdaptorCall - it is not clear to me whether the
params could not end up on the stack and thus confuse the GC.
The change also includes an earlier small change to argument adaptor
(https://codereview.chromium.org/98463007) that avoids passing a naked
pointer to the code entry as a parameter. I am sorry for packaging that
with an already biggish change.
Performance implications:
Locally, I see a small regression (.2% or so). It is hard to say where
exactly it comes from, but I do see inefficient call sequences to the
adaptor trampoline. For example:
;;; <@78,#24> constant-t
bf85aa515a mov edi,0x5a51aa85 ;; debug: position 29
;;; <@72,#53> load-named-field
8b7717 mov esi,[edi+0x17] ;; debug: position 195
;;; <@80,#51> constant-s
b902000000 mov ecx,0x2 ;; debug: position 195
;;; <@81,#51> gap
894df0 mov [ebp+0xf0],ecx
;;; <@82,#103> constant-i
bb01000000 mov ebx,0x1
;;; <@84,#102> constant-i
b902000000 mov ecx,0x2
;;; <@85,#102> gap
89d8 mov eax,ebx
89cb mov ebx,ecx
8b4df0 mov ecx,[ebp+0xf0]
;;; <@86,#58> call-with-descriptor
e8ef57fcff call ArgumentsAdaptorTrampoline (0x2d80e6e0) ;; code: BUILTIN
Note the silly handling of ecx; the hydrogen for this code is:
0 4 s27 Constant 1 range:1_1 <|@
0 3 t30 Constant 0x5bc1aa85 <JS Function xyz (SharedFunctionInfo 0x5bc1a919)> type:object <|@
0 1 t36 LoadNamedField t30.[in-object]@24 <|@
0 1 t38 Constant 0x2300e6a1 <Code> <|@
0 1 i102 Constant 2 range:2_2 <|@
0 1 i103 Constant 1 range:1_1 <|@
0 2 t41 CallWithDescriptor t38 t30 t36 s27 i103 i102 #2 changes[*] <|@
BUG=
R=verwaest@chromium.org, danno@chromium.org
Review URL: https://codereview.chromium.org/104663004
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-15 17:00:35 +00:00
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
void IterateDeferredHandles(ObjectVisitor* visitor);
|
|
|
|
void LinkDeferredHandles(DeferredHandles* deferred_handles);
|
|
|
|
void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
|
|
|
|
|
2013-04-23 09:23:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
bool IsDeferredHandle(Object** location);
|
|
|
|
#endif // DEBUG
|
|
|
|
|
2013-11-19 11:52:47 +00:00
|
|
|
bool concurrent_recompilation_enabled() {
|
|
|
|
// Thread is only available with flag enabled.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(optimizing_compiler_thread_ == NULL ||
|
2013-11-19 11:52:47 +00:00
|
|
|
FLAG_concurrent_recompilation);
|
|
|
|
return optimizing_compiler_thread_ != NULL;
|
|
|
|
}
|
|
|
|
|
2013-11-19 12:14:22 +00:00
|
|
|
bool concurrent_osr_enabled() const {
|
2013-11-19 11:52:47 +00:00
|
|
|
// Thread is only available with flag enabled.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(optimizing_compiler_thread_ == NULL ||
|
2013-11-19 11:52:47 +00:00
|
|
|
FLAG_concurrent_recompilation);
|
|
|
|
return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
|
|
|
|
}
|
|
|
|
|
2012-07-19 18:58:23 +00:00
|
|
|
OptimizingCompilerThread* optimizing_compiler_thread() {
|
2013-09-25 15:14:12 +00:00
|
|
|
return optimizing_compiler_thread_;
|
2012-07-19 18:58:23 +00:00
|
|
|
}
|
|
|
|
|
2013-06-20 10:05:33 +00:00
|
|
|
int id() const { return static_cast<int>(id_); }
|
|
|
|
|
2013-03-06 10:49:34 +00:00
|
|
|
HStatistics* GetHStatistics();
|
2014-10-23 09:14:35 +00:00
|
|
|
CompilationStatistics* GetTurboStatistics();
|
2013-03-06 07:25:46 +00:00
|
|
|
HTracer* GetHTracer();
|
2013-11-07 16:35:27 +00:00
|
|
|
CodeTracer* GetCodeTracer();
|
2013-03-06 07:25:46 +00:00
|
|
|
|
2014-11-21 09:53:04 +00:00
|
|
|
void DumpAndResetCompilationStats();
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
|
|
|
|
void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
|
|
|
|
function_entry_hook_ = function_entry_hook;
|
|
|
|
}
|
|
|
|
|
2013-07-18 08:12:01 +00:00
|
|
|
void* stress_deopt_count_address() { return &stress_deopt_count_; }
|
|
|
|
|
2014-06-30 13:25:46 +00:00
|
|
|
inline base::RandomNumberGenerator* random_number_generator();
|
2013-09-10 11:13:55 +00:00
|
|
|
|
2013-07-19 09:39:01 +00:00
|
|
|
// Given an address occupied by a live code object, return that object.
|
|
|
|
Object* FindCodeObject(Address a);
|
|
|
|
|
2014-02-13 16:09:28 +00:00
|
|
|
int NextOptimizationId() {
|
|
|
|
int id = next_optimization_id_++;
|
|
|
|
if (!Smi::IsValid(next_optimization_id_)) {
|
|
|
|
next_optimization_id_ = 0;
|
|
|
|
}
|
|
|
|
return id;
|
|
|
|
}
|
|
|
|
|
2014-03-24 16:34:06 +00:00
|
|
|
// Get (and lazily initialize) the registry for per-isolate symbols.
Handle<JSObject> GetSymbolRegistry();

// Management of the callbacks that are invoked when a call completes.
void AddCallCompletedCallback(CallCompletedCallback callback);
void RemoveCallCompletedCallback(CallCompletedCallback callback);
void FireCallCompletedCallback();

// Promise-rejection reporting hooks exposed through the public API.
void SetPromiseRejectCallback(PromiseRejectCallback callback);
void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
                         v8::PromiseRejectEvent event);

// Microtask queue management.
void EnqueueMicrotask(Handle<Object> microtask);
void RunMicrotasks();

// Use-counter plumbing for embedder feature-usage statistics.
void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
void CountUsage(v8::Isolate::UseCounterFeature feature);

BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }

// Creates an isolate with the serializer disabled (enable_serializer ==
// false; see the protected constructor below).
static Isolate* NewForTesting() { return new Isolate(false); }

std::string GetTurboCfgFileName();

#if TRACE_MAPS
// Returns an id for a SharedFunctionInfo that is unique within this
// isolate (simple monotonically increasing counter).
int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
#endif
|
|
|
|
|
2015-01-08 18:17:23 +00:00
|
|
|
// Cached pointers to the store buffer's two hash sets. These are plain
// setter/getter pairs; the corresponding fields carry a TODO to remove
// them eventually.
void set_store_buffer_hash_set_1_address(
    uintptr_t* store_buffer_hash_set_1_address) {
  store_buffer_hash_set_1_address_ = store_buffer_hash_set_1_address;
}

uintptr_t* store_buffer_hash_set_1_address() {
  return store_buffer_hash_set_1_address_;
}

void set_store_buffer_hash_set_2_address(
    uintptr_t* store_buffer_hash_set_2_address) {
  store_buffer_hash_set_2_address_ = store_buffer_hash_set_2_address;
}

uintptr_t* store_buffer_hash_set_2_address() {
  return store_buffer_hash_set_2_address_;
}
|
|
|
|
|
2015-02-05 09:35:47 +00:00
|
|
|
// Track contexts that have been detached, and inspect them after GC.
// NOTE(review): presumably used to report contexts that stay alive after
// being detached — confirm against the implementation.
void AddDetachedContext(Handle<Context> context);
void CheckDetachedContextsAfterGC();

List<Object*>* partial_snapshot_cache() { return &partial_snapshot_cache_; }
|
|
|
|
|
2015-02-25 11:14:40 +00:00
|
|
|
protected:
|
2014-10-08 11:51:57 +00:00
|
|
|
explicit Isolate(bool enable_serializer);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2015-02-25 11:14:40 +00:00
|
|
|
private:
|
2012-03-12 13:56:56 +00:00
|
|
|
friend struct GlobalState;
|
|
|
|
friend struct InitializeGlobalState;
|
|
|
|
|
2012-04-23 15:09:59 +00:00
|
|
|
// These fields are accessed through the API, offsets must be kept in sync
|
|
|
|
// with v8::internal::Internals (in include/v8.h) constants. This is also
|
|
|
|
// verified in Isolate::Init() using runtime checks.
|
2013-11-20 15:16:18 +00:00
|
|
|
void* embedder_data_[Internals::kNumIsolateDataSlots];
|
2012-04-23 15:09:59 +00:00
|
|
|
Heap heap_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// The per-process lock should be acquired before the ThreadDataTable is
// modified.
class ThreadDataTable {
 public:
  ThreadDataTable();
  ~ThreadDataTable();

  // Looks up the entry for the given (isolate, thread) combination.
  PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
  void Insert(PerIsolateThreadData* data);
  void Remove(PerIsolateThreadData* data);
  // Removes every entry that belongs to the given isolate.
  void RemoveAllThreads(Isolate* isolate);

 private:
  // Head of the entry list.
  PerIsolateThreadData* list_;
};
|
|
|
|
|
|
|
|
// These items form a stack synchronously with threads Enter'ing and Exit'ing
// the Isolate. The top of the stack points to a thread which is currently
// running the Isolate. When the stack is empty, the Isolate is considered
// not entered by any thread and can be Disposed.
// If the same thread enters the Isolate more than once, the entry_count_
// is incremented rather than a new item pushed to the stack.
class EntryStackItem {
 public:
  EntryStackItem(PerIsolateThreadData* previous_thread_data,
                 Isolate* previous_isolate,
                 EntryStackItem* previous_item)
      : entry_count(1),
        previous_thread_data(previous_thread_data),
        previous_isolate(previous_isolate),
        previous_item(previous_item) { }

  // Number of nested Enter calls made by the same thread for this item.
  int entry_count;
  // State to restore when this entry is popped on Exit.
  PerIsolateThreadData* previous_thread_data;
  Isolate* previous_isolate;
  EntryStackItem* previous_item;

 private:
  DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
};
|
|
|
|
|
2014-09-19 08:01:35 +00:00
|
|
|
static base::LazyMutex thread_data_table_mutex_;
|
2012-03-30 14:30:46 +00:00
|
|
|
|
2014-06-30 13:25:46 +00:00
|
|
|
static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
|
|
|
|
static base::Thread::LocalStorageKey isolate_key_;
|
|
|
|
static base::Thread::LocalStorageKey thread_id_key_;
|
2012-03-30 14:30:46 +00:00
|
|
|
static ThreadDataTable* thread_data_table_;
|
|
|
|
|
2013-03-06 07:25:46 +00:00
|
|
|
// A global counter for all generated Isolates, might overflow.
|
2014-06-05 12:14:47 +00:00
|
|
|
static base::Atomic32 isolate_counter_;
|
2013-03-06 07:25:46 +00:00
|
|
|
|
2014-11-26 05:15:17 +00:00
|
|
|
#if DEBUG
|
|
|
|
static base::Atomic32 isolate_key_created_;
|
|
|
|
#endif
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void Deinit();
|
|
|
|
|
|
|
|
static void SetIsolateThreadLocals(Isolate* isolate,
|
|
|
|
PerIsolateThreadData* data);
|
|
|
|
|
|
|
|
// Find the PerThread for this particular (isolate, thread) combination.
|
|
|
|
// If one does not yet exist, allocate a new one.
|
|
|
|
PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
|
|
|
|
|
|
|
|
// Initializes the current thread to run this Isolate.
|
|
|
|
// Not thread-safe. Multiple threads should not Enter/Exit the same isolate
|
|
|
|
// at the same time, this should be prevented using external locking.
|
|
|
|
void Enter();
|
|
|
|
|
|
|
|
// Exits the current thread. The previously entered Isolate is restored
|
|
|
|
// for the thread.
|
|
|
|
// Not thread-safe. Multiple threads should not Enter/Exit the same isolate
|
|
|
|
// at the same time, this should be prevented using external locking.
|
|
|
|
void Exit();
|
|
|
|
|
|
|
|
void InitializeThreadLocal();
|
|
|
|
|
|
|
|
void MarkCompactPrologue(bool is_compacting,
|
|
|
|
ThreadLocalTop* archived_thread_data);
|
|
|
|
void MarkCompactEpilogue(bool is_compacting,
|
|
|
|
ThreadLocalTop* archived_thread_data);
|
|
|
|
|
|
|
|
void FillCache();
|
|
|
|
|
2015-03-19 13:22:42 +00:00
|
|
|
// Propagate pending exception message to the v8::TryCatch.
|
|
|
|
// If there is no external try-catch or message was successfully propagated,
|
|
|
|
// then return true.
|
|
|
|
bool PropagatePendingExceptionToExternalTryCatch();
|
|
|
|
|
2012-02-07 09:31:06 +00:00
|
|
|
// Traverse prototype chain to find out whether the object is derived from
|
|
|
|
// the Error object.
|
|
|
|
bool IsErrorObject(Handle<Object> obj);
|
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
base::Atomic32 id_;
|
2012-04-23 15:09:59 +00:00
|
|
|
EntryStackItem* entry_stack_;
|
2011-03-18 20:35:07 +00:00
|
|
|
int stack_trace_nesting_level_;
|
|
|
|
StringStream* incomplete_message_;
|
2011-09-08 16:29:57 +00:00
|
|
|
Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
|
2011-03-18 20:35:07 +00:00
|
|
|
Bootstrapper* bootstrapper_;
|
|
|
|
RuntimeProfiler* runtime_profiler_;
|
|
|
|
CompilationCache* compilation_cache_;
|
|
|
|
Counters* counters_;
|
|
|
|
CodeRange* code_range_;
|
2014-06-30 13:25:46 +00:00
|
|
|
base::RecursiveMutex break_access_;
|
2011-03-18 20:35:07 +00:00
|
|
|
Logger* logger_;
|
|
|
|
StackGuard stack_guard_;
|
|
|
|
StatsTable* stats_table_;
|
|
|
|
StubCache* stub_cache_;
|
2014-05-06 11:25:37 +00:00
|
|
|
CodeAgingHelper* code_aging_helper_;
|
2011-03-18 20:35:07 +00:00
|
|
|
DeoptimizerData* deoptimizer_data_;
|
The current
version is passing all the existing tests plus a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterializedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
MaterializedObjectStore* materialized_object_store_;
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadLocalTop thread_local_top_;
|
|
|
|
bool capture_stack_trace_for_uncaught_exceptions_;
|
|
|
|
int stack_trace_for_uncaught_exceptions_frame_limit_;
|
|
|
|
StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
|
|
|
|
MemoryAllocator* memory_allocator_;
|
|
|
|
KeyedLookupCache* keyed_lookup_cache_;
|
|
|
|
ContextSlotCache* context_slot_cache_;
|
|
|
|
DescriptorLookupCache* descriptor_lookup_cache_;
|
2014-01-16 08:17:40 +00:00
|
|
|
HandleScopeData handle_scope_data_;
|
2011-03-18 20:35:07 +00:00
|
|
|
HandleScopeImplementer* handle_scope_implementer_;
|
2011-04-12 08:27:38 +00:00
|
|
|
UnicodeCache* unicode_cache_;
|
2013-07-03 11:40:30 +00:00
|
|
|
Zone runtime_zone_;
|
2011-09-20 10:08:39 +00:00
|
|
|
InnerPointerToCodeCache* inner_pointer_to_code_cache_;
|
2011-03-18 20:35:07 +00:00
|
|
|
GlobalHandles* global_handles_;
|
2013-08-05 09:46:23 +00:00
|
|
|
EternalHandles* eternal_handles_;
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadManager* thread_manager_;
|
|
|
|
RuntimeState runtime_state_;
|
|
|
|
Builtins builtins_;
|
2011-11-15 22:48:55 +00:00
|
|
|
bool has_installed_extensions_;
|
2011-03-18 20:35:07 +00:00
|
|
|
StringTracker* string_tracker_;
|
|
|
|
unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
|
|
|
|
unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
|
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>
|
|
|
|
regexp_macro_assembler_canonicalize_;
|
|
|
|
RegExpStack* regexp_stack_;
|
2012-03-09 12:07:29 +00:00
|
|
|
DateCache* date_cache_;
|
2011-03-18 20:35:07 +00:00
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
|
2014-09-03 10:51:51 +00:00
|
|
|
CallInterfaceDescriptorData* call_descriptor_data_;
|
2014-06-30 13:25:46 +00:00
|
|
|
base::RandomNumberGenerator* random_number_generator_;
|
2015-01-08 18:17:23 +00:00
|
|
|
// TODO(hpayer): Remove the following store buffer addresses.
|
|
|
|
uintptr_t* store_buffer_hash_set_1_address_;
|
|
|
|
uintptr_t* store_buffer_hash_set_2_address_;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-05-22 09:36:20 +00:00
|
|
|
// Whether the isolate has been created for snapshotting.
|
|
|
|
bool serializer_enabled_;
|
|
|
|
|
2013-09-03 09:35:26 +00:00
|
|
|
// True if fatal error has been signaled for this isolate.
|
|
|
|
bool has_fatal_error_;
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
// True if this isolate was initialized from a snapshot.
|
|
|
|
bool initialized_from_snapshot_;
|
|
|
|
|
2012-02-06 08:59:43 +00:00
|
|
|
// Time stamp at initialization.
|
|
|
|
double time_millis_at_init_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
// A static array of histogram info for each type.
|
|
|
|
HistogramInfo heap_histograms_[LAST_TYPE + 1];
|
|
|
|
JSObject::SpillInformation js_spill_information_;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
Debug* debug_;
|
2013-04-02 07:53:50 +00:00
|
|
|
CpuProfiler* cpu_profiler_;
|
2013-04-02 08:03:01 +00:00
|
|
|
HeapProfiler* heap_profiler_;
|
2013-06-28 13:40:41 +00:00
|
|
|
FunctionEntryHook function_entry_hook_;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-12-20 07:54:03 +00:00
|
|
|
typedef std::pair<InterruptCallback, void*> InterruptEntry;
|
|
|
|
std::queue<InterruptEntry> api_interrupts_queue_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
|
|
|
|
type name##_;
|
|
|
|
ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
|
|
|
|
#undef GLOBAL_BACKING_STORE
|
|
|
|
|
|
|
|
#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
|
|
|
|
type name##_[length];
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
|
|
|
|
#undef GLOBAL_ARRAY_BACKING_STORE
|
|
|
|
|
|
|
|
#ifdef DEBUG
|
|
|
|
// This class is huge and has a number of fields controlled by
|
|
|
|
// preprocessor defines. Make sure the offsets of these fields agree
|
|
|
|
// between compilation units.
|
|
|
|
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
|
|
|
|
static const intptr_t name##_debug_offset_;
|
|
|
|
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
#undef ISOLATE_FIELD_OFFSET
|
|
|
|
#endif
|
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
DeferredHandles* deferred_handles_head_;
|
2013-09-25 15:14:12 +00:00
|
|
|
OptimizingCompilerThread* optimizing_compiler_thread_;
|
2013-11-19 11:52:47 +00:00
|
|
|
|
2013-07-18 08:12:01 +00:00
|
|
|
// Counts deopt points if deopt_every_n_times is enabled.
|
|
|
|
unsigned int stress_deopt_count_;
|
|
|
|
|
2014-02-13 16:09:28 +00:00
|
|
|
int next_optimization_id_;
|
|
|
|
|
2014-11-07 16:03:11 +00:00
|
|
|
#if TRACE_MAPS
|
|
|
|
int next_unique_sfi_id_;
|
|
|
|
#endif
|
|
|
|
|
2014-04-25 13:49:22 +00:00
|
|
|
// List of callbacks when a Call completes.
|
|
|
|
List<CallCompletedCallback> call_completed_callbacks_;
|
|
|
|
|
2014-06-23 09:46:58 +00:00
|
|
|
v8::Isolate::UseCounterCallback use_counter_callback_;
|
2014-09-29 07:29:14 +00:00
|
|
|
BasicBlockProfiler* basic_block_profiler_;
|
2014-06-23 09:46:58 +00:00
|
|
|
|
2015-02-24 11:31:12 +00:00
|
|
|
List<Object*> partial_snapshot_cache_;
|
2015-02-05 09:35:47 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
friend class ExecutionAccess;
|
2012-07-19 18:58:23 +00:00
|
|
|
friend class HandleScopeImplementer;
|
|
|
|
friend class OptimizingCompilerThread;
|
2013-01-30 12:19:32 +00:00
|
|
|
friend class SweeperThread;
|
2011-05-05 18:55:31 +00:00
|
|
|
friend class ThreadManager;
|
|
|
|
friend class Simulator;
|
|
|
|
friend class StackGuard;
|
2011-04-11 23:46:22 +00:00
|
|
|
friend class ThreadId;
|
2011-08-04 15:18:18 +00:00
|
|
|
friend class TestMemoryAllocatorScope;
|
2013-01-29 09:09:55 +00:00
|
|
|
friend class TestCodeRangeScope;
|
2011-03-18 20:35:07 +00:00
|
|
|
friend class v8::Isolate;
|
|
|
|
friend class v8::Locker;
|
2011-05-05 18:55:31 +00:00
|
|
|
friend class v8::Unlocker;
|
2015-02-25 11:14:40 +00:00
|
|
|
friend v8::StartupData v8::V8::CreateSnapshotDataBlob(const char*);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
DISALLOW_COPY_AND_ASSIGN(Isolate);
|
|
|
|
};
|
|
|
|
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
#undef FIELD_ACCESSOR
|
|
|
|
#undef THREAD_LOCAL_TOP_ACCESSOR
|
|
|
|
|
|
|
|
|
2014-08-13 11:14:35 +00:00
|
|
|
// A node in a linked list that associates a promise with the function
// handling it; prev_ points at the next-outer entry.
// NOTE(review): presumably maintained while promise-related frames are on
// the stack — confirm with the code that pushes/pops these entries.
class PromiseOnStack {
 public:
  PromiseOnStack(Handle<JSFunction> function, Handle<JSObject> promise,
                 PromiseOnStack* prev)
      : function_(function), promise_(promise), prev_(prev) {}
  Handle<JSFunction> function() { return function_; }
  Handle<JSObject> promise() { return promise_; }
  PromiseOnStack* prev() { return prev_; }

 private:
  Handle<JSFunction> function_;
  Handle<JSObject> promise_;
  PromiseOnStack* prev_;
};
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
//
// Scope that restores the isolate's context (and the previous SaveContext)
// on destruction. The saving itself happens in the inline constructor,
// which is defined elsewhere.
class SaveContext BASE_EMBEDDED {
 public:
  inline explicit SaveContext(Isolate* isolate);

  ~SaveContext() {
    // An empty handle means "no context"; restore NULL in that case.
    isolate_->set_context(context_.is_null() ? NULL : *context_);
    isolate_->set_save_context(prev_);
  }

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool IsBelowFrame(JavaScriptFrame* frame) {
    return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
  SaveContext* prev_;
  // Compared against a frame's stack pointer in IsBelowFrame; 0 is treated
  // as "below any frame". NOTE(review): set by the inline constructor.
  Address c_entry_fp_;
};
|
|
|
|
|
|
|
|
|
|
|
|
// Debug-only scope that verifies the isolate's current context does not
// change between construction and destruction. Compiles to a no-op in
// release builds.
class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  explicit AssertNoContextChange(Isolate* isolate)
      : isolate_(isolate),
        context_(isolate->context(), isolate) { }
  ~AssertNoContextChange() {
    // The context at destruction must be the one captured at construction.
    DCHECK(isolate_->context() == *context_);
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
#else
 public:
  explicit AssertNoContextChange(Isolate* isolate) { }
#endif
};
|
|
|
|
|
|
|
|
|
|
|
|
// RAII scope that holds the isolate's break access lock for its lifetime.
// The static Lock/Unlock/TryLock helpers are also usable without a scope.
class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }

  // Non-blocking variant; returns whether the lock was acquired.
  static bool TryLock(Isolate* isolate) {
    return isolate->break_access()->TryLock();
  }

 private:
  Isolate* isolate_;
};
|
|
|
|
|
|
|
|
|
2014-06-17 13:54:49 +00:00
|
|
|
// Support for checking for stack-overflows.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  // Use this to check for stack-overflows in C++ code.
  inline bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    // Overflowed when the current stack position has passed the real
    // (non-interrupt) C++ stack limit.
    return GetCurrentStackPosition() < stack_guard->real_climit();
  }

  // Use this to check for stack-overflow when entering runtime from JS code.
  bool JsHasOverflowed() const;

 private:
  Isolate* isolate_;
};
|
|
|
|
|
|
|
|
|
|
|
|
// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  PostponeInterruptsScope(Isolate* isolate,
                          int intercept_mask = StackGuard::ALL_INTERRUPTS)
      : stack_guard_(isolate->stack_guard()),
        intercept_mask_(intercept_mask),
        intercepted_flags_(0) {
    // Registers this scope with the stack guard; the matching Pop happens
    // in the destructor.
    stack_guard_->PushPostponeInterruptsScope(this);
  }

  ~PostponeInterruptsScope() {
    stack_guard_->PopPostponeInterruptsScope();
  }

  // Find the bottom-most scope that intercepts this interrupt.
  // Return whether the interrupt has been intercepted.
  bool Intercept(StackGuard::InterruptFlag flag);

 private:
  StackGuard* stack_guard_;
  // Bitmask of interrupt flags this scope is willing to intercept.
  int intercept_mask_;
  // Flags accumulated by Intercept while this scope was active.
  int intercepted_flags_;
  // Next-outer scope; maintained by StackGuard via the Push/Pop calls
  // above (hence the friend declaration).
  PostponeInterruptsScope* prev_;

  friend class StackGuard;
};
|
|
|
|
|
|
|
|
|
2014-09-02 07:07:52 +00:00
|
|
|
// Sink for code tracing output. By default traces go to stdout; when
// --redirect_code_traces is set they go to a per-isolate file instead
// (either --redirect_code_traces_to or "code-<pid>-<isolate id>.asm").
// The file is opened lazily by nested Scope objects and closed again when
// the last scope exits.
class CodeTracer FINAL : public Malloced {
 public:
  explicit CodeTracer(int isolate_id)
      : file_(NULL),
        scope_depth_(0) {
    if (!ShouldRedirect()) {
      file_ = stdout;
      return;
    }

    if (FLAG_redirect_code_traces_to == NULL) {
      // Embed process and isolate ids so concurrent isolates do not
      // clobber each other's trace files.
      SNPrintF(filename_,
               "code-%d-%d.asm",
               base::OS::GetCurrentProcessId(),
               isolate_id);
    } else {
      StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
    }

    // Truncate any stale trace file left over from a previous run.
    WriteChars(filename_.start(), "", 0, false);
  }

  // Keeps the trace file open for the scope's lifetime; scopes may nest.
  class Scope {
   public:
    explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
    ~Scope() { tracer_->CloseFile(); }

    FILE* file() const { return tracer_->file(); }

   private:
    CodeTracer* tracer_;
  };

  void OpenFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (file_ == NULL) {
      file_ = base::OS::FOpen(filename_.start(), "ab");
    }

    scope_depth_++;
  }

  void CloseFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (--scope_depth_ == 0) {
      // FOpen may have failed in OpenFile, leaving file_ NULL; calling
      // fclose(NULL) is undefined behavior, so guard against it.
      if (file_ != NULL) {
        fclose(file_);
        file_ = NULL;
      }
    }
  }

  FILE* file() const { return file_; }

 private:
  static bool ShouldRedirect() {
    return FLAG_redirect_code_traces;
  }

  // Target file name; only meaningful when redirection is enabled.
  EmbeddedVector<char, 128> filename_;
  FILE* file_;
  // Number of live Scope objects; the file is closed when it drops to 0.
  int scope_depth_;
};
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
} } // namespace v8::internal
|
|
|
|
|
|
|
|
#endif // V8_ISOLATE_H_
|