// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

// NOTE: include order preserved from the original file; some of these
// headers have ordering dependencies on each other.
#include "../include/v8-debug.h"
#include "allocation.h"
#include "assert-scope.h"
#include "atomicops.h"
#include "builtins.h"
#include "contexts.h"
#include "execution.h"
#include "frames.h"
#include "date.h"
#include "global-handles.h"
#include "handles.h"
#include "hashmap.h"
#include "heap.h"
#include "optimizing-compiler-thread.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
#include "zone.h"
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
|
|
|
|
class Bootstrapper;
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
struct CallInterfaceDescriptor;
|
2011-03-18 20:35:07 +00:00
|
|
|
class CodeGenerator;
|
|
|
|
class CodeRange;
|
2012-12-18 16:25:45 +00:00
|
|
|
struct CodeStubInterfaceDescriptor;
|
2013-11-07 16:35:27 +00:00
|
|
|
class CodeTracer;
|
2011-03-18 20:35:07 +00:00
|
|
|
class CompilationCache;
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
class ConsStringIteratorOp;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ContextSlotCache;
|
|
|
|
class Counters;
|
|
|
|
class CpuFeatures;
|
|
|
|
class CpuProfiler;
|
|
|
|
class DeoptimizerData;
|
|
|
|
class Deserializer;
|
|
|
|
class EmptyStatement;
|
2013-07-23 15:01:38 +00:00
|
|
|
class ExternalCallbackScope;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ExternalReferenceTable;
|
|
|
|
class Factory;
|
|
|
|
class FunctionInfoListener;
|
|
|
|
class HandleScopeImplementer;
|
|
|
|
class HeapProfiler;
|
2013-03-06 10:49:34 +00:00
|
|
|
class HStatistics;
|
2013-03-06 07:25:46 +00:00
|
|
|
class HTracer;
|
2011-03-18 20:35:07 +00:00
|
|
|
class InlineRuntimeFunctionsTable;
|
2014-01-29 15:49:48 +00:00
|
|
|
class InnerPointerToCodeCache;
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
class MaterializedObjectStore;
|
|
|
|
class NoAllocationStringAllocator;
|
2013-09-10 11:13:55 +00:00
|
|
|
class RandomNumberGenerator;
|
2011-03-18 20:35:07 +00:00
|
|
|
class RegExpStack;
|
|
|
|
class SaveContext;
|
|
|
|
class StringTracker;
|
|
|
|
class StubCache;
|
2013-01-30 12:19:32 +00:00
|
|
|
class SweeperThread;
|
2011-03-18 20:35:07 +00:00
|
|
|
class ThreadManager;
|
|
|
|
class ThreadState;
|
|
|
|
class ThreadVisitor; // Defined in v8threads.h
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
class UnicodeCache;
|
2013-04-24 14:44:08 +00:00
|
|
|
template <StateTag Tag> class VMState;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// 'void function pointer', used to roundtrip the
|
|
|
|
// ExternalReference::ExternalReferenceRedirector since we can not include
|
|
|
|
// assembler.h, where it is defined, here.
|
|
|
|
typedef void* ExternalReferenceRedirectorPointer();
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
|
|
|
class Debug;
|
|
|
|
class Debugger;
|
|
|
|
class DebuggerAgent;
|
|
|
|
#endif
|
|
|
|
|
2013-06-28 15:34:48 +00:00
|
|
|
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
|
2014-03-21 09:28:26 +00:00
|
|
|
!defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
|
2013-06-28 15:34:48 +00:00
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS
|
2011-03-18 20:35:07 +00:00
|
|
|
class Redirection;
|
|
|
|
class Simulator;
|
|
|
|
#endif
|
|
|
|
|
2011-03-28 13:05:36 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Static indirection table for handles to constants. If a frame
|
|
|
|
// element represents a constant, the data contains an index into
|
|
|
|
// this table of handles to the actual constants.
|
|
|
|
// Static indirection table for handles to constants. If a Result
|
|
|
|
// represents a constant, the data contains an index into this table
|
|
|
|
// of handles to the actual constants.
|
|
|
|
typedef ZoneList<Handle<Object> > ZoneObjectList;
|
|
|
|
|
// If a scheduled exception is pending on |isolate|, promote it to a real
// (pending) exception and return the resulting failure sentinel.
#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)            \
  do {                                                    \
    Isolate* __isolate__ = (isolate);                     \
    if (__isolate__->has_scheduled_exception()) {         \
      return __isolate__->PromoteScheduledException();    \
    }                                                     \
  } while (false)

// Handle<T>-returning variant of RETURN_IF_SCHEDULED_EXCEPTION: promotes
// the scheduled exception and returns a null handle.
#define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T)  \
  do {                                                    \
    Isolate* __isolate__ = (isolate);                     \
    if (__isolate__->has_scheduled_exception()) {         \
      __isolate__->PromoteScheduledException();           \
      return Handle<T>::null();                           \
    }                                                     \
  } while (false)


// If |call| produced an empty handle, a pending exception must already be
// set on |isolate|; propagate it by returning |value|.
#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value)  \
  do {                                                      \
    if ((call).is_null()) {                                 \
      ASSERT((isolate)->has_pending_exception());           \
      return (value);                                       \
    }                                                       \
  } while (false)

// Asserts that |call| cannot fail: no exception pending before, and the
// result is non-empty.
#define CHECK_NOT_EMPTY_HANDLE(isolate, call)     \
  do {                                            \
    ASSERT(!(isolate)->has_pending_exception());  \
    CHECK(!(call).is_null());                     \
  } while (false)

#define RETURN_IF_EMPTY_HANDLE(isolate, call)  \
  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())


// Macros for MaybeHandle.

// MaybeHandle<T>-returning variant of RETURN_IF_SCHEDULED_EXCEPTION.
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T)  \
  do {                                                       \
    Isolate* __isolate__ = (isolate);                        \
    if (__isolate__->has_scheduled_exception()) {            \
      __isolate__->PromoteScheduledException();              \
      return MaybeHandle<T>();                               \
    }                                                        \
  } while (false)

// Assigns the result of |call| (a MaybeHandle) to |dst|; on failure a
// pending exception must be set, and |value| is returned.
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
  do {                                                               \
    if (!(call).ToHandle(&dst)) {                                    \
      ASSERT((isolate)->has_pending_exception());                    \
      return value;                                                  \
    }                                                                \
  } while (false)

#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, Failure::Exception())

#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())

// NOTE(review): the |dst| parameter is vestigial — the expansion never uses
// it (see the two wrappers below, which pass a bare `dst` token that is
// harmlessly ignored).  Kept for source compatibility with existing callers.
#define RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
  do {                                                        \
    if (call.is_null()) {                                     \
      ASSERT((isolate)->has_pending_exception());             \
      return value;                                           \
    }                                                         \
  } while (false)

#define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, Failure::Exception())

#define RETURN_ON_EXCEPTION(isolate, call, T)  \
  RETURN_ON_EXCEPTION_VALUE(               \
      isolate, dst, call, MaybeHandle<T>::Exception())
// X-macro list of the per-isolate addresses; C is invoked as
// C(CamelName, hacker_name).  Used below to declare the members of
// Isolate::AddressId.
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)
|
|
|
// Platform-independent, reliable thread identifier.
|
|
|
|
class ThreadId {
|
|
|
|
public:
|
|
|
|
// Creates an invalid ThreadId.
|
|
|
|
ThreadId() : id_(kInvalidId) {}
|
|
|
|
|
|
|
|
// Returns ThreadId for current thread.
|
|
|
|
static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
|
|
|
|
|
|
|
|
// Returns invalid ThreadId (guaranteed not to be equal to any thread).
|
|
|
|
static ThreadId Invalid() { return ThreadId(kInvalidId); }
|
|
|
|
|
|
|
|
// Compares ThreadIds for equality.
|
|
|
|
INLINE(bool Equals(const ThreadId& other) const) {
|
|
|
|
return id_ == other.id_;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Checks whether this ThreadId refers to any thread.
|
|
|
|
INLINE(bool IsValid() const) {
|
|
|
|
return id_ != kInvalidId;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Converts ThreadId to an integer representation
|
|
|
|
// (required for public API: V8::V8::GetCurrentThreadId).
|
|
|
|
int ToInteger() const { return id_; }
|
|
|
|
|
|
|
|
// Converts ThreadId to an integer representation
|
|
|
|
// (required for public API: V8::V8::TerminateExecution).
|
|
|
|
static ThreadId FromInteger(int id) { return ThreadId(id); }
|
|
|
|
|
|
|
|
private:
|
|
|
|
static const int kInvalidId = -1;
|
|
|
|
|
|
|
|
explicit ThreadId(int id) : id_(id) {}
|
|
|
|
|
|
|
|
static int AllocateThreadId();
|
|
|
|
|
|
|
|
static int GetCurrentThreadId();
|
|
|
|
|
|
|
|
int id_;
|
|
|
|
|
|
|
|
static Atomic32 highest_thread_id_;
|
|
|
|
|
|
|
|
friend class Isolate;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
// Declares a trivial setter/getter pair for a member field named |name##_|.
#define FIELD_ACCESSOR(type, name)                 \
  inline void set_##name(type v) { name##_ = v; }  \
  inline type name() const { return name##_; }
2011-03-18 20:35:07 +00:00
|
|
|
class ThreadLocalTop BASE_EMBEDDED {
|
|
|
|
public:
|
2011-04-15 20:47:27 +00:00
|
|
|
// Does early low-level initialization that does not depend on the
|
|
|
|
// isolate being present.
|
|
|
|
ThreadLocalTop();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Initialize the thread data.
|
|
|
|
void Initialize();
|
|
|
|
|
|
|
|
// Get the top C++ try catch handler or NULL if none are registered.
|
|
|
|
//
|
|
|
|
// This method is not guarenteed to return an address that can be
|
|
|
|
// used for comparison with addresses into the JS stack. If such an
|
|
|
|
// address is needed, use try_catch_handler_address.
|
|
|
|
v8::TryCatch* TryCatchHandler();
|
|
|
|
|
|
|
|
// Get the address of the top C++ try catch handler or NULL if
|
|
|
|
// none are registered.
|
|
|
|
//
|
|
|
|
// This method always returns an address that can be compared to
|
|
|
|
// pointers into the JavaScript stack. When running on actual
|
|
|
|
// hardware, try_catch_handler_address and TryCatchHandler return
|
|
|
|
// the same pointer. When running on a simulator with a separate JS
|
|
|
|
// stack, try_catch_handler_address returns a JS stack address that
|
|
|
|
// corresponds to the place on the JS stack where the C++ handler
|
|
|
|
// would have been if the stack were not separate.
|
2014-02-11 14:03:31 +00:00
|
|
|
FIELD_ACCESSOR(Address, try_catch_handler_address)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
void Free() {
|
|
|
|
ASSERT(!has_pending_message_);
|
|
|
|
ASSERT(!external_caught_exception_);
|
|
|
|
ASSERT(try_catch_handler_address_ == NULL);
|
|
|
|
}
|
|
|
|
|
2011-05-05 18:55:31 +00:00
|
|
|
Isolate* isolate_;
|
2011-03-18 20:35:07 +00:00
|
|
|
// The context where the current execution method is created and for variable
|
|
|
|
// lookups.
|
|
|
|
Context* context_;
|
2011-04-11 23:46:22 +00:00
|
|
|
ThreadId thread_id_;
|
2011-03-18 20:35:07 +00:00
|
|
|
MaybeObject* pending_exception_;
|
|
|
|
bool has_pending_message_;
|
2013-07-01 10:54:39 +00:00
|
|
|
bool rethrowing_message_;
|
2011-03-18 20:35:07 +00:00
|
|
|
Object* pending_message_obj_;
|
2013-07-01 10:54:39 +00:00
|
|
|
Object* pending_message_script_;
|
2011-03-18 20:35:07 +00:00
|
|
|
int pending_message_start_pos_;
|
|
|
|
int pending_message_end_pos_;
|
|
|
|
// Use a separate value for scheduled exceptions to preserve the
|
|
|
|
// invariants that hold about pending_exception. We may want to
|
|
|
|
// unify them later.
|
|
|
|
MaybeObject* scheduled_exception_;
|
|
|
|
bool external_caught_exception_;
|
|
|
|
SaveContext* save_context_;
|
|
|
|
v8::TryCatch* catcher_;
|
|
|
|
|
|
|
|
// Stack.
|
|
|
|
Address c_entry_fp_; // the frame pointer of the top c entry frame
|
|
|
|
Address handler_; // try-blocks are chained through the stack
|
|
|
|
|
|
|
|
#ifdef USE_SIMULATOR
|
|
|
|
Simulator* simulator_;
|
|
|
|
#endif
|
|
|
|
|
2012-01-16 12:38:59 +00:00
|
|
|
Address js_entry_sp_; // the stack pointer of the bottom JS entry frame
|
2013-07-23 15:01:38 +00:00
|
|
|
// the external callback we're currently in
|
|
|
|
ExternalCallbackScope* external_callback_scope_;
|
2011-03-18 20:35:07 +00:00
|
|
|
StateTag current_vm_state_;
|
|
|
|
|
|
|
|
// Generated code scratch locations.
|
|
|
|
int32_t formal_count_;
|
|
|
|
|
|
|
|
// Call back function to report unsafe JS accesses.
|
|
|
|
v8::FailedAccessCheckCallback failed_access_check_callback_;
|
|
|
|
|
2011-10-18 11:18:55 +00:00
|
|
|
// Head of the list of live LookupResults.
|
|
|
|
LookupResult* top_lookup_result_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
private:
|
2011-04-15 20:47:27 +00:00
|
|
|
void InitializeInternal();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
Address try_catch_handler_address_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT

// Isolate fields that exist only in debugger-enabled builds.
#define ISOLATE_DEBUGGER_INIT_LIST(V)                    \
  V(DebuggerAgent*, debugger_agent_instance, NULL)
#else

#define ISOLATE_DEBUGGER_INIT_LIST(V)

#endif


// Isolate fields backing the CPU simulator; present only in simulator
// builds (target architecture differs from the host architecture).
#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
    V8_TARGET_ARCH_MIPS && !defined(__mips__)

#define ISOLATE_INIT_SIMULATOR_LIST(V)                   \
  V(bool, simulator_initialized, false)                  \
  V(HashMap*, simulator_i_cache, NULL)                   \
  V(Redirection*, simulator_redirection, NULL)
#else

#define ISOLATE_INIT_SIMULATOR_LIST(V)

#endif
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
|
|
|
|
V(CommentStatistic, paged_space_comments_statistics, \
|
2014-03-14 15:06:17 +00:00
|
|
|
CommentStatistic::kMaxComments + 1) \
|
|
|
|
V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
|
2011-03-18 20:35:07 +00:00
|
|
|
#else
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#define ISOLATE_INIT_ARRAY_LIST(V) \
|
|
|
|
/* SerializerDeserializer state. */ \
|
2012-08-28 09:37:41 +00:00
|
|
|
V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
|
2011-03-18 20:35:07 +00:00
|
|
|
V(int, bad_char_shift_table, kUC16AlphabetSize) \
|
|
|
|
V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \
|
|
|
|
V(int, suffix_table, (kBMMaxShift + 1)) \
|
2011-07-04 11:34:29 +00:00
|
|
|
V(uint32_t, private_random_seed, 2) \
|
2011-03-18 20:35:07 +00:00
|
|
|
ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
|
|
|
|
|
2013-11-20 12:35:58 +00:00
|
|
|
typedef List<HeapObject*> DebugObjectCache;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
// Scalar fields embedded in the Isolate together with their initial
// values; V is invoked as V(type, name, initial_value).
#define ISOLATE_INIT_LIST(V)                                                   \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
  V(Object**, serialize_partial_snapshot_cache, NULL)                          \
  /* Assembler state. */                                                       \
  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
  V(byte*, assembler_spare_buffer, NULL)                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(LogEventCallback, event_logger, NULL)                                      \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  /* AstNode state. */                                                         \
  V(int, ast_node_id, 0)                                                       \
  V(unsigned, ast_node_count, 0)                                               \
  V(bool, microtask_pending, false)                                            \
  V(bool, autorun_microtasks, true)                                            \
  V(HStatistics*, hstatistics, NULL)                                           \
  V(HTracer*, htracer, NULL)                                                   \
  V(CodeTracer*, code_tracer, NULL)                                            \
  V(bool, fp_stubs_generated, false)                                           \
  V(int, max_available_threads, 0)                                             \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
  ISOLATE_INIT_SIMULATOR_LIST(V)                                               \
  ISOLATE_DEBUGGER_INIT_LIST(V)
// Like FIELD_ACCESSOR, but the backing field |name##_| lives inside the
// enclosing class's thread_local_top_ member.
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
  inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
  inline type name() const { return thread_local_top_.name##_; }
2011-03-18 20:35:07 +00:00
|
|
|
class Isolate {
|
|
|
|
// These forward declarations are required to make the friend declarations in
|
|
|
|
// PerIsolateThreadData work on some older versions of gcc.
|
|
|
|
class ThreadDataTable;
|
|
|
|
class EntryStackItem;
|
|
|
|
public:
|
|
|
|
~Isolate();
|
|
|
|
|
|
|
|
// A thread has a PerIsolateThreadData instance for each isolate that it has
|
|
|
|
// entered. That instance is allocated when the isolate is initially entered
|
|
|
|
// and reused on subsequent entries.
|
|
|
|
class PerIsolateThreadData {
|
|
|
|
public:
|
|
|
|
PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
|
|
|
|
: isolate_(isolate),
|
|
|
|
thread_id_(thread_id),
|
|
|
|
stack_limit_(0),
|
|
|
|
thread_state_(NULL),
|
2013-06-28 15:34:48 +00:00
|
|
|
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
|
2014-03-21 09:28:26 +00:00
|
|
|
!defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
|
2013-06-28 15:34:48 +00:00
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS
|
2011-03-18 20:35:07 +00:00
|
|
|
simulator_(NULL),
|
|
|
|
#endif
|
|
|
|
next_(NULL),
|
|
|
|
prev_(NULL) { }
|
2014-02-28 10:55:47 +00:00
|
|
|
~PerIsolateThreadData();
|
2011-03-18 20:35:07 +00:00
|
|
|
Isolate* isolate() const { return isolate_; }
|
|
|
|
ThreadId thread_id() const { return thread_id_; }
|
2014-02-11 14:03:31 +00:00
|
|
|
|
|
|
|
FIELD_ACCESSOR(uintptr_t, stack_limit)
|
|
|
|
FIELD_ACCESSOR(ThreadState*, thread_state)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-06-28 15:34:48 +00:00
|
|
|
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
|
2014-03-21 09:28:26 +00:00
|
|
|
!defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
|
2013-06-28 15:34:48 +00:00
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS
|
2014-02-11 14:03:31 +00:00
|
|
|
FIELD_ACCESSOR(Simulator*, simulator)
|
2011-03-18 20:35:07 +00:00
|
|
|
#endif
|
|
|
|
|
|
|
|
bool Matches(Isolate* isolate, ThreadId thread_id) const {
|
2011-04-11 23:46:22 +00:00
|
|
|
return isolate_ == isolate && thread_id_.Equals(thread_id);
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
Isolate* isolate_;
|
|
|
|
ThreadId thread_id_;
|
|
|
|
uintptr_t stack_limit_;
|
|
|
|
ThreadState* thread_state_;
|
|
|
|
|
2013-06-28 15:34:48 +00:00
|
|
|
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
|
2014-03-21 09:28:26 +00:00
|
|
|
!defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
|
2013-06-28 15:34:48 +00:00
|
|
|
!defined(__mips__) && V8_TARGET_ARCH_MIPS
|
2011-03-18 20:35:07 +00:00
|
|
|
Simulator* simulator_;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
PerIsolateThreadData* next_;
|
|
|
|
PerIsolateThreadData* prev_;
|
|
|
|
|
|
|
|
friend class Isolate;
|
|
|
|
friend class ThreadDataTable;
|
|
|
|
friend class EntryStackItem;
|
|
|
|
|
|
|
|
DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
enum AddressId {
|
2011-09-08 16:29:57 +00:00
|
|
|
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
|
|
|
|
FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
|
2012-04-23 15:09:59 +00:00
|
|
|
#undef DECLARE_ENUM
|
2011-09-08 16:29:57 +00:00
|
|
|
kIsolateAddressCount
|
2011-03-18 20:35:07 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
// Returns the PerIsolateThreadData for the current thread (or NULL if one is
|
|
|
|
// not currently set).
|
|
|
|
static PerIsolateThreadData* CurrentPerIsolateThreadData() {
|
|
|
|
return reinterpret_cast<PerIsolateThreadData*>(
|
2012-03-30 14:30:46 +00:00
|
|
|
Thread::GetThreadLocal(per_isolate_thread_data_key_));
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Returns the isolate inside which the current thread is running.
|
|
|
|
INLINE(static Isolate* Current()) {
|
2011-03-27 16:14:20 +00:00
|
|
|
Isolate* isolate = reinterpret_cast<Isolate*>(
|
2012-03-30 14:30:46 +00:00
|
|
|
Thread::GetExistingThreadLocal(isolate_key_));
|
2011-03-18 20:35:07 +00:00
|
|
|
ASSERT(isolate != NULL);
|
|
|
|
return isolate;
|
|
|
|
}
|
|
|
|
|
|
|
|
INLINE(static Isolate* UncheckedCurrent()) {
|
2012-03-30 14:30:46 +00:00
|
|
|
return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
// Usually called by Init(), but can be called early e.g. to allow
|
|
|
|
// testing components that require logging but not the whole
|
|
|
|
// isolate.
|
|
|
|
//
|
|
|
|
// Safe to call more than once.
|
|
|
|
void InitializeLoggingAndCounters();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool Init(Deserializer* des);
|
|
|
|
|
|
|
|
bool IsInitialized() { return state_ == INITIALIZED; }
|
|
|
|
|
|
|
|
// True if at least one thread Enter'ed this isolate.
|
|
|
|
bool IsInUse() { return entry_stack_ != NULL; }
|
|
|
|
|
|
|
|
// Destroys the non-default isolates.
|
|
|
|
// Sets default isolate into "has_been_disposed" state rather then destroying,
|
|
|
|
// for legacy API reasons.
|
|
|
|
void TearDown();
|
|
|
|
|
2013-02-12 11:57:51 +00:00
|
|
|
static void GlobalTearDown();
|
|
|
|
|
2013-10-02 09:01:40 +00:00
|
|
|
bool IsDefaultIsolate() const { return this == default_isolate_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-09-19 07:33:45 +00:00
|
|
|
static void SetCrashIfDefaultIsolateInitialized();
|
2011-03-18 20:35:07 +00:00
|
|
|
// Ensures that process-wide resources and the default isolate have been
|
2012-01-16 12:38:59 +00:00
|
|
|
// allocated. It is only necessary to call this method in rare cases, for
|
2011-03-18 20:35:07 +00:00
|
|
|
// example if you are using V8 from within the body of a static initializer.
|
|
|
|
// Safe to call multiple times.
|
2013-10-02 09:01:40 +00:00
|
|
|
static void EnsureDefaultIsolate();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-05-05 18:55:31 +00:00
|
|
|
// Find the PerThread for this particular (isolate, thread) combination
|
|
|
|
// If one does not yet exist, return null.
|
|
|
|
PerIsolateThreadData* FindPerThreadDataForThisThread();
|
|
|
|
|
2013-04-11 14:22:04 +00:00
|
|
|
// Find the PerThread for given (isolate, thread) combination
|
|
|
|
// If one does not yet exist, return null.
|
|
|
|
PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
|
|
|
|
|
2011-04-26 13:53:19 +00:00
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
2011-03-18 20:35:07 +00:00
|
|
|
// Get the debugger from the default isolate. Preinitializes the
|
|
|
|
// default isolate if needed.
|
|
|
|
static Debugger* GetDefaultIsolateDebugger();
|
2011-04-26 13:53:19 +00:00
|
|
|
#endif
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Get the stack guard from the default isolate. Preinitializes the
|
|
|
|
// default isolate if needed.
|
|
|
|
static StackGuard* GetDefaultIsolateStackGuard();
|
|
|
|
|
|
|
|
// Returns the key used to store the pointer to the current isolate.
|
|
|
|
// Used internally for V8 threads that do not execute JavaScript but still
|
|
|
|
// are part of the domain of an isolate (like the context switcher).
|
2012-03-30 14:30:46 +00:00
|
|
|
static Thread::LocalStorageKey isolate_key() {
|
|
|
|
return isolate_key_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Returns the key used to store process-wide thread IDs.
|
2012-03-30 14:30:46 +00:00
|
|
|
static Thread::LocalStorageKey thread_id_key() {
|
|
|
|
return thread_id_key_;
|
|
|
|
}
|
2012-03-12 13:56:56 +00:00
|
|
|
|
|
|
|
static Thread::LocalStorageKey per_isolate_thread_data_key();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// If a client attempts to create a Locker without specifying an isolate,
|
|
|
|
// we assume that the client is using legacy behavior. Set up the current
|
|
|
|
// thread to be inside the implicit isolate (or fail a check if we have
|
|
|
|
// switched to non-legacy behavior).
|
|
|
|
static void EnterDefaultIsolate();
|
|
|
|
|
|
|
|
// Mutex for serializing access to break control structures.
|
2013-08-29 09:58:30 +00:00
|
|
|
RecursiveMutex* break_access() { return &break_access_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
// Mutex for serializing access to debugger.
|
2013-08-29 09:58:30 +00:00
|
|
|
RecursiveMutex* debugger_access() { return &debugger_access_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
Address get_address_from_id(AddressId id);
|
|
|
|
|
|
|
|
// Access to top context (where the current function object was created).
|
|
|
|
Context* context() { return thread_local_top_.context_; }
|
|
|
|
void set_context(Context* context) {
|
2011-06-07 18:33:03 +00:00
|
|
|
ASSERT(context == NULL || context->IsContext());
|
2011-03-18 20:35:07 +00:00
|
|
|
thread_local_top_.context_ = context;
|
|
|
|
}
|
|
|
|
Context** context_address() { return &thread_local_top_.context_; }
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Access to current thread id.
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Interface to pending exception.
|
|
|
|
MaybeObject* pending_exception() {
|
|
|
|
ASSERT(has_pending_exception());
|
|
|
|
return thread_local_top_.pending_exception_;
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void set_pending_exception(MaybeObject* exception) {
|
|
|
|
thread_local_top_.pending_exception_ = exception;
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void clear_pending_exception() {
|
|
|
|
thread_local_top_.pending_exception_ = heap_.the_hole_value();
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
MaybeObject** pending_exception_address() {
|
|
|
|
return &thread_local_top_.pending_exception_;
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool has_pending_exception() {
|
|
|
|
return !thread_local_top_.pending_exception_->IsTheHole();
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void clear_pending_message() {
|
|
|
|
thread_local_top_.has_pending_message_ = false;
|
|
|
|
thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
|
2013-07-01 10:54:39 +00:00
|
|
|
thread_local_top_.pending_message_script_ = heap_.the_hole_value();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
v8::TryCatch* try_catch_handler() {
|
|
|
|
return thread_local_top_.TryCatchHandler();
|
|
|
|
}
|
|
|
|
Address try_catch_handler_address() {
|
|
|
|
return thread_local_top_.try_catch_handler_address();
|
|
|
|
}
|
|
|
|
bool* external_caught_exception_address() {
|
|
|
|
return &thread_local_top_.external_caught_exception_;
|
|
|
|
}
|
2014-02-11 14:03:31 +00:00
|
|
|
|
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
MaybeObject** scheduled_exception_address() {
|
|
|
|
return &thread_local_top_.scheduled_exception_;
|
|
|
|
}
|
2012-06-11 13:18:05 +00:00
|
|
|
|
|
|
|
Address pending_message_obj_address() {
|
|
|
|
return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
|
|
|
|
}
|
|
|
|
|
|
|
|
Address has_pending_message_address() {
|
|
|
|
return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
|
|
|
|
}
|
|
|
|
|
|
|
|
Address pending_message_script_address() {
|
|
|
|
return reinterpret_cast<Address>(
|
|
|
|
&thread_local_top_.pending_message_script_);
|
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
MaybeObject* scheduled_exception() {
|
|
|
|
ASSERT(has_scheduled_exception());
|
|
|
|
return thread_local_top_.scheduled_exception_;
|
|
|
|
}
|
|
|
|
bool has_scheduled_exception() {
|
2011-06-10 09:54:04 +00:00
|
|
|
return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
void clear_scheduled_exception() {
|
|
|
|
thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
|
|
|
|
}
|
|
|
|
|
|
|
|
bool IsExternallyCaught();
|
|
|
|
|
|
|
|
bool is_catchable_by_javascript(MaybeObject* exception) {
|
2014-03-24 10:07:15 +00:00
|
|
|
return exception != heap()->termination_exception();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2012-06-19 18:38:03 +00:00
|
|
|
// Serializer.
|
|
|
|
void PushToPartialSnapshotCache(Object* obj);
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// JS execution stack (see frames.h).
|
|
|
|
static Address c_entry_fp(ThreadLocalTop* thread) {
|
|
|
|
return thread->c_entry_fp_;
|
|
|
|
}
|
|
|
|
static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
|
|
|
|
|
|
|
|
inline Address* c_entry_fp_address() {
|
|
|
|
return &thread_local_top_.c_entry_fp_;
|
|
|
|
}
|
|
|
|
inline Address* handler_address() { return &thread_local_top_.handler_; }
|
|
|
|
|
2013-08-07 17:04:27 +00:00
|
|
|
// Bottom JS entry.
|
|
|
|
Address js_entry_sp() {
|
|
|
|
return thread_local_top_.js_entry_sp_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
inline Address* js_entry_sp_address() {
|
|
|
|
return &thread_local_top_.js_entry_sp_;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Generated code scratch locations.
|
|
|
|
void* formal_count_address() { return &thread_local_top_.formal_count_; }
|
|
|
|
|
|
|
|
// Returns the global object of the current context. It could be
|
2012-01-16 12:38:59 +00:00
|
|
|
// a builtin object, or a JS global object.
|
2012-08-17 12:59:00 +00:00
|
|
|
Handle<GlobalObject> global_object() {
|
|
|
|
return Handle<GlobalObject>(context()->global_object());
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Returns the global proxy object of the current context.
|
|
|
|
Object* global_proxy() {
|
|
|
|
return context()->global_proxy();
|
|
|
|
}
|
|
|
|
|
|
|
|
Handle<JSBuiltinsObject> js_builtins_object() {
|
|
|
|
return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
|
|
|
|
}
|
|
|
|
|
|
|
|
static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
|
|
|
|
void FreeThreadResources() { thread_local_top_.Free(); }
|
|
|
|
|
|
|
|
// This method is called by the api after operations that may throw
|
|
|
|
// exceptions. If an exception was thrown and not handled by an external
|
|
|
|
// handler the exception is scheduled to be rethrown when we return to running
|
|
|
|
// JavaScript code. If an exception is scheduled true is returned.
|
|
|
|
bool OptionalRescheduleException(bool is_bottom_call);
|
|
|
|
|
2011-04-07 19:52:24 +00:00
|
|
|
class ExceptionScope {
|
|
|
|
public:
|
|
|
|
explicit ExceptionScope(Isolate* isolate) :
|
|
|
|
// Scope currently can only be used for regular exceptions, not
|
|
|
|
// failures like OOM or termination exception.
|
|
|
|
isolate_(isolate),
|
2013-02-25 14:46:09 +00:00
|
|
|
pending_exception_(isolate_->pending_exception()->ToObjectUnchecked(),
|
|
|
|
isolate_),
|
2011-04-07 19:52:24 +00:00
|
|
|
catcher_(isolate_->catcher())
|
|
|
|
{ }
|
|
|
|
|
|
|
|
~ExceptionScope() {
|
|
|
|
isolate_->set_catcher(catcher_);
|
|
|
|
isolate_->set_pending_exception(*pending_exception_);
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
Isolate* isolate_;
|
|
|
|
Handle<Object> pending_exception_;
|
|
|
|
v8::TryCatch* catcher_;
|
|
|
|
};
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void SetCaptureStackTraceForUncaughtExceptions(
|
|
|
|
bool capture,
|
|
|
|
int frame_limit,
|
|
|
|
StackTrace::StackTraceOptions options);
|
|
|
|
|
|
|
|
void PrintCurrentStackTrace(FILE* out);
|
|
|
|
void PrintStack(StringStream* accumulator);
|
2013-05-21 09:25:57 +00:00
|
|
|
void PrintStack(FILE* out);
|
2011-03-18 20:35:07 +00:00
|
|
|
Handle<String> StackTraceString();
|
2012-09-05 12:30:49 +00:00
|
|
|
NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
|
|
|
|
Object* object,
|
|
|
|
Map* map,
|
|
|
|
unsigned int magic2));
|
2011-03-18 20:35:07 +00:00
|
|
|
Handle<JSArray> CaptureCurrentStackTrace(
|
|
|
|
int frame_limit,
|
|
|
|
StackTrace::StackTraceOptions options);
|
|
|
|
|
2012-11-12 14:54:29 +00:00
|
|
|
Handle<JSArray> CaptureSimpleStackTrace(Handle<JSObject> error_object,
|
|
|
|
Handle<Object> caller,
|
|
|
|
int limit);
|
|
|
|
void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
|
2012-02-07 09:31:06 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Returns if the top context may access the given global object. If
|
|
|
|
// the result is false, the pending exception is guaranteed to be
|
|
|
|
// set.
|
2013-10-02 08:29:34 +00:00
|
|
|
|
|
|
|
// TODO(yangguo): temporary wrappers
|
|
|
|
bool MayNamedAccessWrapper(Handle<JSObject> receiver,
|
|
|
|
Handle<Object> key,
|
|
|
|
v8::AccessType type) {
|
|
|
|
return MayNamedAccess(*receiver, *key, type);
|
|
|
|
}
|
|
|
|
bool MayIndexedAccessWrapper(Handle<JSObject> receiver,
|
|
|
|
uint32_t index,
|
|
|
|
v8::AccessType type) {
|
|
|
|
return MayIndexedAccess(*receiver, index, type);
|
|
|
|
}
|
2014-03-10 08:28:59 +00:00
|
|
|
void ReportFailedAccessCheckWrapper(Handle<JSObject> receiver,
|
|
|
|
v8::AccessType type) {
|
|
|
|
ReportFailedAccessCheck(*receiver, type);
|
|
|
|
}
|
2013-10-02 08:29:34 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool MayNamedAccess(JSObject* receiver,
|
|
|
|
Object* key,
|
|
|
|
v8::AccessType type);
|
|
|
|
bool MayIndexedAccess(JSObject* receiver,
|
|
|
|
uint32_t index,
|
|
|
|
v8::AccessType type);
|
|
|
|
|
|
|
|
void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
|
|
|
|
void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
|
|
|
|
|
|
|
|
// Exception throwing support. The caller should use the result
|
|
|
|
// of Throw() as its return value.
|
|
|
|
Failure* Throw(Object* exception, MessageLocation* location = NULL);
|
2014-04-03 05:57:43 +00:00
|
|
|
|
|
|
|
template <typename T>
|
|
|
|
MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
|
|
|
|
MessageLocation* location = NULL) {
|
|
|
|
Throw(*exception, location);
|
|
|
|
return MaybeHandle<T>();
|
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// Re-throw an exception. This involves no error reporting since
|
|
|
|
// error reporting was handled when the exception was thrown
|
|
|
|
// originally.
|
2012-06-11 13:18:05 +00:00
|
|
|
Failure* ReThrow(MaybeObject* exception);
|
2011-03-18 20:35:07 +00:00
|
|
|
void ScheduleThrow(Object* exception);
|
2013-07-01 10:54:39 +00:00
|
|
|
// Re-set pending message, script and positions reported to the TryCatch
|
|
|
|
// back to the TLS for re-use when rethrowing.
|
|
|
|
void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
|
2011-03-18 20:35:07 +00:00
|
|
|
void ReportPendingMessages();
|
2012-12-03 21:47:39 +00:00
|
|
|
// Return pending location if any or unfilled structure.
|
|
|
|
MessageLocation GetMessageLocation();
|
2011-03-18 20:35:07 +00:00
|
|
|
Failure* ThrowIllegalOperation();
|
2014-03-20 12:27:36 +00:00
|
|
|
Failure* ThrowInvalidStringLength();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
|
|
|
|
Failure* PromoteScheduledException();
|
2012-02-07 09:31:06 +00:00
|
|
|
void DoThrow(Object* exception, MessageLocation* location);
|
2011-03-18 20:35:07 +00:00
|
|
|
// Checks if exception should be reported and finds out if it's
|
|
|
|
// caught externally.
|
|
|
|
bool ShouldReportException(bool* can_be_caught_externally,
|
|
|
|
bool catchable_by_javascript);
|
|
|
|
|
|
|
|
// Attempts to compute the current source location, storing the
|
|
|
|
// result in the target out parameter.
|
|
|
|
void ComputeLocation(MessageLocation* target);
|
|
|
|
|
|
|
|
// Out of resource exception helpers.
|
|
|
|
Failure* StackOverflow();
|
|
|
|
Failure* TerminateExecution();
|
2013-04-22 15:01:45 +00:00
|
|
|
void CancelTerminateExecution();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
// Administration
|
|
|
|
void Iterate(ObjectVisitor* v);
|
|
|
|
void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
|
|
|
|
char* Iterate(ObjectVisitor* v, char* t);
|
|
|
|
void IterateThread(ThreadVisitor* v, char* t);
|
|
|
|
|
|
|
|
|
2012-08-28 11:25:08 +00:00
|
|
|
// Returns the current native and global context.
|
2012-08-17 09:03:08 +00:00
|
|
|
Handle<Context> native_context();
|
2012-08-28 11:25:08 +00:00
|
|
|
Handle<Context> global_context();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2012-08-17 09:03:08 +00:00
|
|
|
// Returns the native context of the calling JavaScript code. That
|
|
|
|
// is, the native context of the top-most JavaScript frame.
|
|
|
|
Handle<Context> GetCallingNativeContext();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
void RegisterTryCatchHandler(v8::TryCatch* that);
|
|
|
|
void UnregisterTryCatchHandler(v8::TryCatch* that);
|
|
|
|
|
|
|
|
char* ArchiveThread(char* to);
|
|
|
|
char* RestoreThread(char* from);
|
|
|
|
|
|
|
|
static const char* const kStackOverflowMessage;
|
|
|
|
|
|
|
|
static const int kUC16AlphabetSize = 256; // See StringSearchBase.
|
|
|
|
static const int kBMMaxShift = 250; // See StringSearchBase.
|
|
|
|
|
|
|
|
// Accessors.
|
|
|
|
#define GLOBAL_ACCESSOR(type, name, initialvalue) \
|
|
|
|
inline type name() const { \
|
|
|
|
ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
|
|
|
return name##_; \
|
|
|
|
} \
|
|
|
|
inline void set_##name(type value) { \
|
|
|
|
ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
|
|
|
name##_ = value; \
|
|
|
|
}
|
|
|
|
ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
|
|
|
|
#undef GLOBAL_ACCESSOR
|
|
|
|
|
|
|
|
#define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
|
|
|
|
inline type* name() { \
|
|
|
|
ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
|
|
|
|
return &(name##_)[0]; \
|
|
|
|
}
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
|
|
|
|
#undef GLOBAL_ARRAY_ACCESSOR
|
|
|
|
|
2013-02-25 14:46:09 +00:00
|
|
|
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
|
|
|
|
Handle<type> name() { \
|
|
|
|
return Handle<type>(context()->native_context()->name(), this); \
|
2013-05-13 07:35:26 +00:00
|
|
|
} \
|
|
|
|
bool is_##name(type* value) { \
|
|
|
|
return context()->native_context()->is_##name(value); \
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
2012-08-17 09:03:08 +00:00
|
|
|
NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
|
|
|
|
#undef NATIVE_CONTEXT_FIELD_ACCESSOR
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
Bootstrapper* bootstrapper() { return bootstrapper_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
Counters* counters() {
|
|
|
|
// Call InitializeLoggingAndCounters() if logging is needed before
|
|
|
|
// the isolate is fully initialized.
|
|
|
|
ASSERT(counters_ != NULL);
|
|
|
|
return counters_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
CodeRange* code_range() { return code_range_; }
|
|
|
|
RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
|
|
|
|
CompilationCache* compilation_cache() { return compilation_cache_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
Logger* logger() {
|
|
|
|
// Call InitializeLoggingAndCounters() if logging is needed before
|
|
|
|
// the isolate is fully initialized.
|
|
|
|
ASSERT(logger_ != NULL);
|
|
|
|
return logger_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
StackGuard* stack_guard() { return &stack_guard_; }
|
|
|
|
Heap* heap() { return &heap_; }
|
2011-08-04 15:18:18 +00:00
|
|
|
StatsTable* stats_table();
|
2011-03-18 20:35:07 +00:00
|
|
|
StubCache* stub_cache() { return stub_cache_; }
|
|
|
|
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
|
|
|
|
ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
MaterializedObjectStore* materialized_object_store() {
|
|
|
|
return materialized_object_store_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
MemoryAllocator* memory_allocator() {
|
|
|
|
return memory_allocator_;
|
|
|
|
}
|
|
|
|
|
|
|
|
KeyedLookupCache* keyed_lookup_cache() {
|
|
|
|
return keyed_lookup_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
ContextSlotCache* context_slot_cache() {
|
|
|
|
return context_slot_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
DescriptorLookupCache* descriptor_lookup_cache() {
|
|
|
|
return descriptor_lookup_cache_;
|
|
|
|
}
|
|
|
|
|
2014-01-16 08:17:40 +00:00
|
|
|
HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
HandleScopeImplementer* handle_scope_implementer() {
|
|
|
|
ASSERT(handle_scope_implementer_);
|
|
|
|
return handle_scope_implementer_;
|
|
|
|
}
|
2013-07-03 11:40:30 +00:00
|
|
|
Zone* runtime_zone() { return &runtime_zone_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2011-04-12 08:27:38 +00:00
|
|
|
UnicodeCache* unicode_cache() {
|
|
|
|
return unicode_cache_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2011-09-20 10:08:39 +00:00
|
|
|
InnerPointerToCodeCache* inner_pointer_to_code_cache() {
|
|
|
|
return inner_pointer_to_code_cache_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2012-12-31 11:13:50 +00:00
|
|
|
ConsStringIteratorOp* write_iterator() { return write_iterator_; }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
GlobalHandles* global_handles() { return global_handles_; }
|
|
|
|
|
2013-08-05 09:46:23 +00:00
|
|
|
EternalHandles* eternal_handles() { return eternal_handles_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadManager* thread_manager() { return thread_manager_; }
|
|
|
|
|
|
|
|
StringTracker* string_tracker() { return string_tracker_; }
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
|
|
|
|
return &jsregexp_uncanonicalize_;
|
|
|
|
}
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
|
|
|
|
return &jsregexp_canonrange_;
|
|
|
|
}
|
|
|
|
|
2012-12-31 11:13:50 +00:00
|
|
|
ConsStringIteratorOp* objects_string_compare_iterator_a() {
|
|
|
|
return &objects_string_compare_iterator_a_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2012-12-31 11:13:50 +00:00
|
|
|
ConsStringIteratorOp* objects_string_compare_iterator_b() {
|
|
|
|
return &objects_string_compare_iterator_b_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2012-12-31 11:13:50 +00:00
|
|
|
StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
|
|
|
|
return &objects_string_iterator_;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
RuntimeState* runtime_state() { return &runtime_state_; }
|
|
|
|
|
|
|
|
Builtins* builtins() { return &builtins_; }
|
|
|
|
|
2011-11-15 22:48:55 +00:00
|
|
|
void NotifyExtensionInstalled() {
|
|
|
|
has_installed_extensions_ = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool has_installed_extensions() { return has_installed_extensions_; }
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>*
|
|
|
|
regexp_macro_assembler_canonicalize() {
|
|
|
|
return ®exp_macro_assembler_canonicalize_;
|
|
|
|
}
|
|
|
|
|
|
|
|
RegExpStack* regexp_stack() { return regexp_stack_; }
|
|
|
|
|
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>*
|
|
|
|
interp_canonicalize_mapping() {
|
|
|
|
return &interp_canonicalize_mapping_;
|
|
|
|
}
|
|
|
|
|
2013-10-31 11:43:23 +00:00
|
|
|
inline bool IsCodePreAgingActive();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
2011-08-04 15:18:18 +00:00
|
|
|
Debugger* debugger() {
|
|
|
|
if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
|
|
|
|
return debugger_;
|
|
|
|
}
|
|
|
|
Debug* debug() {
|
|
|
|
if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
|
|
|
|
return debug_;
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
#endif
|
|
|
|
|
2012-03-23 12:16:40 +00:00
|
|
|
inline bool IsDebuggerActive();
|
2011-04-26 13:53:19 +00:00
|
|
|
inline bool DebuggerHasBreakPoints();
|
|
|
|
|
2013-04-02 07:53:50 +00:00
|
|
|
CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
|
2013-04-02 08:03:01 +00:00
|
|
|
HeapProfiler* heap_profiler() const { return heap_profiler_; }
|
2013-04-02 07:53:50 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
HistogramInfo* heap_histograms() { return heap_histograms_; }
|
|
|
|
|
|
|
|
JSObject::SpillInformation* js_spill_information() {
|
|
|
|
return &js_spill_information_;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
Factory* factory() { return reinterpret_cast<Factory*>(this); }
|
|
|
|
|
2012-05-22 14:05:44 +00:00
|
|
|
static const int kJSRegexpStaticOffsetsVectorSize = 128;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-11-20 15:16:18 +00:00
|
|
|
void SetData(uint32_t slot, void* data) {
|
|
|
|
ASSERT(slot < Internals::kNumIsolateDataSlots);
|
|
|
|
embedder_data_[slot] = data;
|
|
|
|
}
|
|
|
|
void* GetData(uint32_t slot) {
|
|
|
|
ASSERT(slot < Internals::kNumIsolateDataSlots);
|
|
|
|
return embedder_data_[slot];
|
|
|
|
}
|
2011-05-18 23:26:38 +00:00
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
THREAD_LOCAL_TOP_ACCESSOR(LookupResult*, top_lookup_result)
|
2011-10-18 11:18:55 +00:00
|
|
|
|
2013-09-03 09:35:26 +00:00
|
|
|
bool IsDead() { return has_fatal_error_; }
|
|
|
|
void SignalFatalError() { has_fatal_error_ = true; }
|
|
|
|
|
2013-09-03 08:49:44 +00:00
|
|
|
bool use_crankshaft() const { return use_crankshaft_; }
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
bool initialized_from_snapshot() { return initialized_from_snapshot_; }
|
|
|
|
|
2012-02-06 08:59:43 +00:00
|
|
|
double time_millis_since_init() {
|
|
|
|
return OS::TimeCurrentMillis() - time_millis_at_init_;
|
|
|
|
}
|
|
|
|
|
2012-03-09 12:07:29 +00:00
|
|
|
DateCache* date_cache() {
|
|
|
|
return date_cache_;
|
|
|
|
}
|
|
|
|
|
|
|
|
void set_date_cache(DateCache* date_cache) {
|
|
|
|
if (date_cache != date_cache_) {
|
|
|
|
delete date_cache_;
|
|
|
|
}
|
|
|
|
date_cache_ = date_cache;
|
|
|
|
}
|
|
|
|
|
2013-05-13 07:35:26 +00:00
|
|
|
Map* get_initial_js_array_map(ElementsKind kind);
|
|
|
|
|
|
|
|
bool IsFastArrayConstructorPrototypeChainIntact();
|
|
|
|
|
2012-12-18 16:25:45 +00:00
|
|
|
CodeStubInterfaceDescriptor*
|
|
|
|
code_stub_interface_descriptor(int index);
|
|
|
|
|
This is a preview of a first step towards unification of the hydrogen
call machinery. The change replaces CallNamed, CallKeyed,
CallConstantFunction and CallKnownGlobal hydrogen instructions with two
new instructions with a more lower level semantics:
1. CallJSFunction for direct calls of JSFunction objects (no
argument adaptation)
2. CallWithDescriptor for calls of a given Code object according to
the supplied calling convention.
Details:
CallJSFunction should be straightforward, the main difference from the
existing InvokeFunction instruction is the absence of argument adaptor
handling. (As a next step, we will replace InvokeFunction with an
equivalent hydrogen code.)
For CallWithDescriptor, the calling conventions are represented by a
tweaked version of CallStubInterfaceDescriptor. In addition to the
parameter-register mapping, we also define parameter-representation
mapping there. The CallWithDescriptor instruction has variable number of
parameters now - this required some simple tweaks in Lithium, which
assumed fixed number of arguments in some places.
The calling conventions used in the calls are initialized in the
CallDescriptors class (code-stubs.h, <arch>/code-stubs-<arch>.cc), and
they live in a new table in the Isolate class. I should say I am not
quite sure about Representation::Integer32() representation for some of
the params of ArgumentAdaptorCall - it is not clear to me wether the
params could not end up on the stack and thus confuse the GC.
The change also includes an earlier small change to argument adaptor
(https://codereview.chromium.org/98463007) that avoids passing a naked
pointer to the code entry as a parameter. I am sorry for packaging that
with an already biggish change.
Performance implications:
Locally, I see a small regression (.2% or so). It is hard to say where
exactly it comes from, but I do see inefficient call sequences to the
adaptor trampoline. For example:
;;; <@78,#24> constant-t
bf85aa515a mov edi,0x5a51aa85 ;; debug: position 29
;;; <@72,#53> load-named-field
8b7717 mov esi,[edi+0x17] ;; debug: position 195
;;; <@80,#51> constant-s
b902000000 mov ecx,0x2 ;; debug: position 195
;;; <@81,#51> gap
894df0 mov [ebp+0xf0],ecx
;;; <@82,#103> constant-i
bb01000000 mov ebx,0x1
;;; <@84,#102> constant-i
b902000000 mov ecx,0x2
;;; <@85,#102> gap
89d8 mov eax,ebx
89cb mov ebx,ecx
8b4df0 mov ecx,[ebp+0xf0]
;;; <@86,#58> call-with-descriptor
e8ef57fcff call ArgumentsAdaptorTrampoline (0x2d80e6e0) ;; code: BUILTIN
Note the silly handling of ecx; the hydrogen for this code is:
0 4 s27 Constant 1 range:1_1 <|@
0 3 t30 Constant 0x5bc1aa85 <JS Function xyz (SharedFunctionInfo 0x5bc1a919)> type:object <|@
0 1 t36 LoadNamedField t30.[in-object]@24 <|@
0 1 t38 Constant 0x2300e6a1 <Code> <|@
0 1 i102 Constant 2 range:2_2 <|@
0 1 i103 Constant 1 range:1_1 <|@
0 2 t41 CallWithDescriptor t38 t30 t36 s27 i103 i102 #2 changes[*] <|@
BUG=
R=verwaest@chromium.org, danno@chromium.org
Review URL: https://codereview.chromium.org/104663004
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-15 17:00:35 +00:00
|
|
|
enum CallDescriptorKey {
|
|
|
|
KeyedCall,
|
|
|
|
NamedCall,
|
2014-01-24 11:47:53 +00:00
|
|
|
CallHandler,
|
This is a preview of a first step towards unification of the hydrogen
call machinery. The change replaces CallNamed, CallKeyed,
CallConstantFunction and CallKnownGlobal hydrogen instructions with two
new instructions with a more lower level semantics:
1. CallJSFunction for direct calls of JSFunction objects (no
argument adaptation)
2. CallWithDescriptor for calls of a given Code object according to
the supplied calling convention.
Details:
CallJSFunction should be straightforward, the main difference from the
existing InvokeFunction instruction is the absence of argument adaptor
handling. (As a next step, we will replace InvokeFunction with an
equivalent hydrogen code.)
For CallWithDescriptor, the calling conventions are represented by a
tweaked version of CallStubInterfaceDescriptor. In addition to the
parameter-register mapping, we also define parameter-representation
mapping there. The CallWithDescriptor instruction has variable number of
parameters now - this required some simple tweaks in Lithium, which
assumed fixed number of arguments in some places.
The calling conventions used in the calls are initialized in the
CallDescriptors class (code-stubs.h, <arch>/code-stubs-<arch>.cc), and
they live in a new table in the Isolate class. I should say I am not
quite sure about Representation::Integer32() representation for some of
the params of ArgumentAdaptorCall - it is not clear to me wether the
params could not end up on the stack and thus confuse the GC.
The change also includes an earlier small change to argument adaptor
(https://codereview.chromium.org/98463007) that avoids passing a naked
pointer to the code entry as a parameter. I am sorry for packaging that
with an already biggish change.
Performance implications:
Locally, I see a small regression (.2% or so). It is hard to say where
exactly it comes from, but I do see inefficient call sequences to the
adaptor trampoline. For example:
;;; <@78,#24> constant-t
bf85aa515a mov edi,0x5a51aa85 ;; debug: position 29
;;; <@72,#53> load-named-field
8b7717 mov esi,[edi+0x17] ;; debug: position 195
;;; <@80,#51> constant-s
b902000000 mov ecx,0x2 ;; debug: position 195
;;; <@81,#51> gap
894df0 mov [ebp+0xf0],ecx
;;; <@82,#103> constant-i
bb01000000 mov ebx,0x1
;;; <@84,#102> constant-i
b902000000 mov ecx,0x2
;;; <@85,#102> gap
89d8 mov eax,ebx
89cb mov ebx,ecx
8b4df0 mov ecx,[ebp+0xf0]
;;; <@86,#58> call-with-descriptor
e8ef57fcff call ArgumentsAdaptorTrampoline (0x2d80e6e0) ;; code: BUILTIN
Note the silly handling of ecx; the hydrogen for this code is:
0 4 s27 Constant 1 range:1_1 <|@
0 3 t30 Constant 0x5bc1aa85 <JS Function xyz (SharedFunctionInfo 0x5bc1a919)> type:object <|@
0 1 t36 LoadNamedField t30.[in-object]@24 <|@
0 1 t38 Constant 0x2300e6a1 <Code> <|@
0 1 i102 Constant 2 range:2_2 <|@
0 1 i103 Constant 1 range:1_1 <|@
0 2 t41 CallWithDescriptor t38 t30 t36 s27 i103 i102 #2 changes[*] <|@
BUG=
R=verwaest@chromium.org, danno@chromium.org
Review URL: https://codereview.chromium.org/104663004
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-15 17:00:35 +00:00
|
|
|
ArgumentAdaptorCall,
|
2014-01-30 13:18:41 +00:00
|
|
|
ApiFunctionCall,
|
This is a preview of a first step towards unification of the hydrogen
call machinery. The change replaces CallNamed, CallKeyed,
CallConstantFunction and CallKnownGlobal hydrogen instructions with two
new instructions with a more lower level semantics:
1. CallJSFunction for direct calls of JSFunction objects (no
argument adaptation)
2. CallWithDescriptor for calls of a given Code object according to
the supplied calling convention.
Details:
CallJSFunction should be straightforward, the main difference from the
existing InvokeFunction instruction is the absence of argument adaptor
handling. (As a next step, we will replace InvokeFunction with an
equivalent hydrogen code.)
For CallWithDescriptor, the calling conventions are represented by a
tweaked version of CallStubInterfaceDescriptor. In addition to the
parameter-register mapping, we also define parameter-representation
mapping there. The CallWithDescriptor instruction has variable number of
parameters now - this required some simple tweaks in Lithium, which
assumed fixed number of arguments in some places.
The calling conventions used in the calls are initialized in the
CallDescriptors class (code-stubs.h, <arch>/code-stubs-<arch>.cc), and
they live in a new table in the Isolate class. I should say I am not
quite sure about Representation::Integer32() representation for some of
the params of ArgumentAdaptorCall - it is not clear to me wether the
params could not end up on the stack and thus confuse the GC.
The change also includes an earlier small change to argument adaptor
(https://codereview.chromium.org/98463007) that avoids passing a naked
pointer to the code entry as a parameter. I am sorry for packaging that
with an already biggish change.
Performance implications:
Locally, I see a small regression (.2% or so). It is hard to say where
exactly it comes from, but I do see inefficient call sequences to the
adaptor trampoline. For example:
;;; <@78,#24> constant-t
bf85aa515a mov edi,0x5a51aa85 ;; debug: position 29
;;; <@72,#53> load-named-field
8b7717 mov esi,[edi+0x17] ;; debug: position 195
;;; <@80,#51> constant-s
b902000000 mov ecx,0x2 ;; debug: position 195
;;; <@81,#51> gap
894df0 mov [ebp+0xf0],ecx
;;; <@82,#103> constant-i
bb01000000 mov ebx,0x1
;;; <@84,#102> constant-i
b902000000 mov ecx,0x2
;;; <@85,#102> gap
89d8 mov eax,ebx
89cb mov ebx,ecx
8b4df0 mov ecx,[ebp+0xf0]
;;; <@86,#58> call-with-descriptor
e8ef57fcff call ArgumentsAdaptorTrampoline (0x2d80e6e0) ;; code: BUILTIN
Note the silly handling of ecx; the hydrogen for this code is:
0 4 s27 Constant 1 range:1_1 <|@
0 3 t30 Constant 0x5bc1aa85 <JS Function xyz (SharedFunctionInfo 0x5bc1a919)> type:object <|@
0 1 t36 LoadNamedField t30.[in-object]@24 <|@
0 1 t38 Constant 0x2300e6a1 <Code> <|@
0 1 i102 Constant 2 range:2_2 <|@
0 1 i103 Constant 1 range:1_1 <|@
0 2 t41 CallWithDescriptor t38 t30 t36 s27 i103 i102 #2 changes[*] <|@
BUG=
R=verwaest@chromium.org, danno@chromium.org
Review URL: https://codereview.chromium.org/104663004
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-15 17:00:35 +00:00
|
|
|
NUMBER_OF_CALL_DESCRIPTORS
|
|
|
|
};
|
|
|
|
|
|
|
|
CallInterfaceDescriptor* call_descriptor(CallDescriptorKey index);
|
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
void IterateDeferredHandles(ObjectVisitor* visitor);
|
|
|
|
void LinkDeferredHandles(DeferredHandles* deferred_handles);
|
|
|
|
void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
|
|
|
|
|
2013-04-23 09:23:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
bool IsDeferredHandle(Object** location);
|
|
|
|
#endif // DEBUG
|
|
|
|
|
2013-11-19 11:52:47 +00:00
|
|
|
bool concurrent_recompilation_enabled() {
|
|
|
|
// Thread is only available with flag enabled.
|
|
|
|
ASSERT(optimizing_compiler_thread_ == NULL ||
|
|
|
|
FLAG_concurrent_recompilation);
|
|
|
|
return optimizing_compiler_thread_ != NULL;
|
|
|
|
}
|
|
|
|
|
2013-11-19 12:14:22 +00:00
|
|
|
bool concurrent_osr_enabled() const {
|
2013-11-19 11:52:47 +00:00
|
|
|
// Thread is only available with flag enabled.
|
|
|
|
ASSERT(optimizing_compiler_thread_ == NULL ||
|
|
|
|
FLAG_concurrent_recompilation);
|
|
|
|
return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
|
|
|
|
}
|
|
|
|
|
2012-07-19 18:58:23 +00:00
|
|
|
OptimizingCompilerThread* optimizing_compiler_thread() {
|
2013-09-25 15:14:12 +00:00
|
|
|
return optimizing_compiler_thread_;
|
2012-07-19 18:58:23 +00:00
|
|
|
}
|
|
|
|
|
2013-11-19 12:14:22 +00:00
|
|
|
int num_sweeper_threads() const {
|
2013-11-19 11:52:47 +00:00
|
|
|
return num_sweeper_threads_;
|
|
|
|
}
|
2013-01-18 07:20:17 +00:00
|
|
|
|
2013-01-30 12:19:32 +00:00
|
|
|
SweeperThread** sweeper_threads() {
|
|
|
|
return sweeper_thread_;
|
|
|
|
}
|
|
|
|
|
2013-11-19 11:52:47 +00:00
|
|
|
// PreInits and returns a default isolate. Needed when a new thread tries
|
|
|
|
// to create a Locker for the first time (the lock itself is in the isolate).
|
|
|
|
// TODO(svenpanne) This method is on death row...
|
|
|
|
static v8::Isolate* GetDefaultIsolateForLocking();
|
|
|
|
|
2013-06-20 10:05:33 +00:00
|
|
|
int id() const { return static_cast<int>(id_); }
|
|
|
|
|
2013-03-06 10:49:34 +00:00
|
|
|
HStatistics* GetHStatistics();
|
2013-03-06 07:25:46 +00:00
|
|
|
HTracer* GetHTracer();
|
2013-11-07 16:35:27 +00:00
|
|
|
CodeTracer* GetCodeTracer();
|
2013-03-06 07:25:46 +00:00
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
|
|
|
|
void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
|
|
|
|
function_entry_hook_ = function_entry_hook;
|
|
|
|
}
|
|
|
|
|
2013-07-18 08:12:01 +00:00
|
|
|
void* stress_deopt_count_address() { return &stress_deopt_count_; }
|
|
|
|
|
2013-09-10 11:13:55 +00:00
|
|
|
inline RandomNumberGenerator* random_number_generator();
|
|
|
|
|
2013-07-19 09:39:01 +00:00
|
|
|
// Given an address occupied by a live code object, return that object.
|
|
|
|
Object* FindCodeObject(Address a);
|
|
|
|
|
2014-02-13 16:09:28 +00:00
|
|
|
int NextOptimizationId() {
|
|
|
|
int id = next_optimization_id_++;
|
|
|
|
if (!Smi::IsValid(next_optimization_id_)) {
|
|
|
|
next_optimization_id_ = 0;
|
|
|
|
}
|
|
|
|
return id;
|
|
|
|
}
|
|
|
|
|
2014-03-24 16:34:06 +00:00
|
|
|
// Get (and lazily initialize) the registry for per-isolate symbols.
|
|
|
|
Handle<JSObject> GetSymbolRegistry();
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
private:
|
2013-10-02 09:01:40 +00:00
|
|
|
Isolate();
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2012-03-12 13:56:56 +00:00
|
|
|
friend struct GlobalState;
|
|
|
|
friend struct InitializeGlobalState;
|
|
|
|
|
2012-04-23 15:09:59 +00:00
|
|
|
enum State {
|
|
|
|
UNINITIALIZED, // Some components may not have been allocated.
|
|
|
|
INITIALIZED // All components are fully initialized.
|
|
|
|
};
|
|
|
|
|
|
|
|
// These fields are accessed through the API, offsets must be kept in sync
|
|
|
|
// with v8::internal::Internals (in include/v8.h) constants. This is also
|
|
|
|
// verified in Isolate::Init() using runtime checks.
|
2013-11-20 15:16:18 +00:00
|
|
|
void* embedder_data_[Internals::kNumIsolateDataSlots];
|
2012-04-23 15:09:59 +00:00
|
|
|
Heap heap_;
|
2013-11-20 15:16:18 +00:00
|
|
|
State state_; // Will be padded to kApiPointerSize.
|
2012-04-23 15:09:59 +00:00
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// The per-process lock should be acquired before the ThreadDataTable is
|
|
|
|
// modified.
|
|
|
|
class ThreadDataTable {
|
|
|
|
public:
|
|
|
|
ThreadDataTable();
|
|
|
|
~ThreadDataTable();
|
|
|
|
|
|
|
|
PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
|
|
|
|
void Insert(PerIsolateThreadData* data);
|
|
|
|
void Remove(PerIsolateThreadData* data);
|
2011-06-07 18:33:03 +00:00
|
|
|
void RemoveAllThreads(Isolate* isolate);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
private:
|
|
|
|
PerIsolateThreadData* list_;
|
|
|
|
};
|
|
|
|
|
|
|
|
// These items form a stack synchronously with threads Enter'ing and Exit'ing
|
|
|
|
// the Isolate. The top of the stack points to a thread which is currently
|
|
|
|
// running the Isolate. When the stack is empty, the Isolate is considered
|
|
|
|
// not entered by any thread and can be Disposed.
|
|
|
|
// If the same thread enters the Isolate more then once, the entry_count_
|
|
|
|
// is incremented rather then a new item pushed to the stack.
|
|
|
|
class EntryStackItem {
|
|
|
|
public:
|
|
|
|
EntryStackItem(PerIsolateThreadData* previous_thread_data,
|
|
|
|
Isolate* previous_isolate,
|
|
|
|
EntryStackItem* previous_item)
|
|
|
|
: entry_count(1),
|
|
|
|
previous_thread_data(previous_thread_data),
|
|
|
|
previous_isolate(previous_isolate),
|
|
|
|
previous_item(previous_item) { }
|
|
|
|
|
|
|
|
int entry_count;
|
|
|
|
PerIsolateThreadData* previous_thread_data;
|
|
|
|
Isolate* previous_isolate;
|
|
|
|
EntryStackItem* previous_item;
|
|
|
|
|
2012-01-20 16:17:08 +00:00
|
|
|
private:
|
2011-03-18 20:35:07 +00:00
|
|
|
DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
|
|
|
|
};
|
|
|
|
|
2012-03-30 14:30:46 +00:00
|
|
|
// This mutex protects highest_thread_id_, thread_data_table_ and
|
|
|
|
// default_isolate_.
|
2013-09-06 13:18:26 +00:00
|
|
|
static Mutex process_wide_mutex_;
|
2012-03-30 14:30:46 +00:00
|
|
|
|
|
|
|
static Thread::LocalStorageKey per_isolate_thread_data_key_;
|
|
|
|
static Thread::LocalStorageKey isolate_key_;
|
|
|
|
static Thread::LocalStorageKey thread_id_key_;
|
2013-10-02 09:01:40 +00:00
|
|
|
static Isolate* default_isolate_;
|
2012-03-30 14:30:46 +00:00
|
|
|
static ThreadDataTable* thread_data_table_;
|
|
|
|
|
2013-03-06 07:25:46 +00:00
|
|
|
// A global counter for all generated Isolates, might overflow.
|
|
|
|
static Atomic32 isolate_counter_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
void Deinit();
|
|
|
|
|
|
|
|
static void SetIsolateThreadLocals(Isolate* isolate,
|
|
|
|
PerIsolateThreadData* data);
|
|
|
|
|
|
|
|
// Find the PerThread for this particular (isolate, thread) combination.
|
|
|
|
// If one does not yet exist, allocate a new one.
|
|
|
|
PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
|
|
|
|
|
|
|
|
// Initializes the current thread to run this Isolate.
|
|
|
|
// Not thread-safe. Multiple threads should not Enter/Exit the same isolate
|
|
|
|
// at the same time, this should be prevented using external locking.
|
|
|
|
void Enter();
|
|
|
|
|
|
|
|
// Exits the current thread. The previosuly entered Isolate is restored
|
|
|
|
// for the thread.
|
|
|
|
// Not thread-safe. Multiple threads should not Enter/Exit the same isolate
|
|
|
|
// at the same time, this should be prevented using external locking.
|
|
|
|
void Exit();
|
|
|
|
|
|
|
|
void InitializeThreadLocal();
|
|
|
|
|
|
|
|
void MarkCompactPrologue(bool is_compacting,
|
|
|
|
ThreadLocalTop* archived_thread_data);
|
|
|
|
void MarkCompactEpilogue(bool is_compacting,
|
|
|
|
ThreadLocalTop* archived_thread_data);
|
|
|
|
|
|
|
|
void FillCache();
|
|
|
|
|
2011-04-07 19:52:24 +00:00
|
|
|
void PropagatePendingExceptionToExternalTryCatch();
|
|
|
|
|
2011-08-04 15:18:18 +00:00
|
|
|
void InitializeDebugger();
|
|
|
|
|
2012-02-07 09:31:06 +00:00
|
|
|
// Traverse prototype chain to find out whether the object is derived from
|
|
|
|
// the Error object.
|
|
|
|
bool IsErrorObject(Handle<Object> obj);
|
|
|
|
|
2013-03-06 07:25:46 +00:00
|
|
|
Atomic32 id_;
|
2012-04-23 15:09:59 +00:00
|
|
|
EntryStackItem* entry_stack_;
|
2011-03-18 20:35:07 +00:00
|
|
|
int stack_trace_nesting_level_;
|
|
|
|
StringStream* incomplete_message_;
|
2011-09-08 16:29:57 +00:00
|
|
|
Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
|
2011-03-18 20:35:07 +00:00
|
|
|
Bootstrapper* bootstrapper_;
|
|
|
|
RuntimeProfiler* runtime_profiler_;
|
|
|
|
CompilationCache* compilation_cache_;
|
|
|
|
Counters* counters_;
|
|
|
|
CodeRange* code_range_;
|
2013-08-29 09:58:30 +00:00
|
|
|
RecursiveMutex break_access_;
|
2011-08-04 15:18:18 +00:00
|
|
|
Atomic32 debugger_initialized_;
|
2013-08-29 09:58:30 +00:00
|
|
|
RecursiveMutex debugger_access_;
|
2011-03-18 20:35:07 +00:00
|
|
|
Logger* logger_;
|
|
|
|
StackGuard stack_guard_;
|
|
|
|
StatsTable* stats_table_;
|
|
|
|
StubCache* stub_cache_;
|
|
|
|
DeoptimizerData* deoptimizer_data_;
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
MaterializedObjectStore* materialized_object_store_;
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadLocalTop thread_local_top_;
|
|
|
|
bool capture_stack_trace_for_uncaught_exceptions_;
|
|
|
|
int stack_trace_for_uncaught_exceptions_frame_limit_;
|
|
|
|
StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
|
|
|
|
MemoryAllocator* memory_allocator_;
|
|
|
|
KeyedLookupCache* keyed_lookup_cache_;
|
|
|
|
ContextSlotCache* context_slot_cache_;
|
|
|
|
DescriptorLookupCache* descriptor_lookup_cache_;
|
2014-01-16 08:17:40 +00:00
|
|
|
HandleScopeData handle_scope_data_;
|
2011-03-18 20:35:07 +00:00
|
|
|
HandleScopeImplementer* handle_scope_implementer_;
|
2011-04-12 08:27:38 +00:00
|
|
|
UnicodeCache* unicode_cache_;
|
2013-07-03 11:40:30 +00:00
|
|
|
Zone runtime_zone_;
|
2011-09-20 10:08:39 +00:00
|
|
|
InnerPointerToCodeCache* inner_pointer_to_code_cache_;
|
2012-12-31 11:13:50 +00:00
|
|
|
ConsStringIteratorOp* write_iterator_;
|
2011-03-18 20:35:07 +00:00
|
|
|
GlobalHandles* global_handles_;
|
2013-08-05 09:46:23 +00:00
|
|
|
EternalHandles* eternal_handles_;
|
2011-03-18 20:35:07 +00:00
|
|
|
ThreadManager* thread_manager_;
|
|
|
|
RuntimeState runtime_state_;
|
|
|
|
Builtins builtins_;
|
2011-11-15 22:48:55 +00:00
|
|
|
bool has_installed_extensions_;
|
2011-03-18 20:35:07 +00:00
|
|
|
StringTracker* string_tracker_;
|
|
|
|
unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
|
|
|
|
unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
|
2012-12-31 11:13:50 +00:00
|
|
|
ConsStringIteratorOp objects_string_compare_iterator_a_;
|
|
|
|
ConsStringIteratorOp objects_string_compare_iterator_b_;
|
|
|
|
StaticResource<ConsStringIteratorOp> objects_string_iterator_;
|
2011-03-18 20:35:07 +00:00
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize>
|
|
|
|
regexp_macro_assembler_canonicalize_;
|
|
|
|
RegExpStack* regexp_stack_;
|
2012-03-09 12:07:29 +00:00
|
|
|
DateCache* date_cache_;
|
2011-03-18 20:35:07 +00:00
|
|
|
unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
|
2012-12-18 16:25:45 +00:00
|
|
|
CodeStubInterfaceDescriptor* code_stub_interface_descriptors_;
|
This is a preview of a first step towards unification of the hydrogen
call machinery. The change replaces CallNamed, CallKeyed,
CallConstantFunction and CallKnownGlobal hydrogen instructions with two
new instructions with a more lower level semantics:
1. CallJSFunction for direct calls of JSFunction objects (no
argument adaptation)
2. CallWithDescriptor for calls of a given Code object according to
the supplied calling convention.
Details:
CallJSFunction should be straightforward, the main difference from the
existing InvokeFunction instruction is the absence of argument adaptor
handling. (As a next step, we will replace InvokeFunction with an
equivalent hydrogen code.)
For CallWithDescriptor, the calling conventions are represented by a
tweaked version of CallStubInterfaceDescriptor. In addition to the
parameter-register mapping, we also define parameter-representation
mapping there. The CallWithDescriptor instruction has variable number of
parameters now - this required some simple tweaks in Lithium, which
assumed fixed number of arguments in some places.
The calling conventions used in the calls are initialized in the
CallDescriptors class (code-stubs.h, <arch>/code-stubs-<arch>.cc), and
they live in a new table in the Isolate class. I should say I am not
quite sure about Representation::Integer32() representation for some of
the params of ArgumentAdaptorCall - it is not clear to me wether the
params could not end up on the stack and thus confuse the GC.
The change also includes an earlier small change to argument adaptor
(https://codereview.chromium.org/98463007) that avoids passing a naked
pointer to the code entry as a parameter. I am sorry for packaging that
with an already biggish change.
Performance implications:
Locally, I see a small regression (.2% or so). It is hard to say where
exactly it comes from, but I do see inefficient call sequences to the
adaptor trampoline. For example:
;;; <@78,#24> constant-t
bf85aa515a mov edi,0x5a51aa85 ;; debug: position 29
;;; <@72,#53> load-named-field
8b7717 mov esi,[edi+0x17] ;; debug: position 195
;;; <@80,#51> constant-s
b902000000 mov ecx,0x2 ;; debug: position 195
;;; <@81,#51> gap
894df0 mov [ebp+0xf0],ecx
;;; <@82,#103> constant-i
bb01000000 mov ebx,0x1
;;; <@84,#102> constant-i
b902000000 mov ecx,0x2
;;; <@85,#102> gap
89d8 mov eax,ebx
89cb mov ebx,ecx
8b4df0 mov ecx,[ebp+0xf0]
;;; <@86,#58> call-with-descriptor
e8ef57fcff call ArgumentsAdaptorTrampoline (0x2d80e6e0) ;; code: BUILTIN
Note the silly handling of ecx; the hydrogen for this code is:
0 4 s27 Constant 1 range:1_1 <|@
0 3 t30 Constant 0x5bc1aa85 <JS Function xyz (SharedFunctionInfo 0x5bc1a919)> type:object <|@
0 1 t36 LoadNamedField t30.[in-object]@24 <|@
0 1 t38 Constant 0x2300e6a1 <Code> <|@
0 1 i102 Constant 2 range:2_2 <|@
0 1 i103 Constant 1 range:1_1 <|@
0 2 t41 CallWithDescriptor t38 t30 t36 s27 i103 i102 #2 changes[*] <|@
BUG=
R=verwaest@chromium.org, danno@chromium.org
Review URL: https://codereview.chromium.org/104663004
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-15 17:00:35 +00:00
|
|
|
CallInterfaceDescriptor* call_descriptors_;
|
2013-09-10 11:13:55 +00:00
|
|
|
RandomNumberGenerator* random_number_generator_;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2013-09-03 09:35:26 +00:00
|
|
|
// True if fatal error has been signaled for this isolate.
|
|
|
|
bool has_fatal_error_;
|
|
|
|
|
2013-09-03 08:49:44 +00:00
|
|
|
// True if we are using the Crankshaft optimizing compiler.
|
|
|
|
bool use_crankshaft_;
|
|
|
|
|
2013-06-28 13:40:41 +00:00
|
|
|
// True if this isolate was initialized from a snapshot.
|
|
|
|
bool initialized_from_snapshot_;
|
|
|
|
|
2012-02-06 08:59:43 +00:00
|
|
|
// Time stamp at initialization.
|
|
|
|
double time_millis_at_init_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
// A static array of histogram info for each type.
|
|
|
|
HistogramInfo heap_histograms_[LAST_TYPE + 1];
|
|
|
|
JSObject::SpillInformation js_spill_information_;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
|
|
|
Debugger* debugger_;
|
|
|
|
Debug* debug_;
|
|
|
|
#endif
|
2013-04-02 07:53:50 +00:00
|
|
|
CpuProfiler* cpu_profiler_;
|
2013-04-02 08:03:01 +00:00
|
|
|
HeapProfiler* heap_profiler_;
|
2013-06-28 13:40:41 +00:00
|
|
|
FunctionEntryHook function_entry_hook_;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
|
|
|
|
type name##_;
|
|
|
|
ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
|
|
|
|
#undef GLOBAL_BACKING_STORE
|
|
|
|
|
|
|
|
#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
|
|
|
|
type name##_[length];
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
|
|
|
|
#undef GLOBAL_ARRAY_BACKING_STORE
|
|
|
|
|
|
|
|
#ifdef DEBUG
|
|
|
|
// This class is huge and has a number of fields controlled by
|
|
|
|
// preprocessor defines. Make sure the offsets of these fields agree
|
|
|
|
// between compilation units.
|
|
|
|
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
|
|
|
|
static const intptr_t name##_debug_offset_;
|
|
|
|
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
|
|
|
|
#undef ISOLATE_FIELD_OFFSET
|
|
|
|
#endif
|
|
|
|
|
2012-07-18 14:15:02 +00:00
|
|
|
DeferredHandles* deferred_handles_head_;
|
2013-09-25 15:14:12 +00:00
|
|
|
OptimizingCompilerThread* optimizing_compiler_thread_;
|
2013-01-30 12:19:32 +00:00
|
|
|
SweeperThread** sweeper_thread_;
|
2013-11-19 11:52:47 +00:00
|
|
|
int num_sweeper_threads_;
|
|
|
|
|
2013-07-18 08:12:01 +00:00
|
|
|
// Counts deopt points if deopt_every_n_times is enabled.
|
|
|
|
unsigned int stress_deopt_count_;
|
|
|
|
|
2014-02-13 16:09:28 +00:00
|
|
|
int next_optimization_id_;
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
friend class ExecutionAccess;
|
2012-07-19 18:58:23 +00:00
|
|
|
friend class HandleScopeImplementer;
|
2011-03-18 20:35:07 +00:00
|
|
|
friend class IsolateInitializer;
|
2012-07-19 18:58:23 +00:00
|
|
|
friend class OptimizingCompilerThread;
|
2013-01-30 12:19:32 +00:00
|
|
|
friend class SweeperThread;
|
2011-05-05 18:55:31 +00:00
|
|
|
friend class ThreadManager;
|
|
|
|
friend class Simulator;
|
|
|
|
friend class StackGuard;
|
2011-04-11 23:46:22 +00:00
|
|
|
friend class ThreadId;
|
2011-08-04 15:18:18 +00:00
|
|
|
friend class TestMemoryAllocatorScope;
|
2013-01-29 09:09:55 +00:00
|
|
|
friend class TestCodeRangeScope;
|
2011-03-18 20:35:07 +00:00
|
|
|
friend class v8::Isolate;
|
|
|
|
friend class v8::Locker;
|
2011-05-05 18:55:31 +00:00
|
|
|
friend class v8::Unlocker;
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
DISALLOW_COPY_AND_ASSIGN(Isolate);
|
|
|
|
};
|
|
|
|
|
|
|
|
|
2014-02-11 14:03:31 +00:00
|
|
|
#undef FIELD_ACCESSOR
|
|
|
|
#undef THREAD_LOCAL_TOP_ACCESSOR
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
|
|
|
|
// class as a work around for a bug in the generated code found with these
|
|
|
|
// versions of GCC. See V8 issue 122 for details.
|
|
|
|
class SaveContext BASE_EMBEDDED {
|
|
|
|
public:
|
2011-10-03 11:13:20 +00:00
|
|
|
inline explicit SaveContext(Isolate* isolate);
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
~SaveContext() {
|
2013-09-03 11:47:16 +00:00
|
|
|
isolate_->set_context(context_.is_null() ? NULL : *context_);
|
|
|
|
isolate_->set_save_context(prev_);
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
Handle<Context> context() { return context_; }
|
|
|
|
SaveContext* prev() { return prev_; }
|
|
|
|
|
|
|
|
// Returns true if this save context is below a given JavaScript frame.
|
2011-10-28 12:49:09 +00:00
|
|
|
bool IsBelowFrame(JavaScriptFrame* frame) {
|
|
|
|
return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
2013-09-03 11:47:16 +00:00
|
|
|
Isolate* isolate_;
|
2011-03-18 20:35:07 +00:00
|
|
|
Handle<Context> context_;
|
|
|
|
SaveContext* prev_;
|
2011-10-28 12:49:09 +00:00
|
|
|
Address c_entry_fp_;
|
2011-03-18 20:35:07 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
class AssertNoContextChange BASE_EMBEDDED {
|
|
|
|
#ifdef DEBUG
|
|
|
|
public:
|
2013-10-01 14:53:45 +00:00
|
|
|
explicit AssertNoContextChange(Isolate* isolate)
|
|
|
|
: isolate_(isolate),
|
|
|
|
context_(isolate->context(), isolate) { }
|
2013-08-29 17:13:57 +00:00
|
|
|
~AssertNoContextChange() {
|
2013-09-11 08:39:38 +00:00
|
|
|
ASSERT(isolate_->context() == *context_);
|
2013-08-29 17:13:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
2013-09-11 08:39:38 +00:00
|
|
|
Isolate* isolate_;
|
2013-08-29 17:13:57 +00:00
|
|
|
Handle<Context> context_;
|
|
|
|
#else
|
|
|
|
public:
|
2013-10-01 14:53:45 +00:00
|
|
|
explicit AssertNoContextChange(Isolate* isolate) { }
|
2011-03-18 20:35:07 +00:00
|
|
|
#endif
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
class ExecutionAccess BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
|
|
|
|
Lock(isolate);
|
|
|
|
}
|
|
|
|
~ExecutionAccess() { Unlock(isolate_); }
|
|
|
|
|
2013-08-29 09:58:30 +00:00
|
|
|
static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
|
|
|
|
static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
static bool TryLock(Isolate* isolate) {
|
2013-08-29 09:58:30 +00:00
|
|
|
return isolate->break_access()->TryLock();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
Isolate* isolate_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
// Support for checking for stack-overflows in C++ code.
|
|
|
|
class StackLimitCheck BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
|
|
|
|
|
|
|
|
bool HasOverflowed() const {
|
|
|
|
StackGuard* stack_guard = isolate_->stack_guard();
|
2012-11-12 14:56:25 +00:00
|
|
|
return (reinterpret_cast<uintptr_t>(this) < stack_guard->real_climit());
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
private:
|
|
|
|
Isolate* isolate_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
// Support for temporarily postponing interrupts. When the outermost
|
|
|
|
// postpone scope is left the interrupts will be re-enabled and any
|
|
|
|
// interrupts that occurred while in the scope will be taken into
|
|
|
|
// account.
|
|
|
|
class PostponeInterruptsScope BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
explicit PostponeInterruptsScope(Isolate* isolate)
|
2014-02-05 11:28:55 +00:00
|
|
|
: stack_guard_(isolate->stack_guard()), isolate_(isolate) {
|
|
|
|
ExecutionAccess access(isolate_);
|
2011-03-18 20:35:07 +00:00
|
|
|
stack_guard_->thread_local_.postpone_interrupts_nesting_++;
|
|
|
|
stack_guard_->DisableInterrupts();
|
|
|
|
}
|
|
|
|
|
|
|
|
~PostponeInterruptsScope() {
|
2014-02-05 11:28:55 +00:00
|
|
|
ExecutionAccess access(isolate_);
|
2011-03-18 20:35:07 +00:00
|
|
|
if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
|
|
|
|
stack_guard_->EnableInterrupts();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
StackGuard* stack_guard_;
|
2014-02-05 11:28:55 +00:00
|
|
|
Isolate* isolate_;
|
2011-03-18 20:35:07 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
2013-11-07 16:35:27 +00:00
|
|
|
class CodeTracer V8_FINAL : public Malloced {
|
|
|
|
public:
|
|
|
|
explicit CodeTracer(int isolate_id)
|
|
|
|
: file_(NULL),
|
|
|
|
scope_depth_(0) {
|
|
|
|
if (!ShouldRedirect()) {
|
|
|
|
file_ = stdout;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (FLAG_redirect_code_traces_to == NULL) {
|
|
|
|
OS::SNPrintF(filename_,
|
|
|
|
"code-%d-%d.asm",
|
|
|
|
OS::GetCurrentProcessId(),
|
|
|
|
isolate_id);
|
|
|
|
} else {
|
|
|
|
OS::StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
|
|
|
|
}
|
|
|
|
|
|
|
|
WriteChars(filename_.start(), "", 0, false);
|
|
|
|
}
|
|
|
|
|
|
|
|
class Scope {
|
|
|
|
public:
|
|
|
|
explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
|
|
|
|
~Scope() { tracer_->CloseFile(); }
|
|
|
|
|
|
|
|
FILE* file() const { return tracer_->file(); }
|
|
|
|
|
|
|
|
private:
|
|
|
|
CodeTracer* tracer_;
|
|
|
|
};
|
|
|
|
|
|
|
|
void OpenFile() {
|
|
|
|
if (!ShouldRedirect()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (file_ == NULL) {
|
|
|
|
file_ = OS::FOpen(filename_.start(), "a");
|
|
|
|
}
|
|
|
|
|
|
|
|
scope_depth_++;
|
|
|
|
}
|
|
|
|
|
|
|
|
void CloseFile() {
|
|
|
|
if (!ShouldRedirect()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (--scope_depth_ == 0) {
|
|
|
|
fclose(file_);
|
|
|
|
file_ = NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
FILE* file() const { return file_; }
|
|
|
|
|
|
|
|
private:
|
|
|
|
static bool ShouldRedirect() {
|
|
|
|
return FLAG_redirect_code_traces;
|
|
|
|
}
|
|
|
|
|
|
|
|
EmbeddedVector<char, 128> filename_;
|
|
|
|
FILE* file_;
|
|
|
|
int scope_depth_;
|
|
|
|
};
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
} } // namespace v8::internal
|
|
|
|
|
|
|
|
#endif // V8_ISOLATE_H_
|