// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include)
#include "v8config.h"    // NOLINT(build/include)

namespace v8 {

class Context;
class Data;
class Isolate;

namespace internal {

class Isolate;
// The raw machine word used throughout the embedder API to refer to objects
// on the V8 heap; wide enough to hold any pointer on the host platform.
using Address = uintptr_t;

// Sentinel meaning "no address" / "no object".
static const Address kNullAddress = 0;
/**
 * Configuration of tagging scheme.
 */
// Sizes (in bytes) of the fundamental value types as observed through the
// embedder API.
const int kApiSystemPointerSize = sizeof(void*);
// A "tagged" value currently occupies a full system pointer.
const int kApiTaggedSize = kApiSystemPointerSize;
const int kApiDoubleSize = sizeof(double);
const int kApiIntSize = sizeof(int);
const int kApiInt64Size = sizeof(int64_t);
// Tag information for HeapObject.
// Heap object pointers carry a tag in their two least-significant bits:
// 01 for a strong reference, 11 for a weak reference.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for Smi.
// Small integers (Smis) are distinguished by a 0 in the least-significant bit.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
// Primary template; specialized below for 32-bit and 64-bit tagged pointers.
template <size_t tagged_ptr_size>
struct SmiTagging;
// Smi constants for systems where tagged pointer is a 32-bit value.
|
|
|
|
template <>
|
|
|
|
struct SmiTagging<4> {
|
|
|
|
enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
|
2018-10-11 23:23:33 +00:00
|
|
|
V8_INLINE static int SmiToInt(const internal::Address value) {
|
2018-09-07 09:03:35 +00:00
|
|
|
int shift_bits = kSmiTagSize + kSmiShiftSize;
|
2018-10-11 23:23:33 +00:00
|
|
|
// Shift down (requires >> to be sign extending).
|
|
|
|
return static_cast<int>(static_cast<intptr_t>(value)) >> shift_bits;
|
2018-09-07 09:03:35 +00:00
|
|
|
}
|
|
|
|
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
|
|
|
|
// To be representable as an tagged small integer, the two
|
|
|
|
// most-significant bits of 'value' must be either 00 or 11 due to
|
|
|
|
// sign-extension. To check this we add 01 to the two
|
2018-10-11 23:23:33 +00:00
|
|
|
// most-significant bits, and check if the most-significant bit is 0.
|
2018-09-07 09:03:35 +00:00
|
|
|
//
|
|
|
|
// CAUTION: The original code below:
|
|
|
|
// bool result = ((value + 0x40000000) & 0x80000000) == 0;
|
|
|
|
// may lead to incorrect results according to the C language spec, and
|
|
|
|
// in fact doesn't work correctly with gcc4.1.1 in some cases: The
|
|
|
|
// compiler may produce undefined results in case of signed integer
|
|
|
|
// overflow. The computation must be done w/ unsigned ints.
|
|
|
|
return static_cast<uintptr_t>(value) + 0x40000000U < 0x80000000U;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// Smi constants for systems where tagged pointer is a 64-bit value.
|
|
|
|
template <>
|
|
|
|
struct SmiTagging<8> {
|
|
|
|
enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
|
2018-10-11 23:23:33 +00:00
|
|
|
V8_INLINE static int SmiToInt(const internal::Address value) {
|
2018-09-07 09:03:35 +00:00
|
|
|
int shift_bits = kSmiTagSize + kSmiShiftSize;
|
|
|
|
// Shift down and throw away top 32 bits.
|
2018-10-11 23:23:33 +00:00
|
|
|
return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
|
2018-09-07 09:03:35 +00:00
|
|
|
}
|
|
|
|
V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
|
|
|
|
// To be representable as a long smi, the value must be a 32-bit integer.
|
|
|
|
return (value == static_cast<int32_t>(value));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2018-11-30 12:17:47 +00:00
|
|
|
#if defined(V8_COMPRESS_POINTERS)
// Compressed tagged values are stored as 32-bit words (see
// Internals::ReadTaggedPointerField), so the scheme requires 64-bit pointers.
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
#endif
#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
|
2018-11-30 12:17:47 +00:00
|
|
|
typedef SmiTagging<kApiIntSize> PlatformSmiTagging;
|
2018-12-13 15:13:42 +00:00
|
|
|
#else
|
|
|
|
typedef SmiTagging<kApiSystemPointerSize> PlatformSmiTagging;
|
|
|
|
#endif
|
2018-09-07 09:03:35 +00:00
|
|
|
|
|
|
|
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
// Smallest value representable in kSmiValueSize bits; computed with unsigned
// arithmetic to avoid signed-overflow UB, then converted back to int.
const int kSmiMinValue = (static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
const int kSmiMaxValue = -(kSmiMinValue + 1);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
// Builds the Smi representation of |value|: the payload is shifted past the
// tag bits and the (zero-valued) Smi tag is OR-ed into the low bits.
V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return kSmiTag |
         (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize));
}
/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api.  Don't
 * depend on functions and constants defined here.
 */
class Internals {
 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  //
  // Byte offsets of fields inside heap objects.  They are applied relative
  // to the *tagged* pointer; ReadRawField subtracts kHeapObjectTag.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiIntSize;
  static const int kStringResourceOffset = 1 * kApiTaggedSize + 2 * kApiIntSize;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kForeignAddressOffset = kApiTaggedSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  // kJSObjectHeaderSize rounded up to system-pointer alignment.
  static const int kJSObjectHeaderSizeForEmbedderFields =
      (kJSObjectHeaderSize + kApiSystemPointerSize - 1) &
      -kApiSystemPointerSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  // An embedder data slot is twice as wide when pointer compression is on.
  static const int kEmbedderDataSlotSize =
#ifdef V8_COMPRESS_POINTERS
      2 *
#endif
      kApiSystemPointerSize;
  static const int kNativeContextEmbedderDataOffset = 7 * kApiTaggedSize;

  // Bits of a string's instance type describing its representation/encoding
  // (see IsExternalTwoByteString).
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // Byte offsets of fields inside the Isolate, relative to the Isolate
  // pointer itself (see SetEmbedderData / GetRoot).
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kExternalMemoryOffset =
      kNumIsolateDataSlots * kApiTaggedSize;
  static const int kExternalMemoryLimitOffset =
      kExternalMemoryOffset + kApiInt64Size;
  static const int kExternalMemoryAtLastMarkCompactOffset =
      kExternalMemoryLimitOffset + kApiInt64Size;
  static const int kIsolateRootsOffset =
      kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size;

  // Indices into the isolate's roots table (see GetRoot).
  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  // Layout and bit encoding of global handle nodes
  // (see GetNodeFlag / GetNodeState).
  static const int kNodeClassIdOffset = 1 * kApiTaggedSize;
  static const int kNodeFlagsOffset = 1 * kApiTaggedSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;
  static const int kNodeStateIsNearDeathValue = 4;
  static const int kNodeIsIndependentShift = 3;
  static const int kNodeIsActiveShift = 4;

  // Selected instance-type values (see GetInstanceType).
  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x47;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSApiObjectType = 0x420;
  static const int kJSObjectType = 0x421;

  // Oddball kinds (see GetOddballKind).
  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  // Verifies |isolate| via CheckInitializedImpl in V8_ENABLE_CHECKS builds;
  // a no-op otherwise.
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  // Returns true if |value| carries the strong heap-object tag (low bits 01;
  // weakly tagged values, low bits 11, do not match).
  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  // Extracts the integer payload of a tagged Smi.
  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  // Loads the 16-bit instance type out of |obj|'s map.
  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  // Reads the Smi-encoded kind field of an oddball object.
  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }

  // Global handle node accessors. |obj| points at the node's object slot;
  // the flag byte lives kNodeFlagsOffset bytes past it.
  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  // Returns the node state stored in the low kNodeStateMask bits.
  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }

  // Stores |data| into the isolate's embedder data slot |slot|.
  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }

  // Returns the location of root |index| in the isolate's roots table.
  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }

  // Reads an untagged field of type T at |offset| inside the heap object
  // that the tagged pointer |heap_object_ptr| refers to (the tag is
  // subtracted to obtain the real address).
  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
    return *reinterpret_cast<const T*>(addr);
  }

  // Reads a tagged-pointer field.  With pointer compression, the stored
  // 32-bit value is sign-extended and rebased onto the isolate root.
  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset);
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<intptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  // Reads a field known to hold a Smi; no rebasing is performed because a
  // Smi payload is not an address.
  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<intptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  // Reads a field that may hold either a Smi or a heap-object pointer.  The
  // isolate root is added only when the low (heap object) tag bit is set;
  // this is done branchlessly by building an all-ones/all-zeros mask.
  V8_INLINE static internal::Address ReadTaggedAnyField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset);
    internal::Address root_mask = static_cast<internal::Address>(
        -static_cast<intptr_t>(value & kSmiTagMask));
    internal::Address root_or_zero =
        root_mask & GetRootFromOnHeapAddress(heap_object_ptr);
    return root_or_zero +
           static_cast<internal::Address>(static_cast<intptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

#ifdef V8_COMPRESS_POINTERS
  // Pointer-compression geometry.  GetRootFromOnHeapAddress rounds an
  // arbitrary on-heap address to the isolate root; this assumes the heap
  // reservation is aligned to kPtrComprIsolateRootAlignment.
  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprIsolateRootBias =
      kPtrComprHeapReservationSize / 2;
  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;

  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
      internal::Address addr) {
    return (addr + kPtrComprIsolateRootBias) &
           -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
  }

#else

  // Reads embedder data slot |index| out of |context|'s embedder data array.
  // NOTE(review): this helper is only compiled when pointer compression is
  // OFF (it sits in the #else branch) — confirm no caller needs it in
  // compressed-pointer builds.
  template <typename T>
  V8_INLINE static T ReadEmbedderData(const v8::Context* context, int index) {
    typedef internal::Address A;
    typedef internal::Internals I;
    A ctx = *reinterpret_cast<const A*>(context);
    A embedder_data =
        I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset);
    int value_offset =
        I::kEmbedderDataArrayHeaderSize + (I::kEmbedderDataSlotSize * index);
    return I::ReadRawField<T>(embedder_data, value_offset);
  }
#endif  // V8_COMPRESS_POINTERS
};
// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

// Checking variant: delegates to T::Cast, which Data-derived types provide.
template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

// Non-checking variant: deliberately a no-op.
template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}
// Runs T::Cast(data) if and only if T derives from v8::Data (selected at
// compile time via std::is_base_of); otherwise expands to a no-op.
template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}
// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns if we need to throw when an error occurs. This infers the language
// mode based on the current context and the closure. This returns true if the
// language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

}  // namespace internal
}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_