2012-02-22 12:26:36 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2008-11-05 19:18:10 +00:00
|
|
|
#ifndef V8_GLOBALS_H_
|
|
|
|
#define V8_GLOBALS_H_
|
|
|
|
|
2014-10-21 08:25:14 +00:00
|
|
|
#include <stddef.h>
|
|
|
|
#include <stdint.h>
|
2011-09-07 12:39:53 +00:00
|
|
|
|
2015-04-27 09:12:58 +00:00
|
|
|
#include <ostream>
|
|
|
|
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/base/build_config.h"
|
2014-06-30 13:25:46 +00:00
|
|
|
#include "src/base/logging.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/base/macros.h"
|
2014-04-23 11:51:28 +00:00
|
|
|
|
2014-12-20 13:17:20 +00:00
|
|
|
// Unfortunately, the INFINITY macro cannot be used with the '-pedantic'
|
|
|
|
// warning flag and certain versions of GCC due to a bug:
|
|
|
|
// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931
|
|
|
|
// For now, we use the more involved template-based version from <limits>, but
|
|
|
|
// only when compiling with GCC versions affected by the bug (2.96.x - 4.0.x)
|
|
|
|
#if V8_CC_GNU && V8_GNUC_PREREQ(2, 96, 0) && !V8_GNUC_PREREQ(4, 1, 0)
|
|
|
|
# include <limits> // NOLINT
|
|
|
|
# define V8_INFINITY std::numeric_limits<double>::infinity()
|
|
|
|
#elif V8_LIBC_MSVCRT
|
|
|
|
# define V8_INFINITY HUGE_VAL
|
2015-01-30 08:01:53 +00:00
|
|
|
#elif V8_OS_AIX
|
|
|
|
#define V8_INFINITY (__builtin_inff())
|
2014-12-20 13:17:20 +00:00
|
|
|
#else
|
|
|
|
# define V8_INFINITY INFINITY
|
|
|
|
#endif
|
|
|
|
|
2013-07-31 07:51:46 +00:00
|
|
|
namespace v8 {
|
2014-06-30 13:25:46 +00:00
|
|
|
|
|
|
|
namespace base {
|
|
|
|
class Mutex;
|
|
|
|
class RecursiveMutex;
|
|
|
|
class VirtualMemory;
|
|
|
|
}
|
|
|
|
|
2013-07-31 07:51:46 +00:00
|
|
|
namespace internal {
|
2013-07-29 12:12:39 +00:00
|
|
|
|
2010-10-25 16:40:41 +00:00
|
|
|
// Determine whether we are running in a simulated environment.
// Setting USE_SIMULATOR explicitly from the build script will force
// the use of a simulated environment.
#if !defined(USE_SIMULATOR)
// In a cross build (target architecture differs from the host
// architecture), generated target code must run under a software
// simulator, so turn USE_SIMULATOR on for every such combination.
#if (V8_TARGET_ARCH_ARM64 && !V8_HOST_ARCH_ARM64)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_PPC && !V8_HOST_ARCH_PPC)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS64 && !V8_HOST_ARCH_MIPS64)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_S390 && !V8_HOST_ARCH_S390)
#define USE_SIMULATOR 1
#endif
#endif
|
|
|
|
|
2015-06-04 14:44:00 +00:00
|
|
|
// Determine whether the architecture uses an embedded constant pool
|
|
|
|
// (contiguous constant pool embedded in code object).
|
|
|
|
#if V8_TARGET_ARCH_PPC
|
|
|
|
#define V8_EMBEDDED_CONSTANT_POOL 1
|
|
|
|
#else
|
|
|
|
#define V8_EMBEDDED_CONSTANT_POOL 0
|
|
|
|
#endif
|
2014-03-11 20:52:00 +00:00
|
|
|
|
2014-09-19 11:26:36 +00:00
|
|
|
#ifdef V8_TARGET_ARCH_ARM
|
|
|
|
// Set stack limit lower for ARM than for other architectures because
|
|
|
|
// stack allocating MacroAssembler takes 120K bytes.
|
|
|
|
// See issue crbug.com/405338
|
|
|
|
#define V8_DEFAULT_STACK_SIZE_KB 864
|
|
|
|
#else
|
|
|
|
// Slightly less than 1MB, since Windows' default stack size for
|
|
|
|
// the main execution thread is 1MB for both 32 and 64-bit.
|
|
|
|
#define V8_DEFAULT_STACK_SIZE_KB 984
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
2014-11-11 10:24:52 +00:00
|
|
|
// Determine whether double field unboxing feature is enabled.
|
2015-01-16 07:42:00 +00:00
|
|
|
#if V8_TARGET_ARCH_64_BIT
|
2015-02-26 12:26:49 +00:00
|
|
|
#define V8_DOUBLE_FIELDS_UNBOXING 1
|
2014-11-11 10:24:52 +00:00
|
|
|
#else
|
|
|
|
#define V8_DOUBLE_FIELDS_UNBOXING 0
|
|
|
|
#endif
|
|
|
|
|
2013-07-29 12:12:39 +00:00
|
|
|
|
2013-07-30 10:36:58 +00:00
|
|
|
typedef uint8_t byte;
|
|
|
|
typedef byte* Address;
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// -----------------------------------------------------------------------------
// Constants

// Binary size multipliers.
const int KB = 1024;
const int MB = KB * KB;
const int GB = KB * KB * KB;

// Limits of fixed-width integer types, expressed as plain int constants.
const int kMaxInt = 0x7FFFFFFF;
const int kMinInt = -kMaxInt - 1;
const int kMaxInt8 = (1 << 7) - 1;
const int kMinInt8 = -(1 << 7);
const int kMaxUInt8 = (1 << 8) - 1;
const int kMinUInt8 = 0;
const int kMaxInt16 = (1 << 15) - 1;
const int kMinInt16 = -(1 << 15);
const int kMaxUInt16 = (1 << 16) - 1;
const int kMinUInt16 = 0;

const uint32_t kMaxUInt32 = 0xFFFFFFFFu;
const int kMinUInt32 = 0;
|
2009-04-27 11:16:59 +00:00
|
|
|
|
2013-07-23 13:46:10 +00:00
|
|
|
const int kCharSize = sizeof(char); // NOLINT
|
|
|
|
const int kShortSize = sizeof(short); // NOLINT
|
|
|
|
const int kIntSize = sizeof(int); // NOLINT
|
2013-10-14 13:35:06 +00:00
|
|
|
const int kInt32Size = sizeof(int32_t); // NOLINT
|
|
|
|
const int kInt64Size = sizeof(int64_t); // NOLINT
|
2015-06-03 23:56:21 +00:00
|
|
|
const int kFloatSize = sizeof(float); // NOLINT
|
2013-07-23 13:46:10 +00:00
|
|
|
const int kDoubleSize = sizeof(double); // NOLINT
|
|
|
|
const int kIntptrSize = sizeof(intptr_t); // NOLINT
|
|
|
|
const int kPointerSize = sizeof(void*); // NOLINT
|
2014-06-24 05:27:44 +00:00
|
|
|
#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT
|
|
|
|
const int kRegisterSize = kPointerSize + kPointerSize;
|
|
|
|
#else
|
2013-07-23 13:46:10 +00:00
|
|
|
const int kRegisterSize = kPointerSize;
|
2014-06-24 05:27:44 +00:00
|
|
|
#endif
|
2013-07-23 13:46:10 +00:00
|
|
|
const int kPCOnStackSize = kRegisterSize;
|
|
|
|
const int kFPOnStackSize = kRegisterSize;
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2016-04-14 06:57:00 +00:00
|
|
|
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
|
2016-03-30 14:08:03 +00:00
|
|
|
const int kElidedFrameSlots = kPCOnStackSize / kPointerSize;
|
|
|
|
#else
|
|
|
|
const int kElidedFrameSlots = 0;
|
|
|
|
#endif
|
|
|
|
|
2011-07-13 13:50:27 +00:00
|
|
|
const int kDoubleSizeLog2 = 3;
|
|
|
|
|
2009-05-05 12:06:20 +00:00
|
|
|
#if V8_HOST_ARCH_64_BIT
|
2009-05-04 13:29:29 +00:00
|
|
|
const int kPointerSizeLog2 = 3;
|
2009-06-17 11:50:33 +00:00
|
|
|
const intptr_t kIntptrSignBit = V8_INT64_C(0x8000000000000000);
|
2010-10-26 18:14:45 +00:00
|
|
|
const uintptr_t kUintptrAllBitsSet = V8_UINT64_C(0xFFFFFFFFFFFFFFFF);
|
2014-06-13 11:06:42 +00:00
|
|
|
const bool kRequiresCodeRange = true;
|
2015-06-09 14:50:26 +00:00
|
|
|
#if V8_TARGET_ARCH_MIPS64
|
|
|
|
// To use pseudo-relative jumps such as j/jal instructions which have 28-bit
|
|
|
|
// encoded immediate, the addresses have to be in range of 256MB aligned
|
|
|
|
// region. Used only for large object space.
|
|
|
|
const size_t kMaximalCodeRangeSize = 256 * MB;
|
2016-05-19 17:53:56 +00:00
|
|
|
const size_t kCodeRangeAreaAlignment = 256 * MB;
|
2016-07-29 14:25:40 +00:00
|
|
|
#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
|
|
|
|
const size_t kMaximalCodeRangeSize = 512 * MB;
|
|
|
|
const size_t kCodeRangeAreaAlignment = 64 * KB; // OS page on PPC Linux
|
2015-06-09 14:50:26 +00:00
|
|
|
#else
|
2014-06-13 11:06:42 +00:00
|
|
|
const size_t kMaximalCodeRangeSize = 512 * MB;
|
2016-05-19 17:53:56 +00:00
|
|
|
const size_t kCodeRangeAreaAlignment = 4 * KB; // OS page.
|
2015-06-09 14:50:26 +00:00
|
|
|
#endif
|
2014-10-01 09:16:57 +00:00
|
|
|
#if V8_OS_WIN
|
2014-10-02 08:28:29 +00:00
|
|
|
const size_t kMinimumCodeRangeSize = 4 * MB;
|
2014-10-01 09:16:57 +00:00
|
|
|
const size_t kReservedCodeRangePages = 1;
|
2016-07-29 14:25:40 +00:00
|
|
|
// On PPC Linux PageSize is 4MB
|
|
|
|
#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
|
|
|
|
const size_t kMinimumCodeRangeSize = 12 * MB;
|
|
|
|
const size_t kReservedCodeRangePages = 0;
|
2014-10-01 09:16:57 +00:00
|
|
|
#else
|
2014-10-02 08:28:29 +00:00
|
|
|
const size_t kMinimumCodeRangeSize = 3 * MB;
|
2014-10-01 09:16:57 +00:00
|
|
|
const size_t kReservedCodeRangePages = 0;
|
|
|
|
#endif
|
2009-05-04 13:29:29 +00:00
|
|
|
#else
|
2008-07-03 15:10:15 +00:00
|
|
|
const int kPointerSizeLog2 = 2;
|
2009-06-17 11:50:33 +00:00
|
|
|
const intptr_t kIntptrSignBit = 0x80000000;
|
2010-10-26 18:14:45 +00:00
|
|
|
const uintptr_t kUintptrAllBitsSet = 0xFFFFFFFFu;
|
2014-06-24 05:27:44 +00:00
|
|
|
#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT
|
|
|
|
// x32 port also requires code range.
|
|
|
|
const bool kRequiresCodeRange = true;
|
|
|
|
const size_t kMaximalCodeRangeSize = 256 * MB;
|
2014-10-02 08:28:29 +00:00
|
|
|
const size_t kMinimumCodeRangeSize = 3 * MB;
|
2016-05-19 17:53:56 +00:00
|
|
|
const size_t kCodeRangeAreaAlignment = 4 * KB; // OS page.
|
2016-07-29 14:25:40 +00:00
|
|
|
#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
|
|
|
|
const bool kRequiresCodeRange = false;
|
|
|
|
const size_t kMaximalCodeRangeSize = 0 * MB;
|
|
|
|
const size_t kMinimumCodeRangeSize = 0 * MB;
|
|
|
|
const size_t kCodeRangeAreaAlignment = 64 * KB; // OS page on PPC Linux
|
2014-06-24 05:27:44 +00:00
|
|
|
#else
|
2014-06-13 11:06:42 +00:00
|
|
|
const bool kRequiresCodeRange = false;
|
|
|
|
const size_t kMaximalCodeRangeSize = 0 * MB;
|
2014-10-01 09:16:57 +00:00
|
|
|
const size_t kMinimumCodeRangeSize = 0 * MB;
|
2016-05-19 17:53:56 +00:00
|
|
|
const size_t kCodeRangeAreaAlignment = 4 * KB; // OS page.
|
2009-05-04 13:29:29 +00:00
|
|
|
#endif
|
2016-07-29 14:25:40 +00:00
|
|
|
const size_t kReservedCodeRangePages = 0;
|
2014-06-24 05:27:44 +00:00
|
|
|
#endif
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2016-06-21 09:25:08 +00:00
|
|
|
// The external allocation limit should be below 256 MB on all architectures
|
|
|
|
// to avoid that resource-constrained embedders run low on memory.
|
|
|
|
const int kExternalAllocationLimit = 192 * 1024 * 1024;
|
|
|
|
|
2014-06-24 09:31:30 +00:00
|
|
|
STATIC_ASSERT(kPointerSize == (1 << kPointerSizeLog2));
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
const int kBitsPerByte = 8;
|
|
|
|
const int kBitsPerByteLog2 = 3;
|
|
|
|
const int kBitsPerPointer = kPointerSize * kBitsPerByte;
|
|
|
|
const int kBitsPerInt = kIntSize * kBitsPerByte;
|
|
|
|
|
2010-03-23 13:38:04 +00:00
|
|
|
// IEEE 754 single precision floating point number bit layout.
|
|
|
|
const uint32_t kBinary32SignMask = 0x80000000u;
|
|
|
|
const uint32_t kBinary32ExponentMask = 0x7f800000u;
|
|
|
|
const uint32_t kBinary32MantissaMask = 0x007fffffu;
|
|
|
|
const int kBinary32ExponentBias = 127;
|
|
|
|
const int kBinary32MaxExponent = 0xFE;
|
|
|
|
const int kBinary32MinExponent = 0x01;
|
|
|
|
const int kBinary32MantissaBits = 23;
|
|
|
|
const int kBinary32ExponentShift = 23;
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2011-09-26 12:44:36 +00:00
|
|
|
// Quiet NaNs have bits 51 to 62 set, possibly the sign bit, and no
|
|
|
|
// other bits set.
|
|
|
|
const uint64_t kQuietNaNMask = static_cast<uint64_t>(0xfff) << 51;
|
|
|
|
|
2013-01-09 10:30:54 +00:00
|
|
|
// Latin1/UTF-16 constants
|
2010-12-22 20:14:19 +00:00
|
|
|
// Code-point values in Unicode 4.0 are 21 bits wide.
|
2012-03-12 12:35:28 +00:00
|
|
|
// Code units in UTF-16 are 16 bits wide.
|
2010-12-22 20:14:19 +00:00
|
|
|
typedef uint16_t uc16;
|
|
|
|
typedef int32_t uc32;
|
2013-01-09 10:30:54 +00:00
|
|
|
const int kOneByteSize = kCharSize;
|
2010-12-22 20:14:19 +00:00
|
|
|
const int kUC16Size = sizeof(uc16); // NOLINT
|
|
|
|
|
2015-06-02 22:56:00 +00:00
|
|
|
// 128 bit SIMD value size.
|
|
|
|
const int kSimd128Size = 16;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2013-08-23 08:22:07 +00:00
|
|
|
// Round up n to be a multiple of sz, where sz is a power of 2.
|
|
|
|
#define ROUND_UP(n, sz) (((n) + ((sz) - 1)) & ~((sz) - 1))
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// FUNCTION_ADDR(f) gets the address of a C function f.
|
|
|
|
#define FUNCTION_ADDR(f) \
|
|
|
|
(reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(f)))
|
|
|
|
|
|
|
|
|
|
|
|
// FUNCTION_CAST<F>(addr) casts an address into a function
// of type F. Used to invoke generated code from within C.
template <typename F>
F FUNCTION_CAST(Address addr) {
  // Round-trip through intptr_t: converting a data pointer directly to a
  // function pointer type is not portable, whereas the integer detour is
  // well supported on all targeted platforms.
  return reinterpret_cast<F>(reinterpret_cast<intptr_t>(addr));
}
|
|
|
|
|
|
|
|
|
2015-10-28 13:42:23 +00:00
|
|
|
// Determine whether the architecture uses function descriptors
|
|
|
|
// which provide a level of indirection between the function pointer
|
|
|
|
// and the function entrypoint.
|
|
|
|
#if V8_HOST_ARCH_PPC && \
|
|
|
|
(V8_OS_AIX || (V8_TARGET_ARCH_PPC64 && V8_TARGET_BIG_ENDIAN))
|
|
|
|
#define USES_FUNCTION_DESCRIPTORS 1
|
|
|
|
#define FUNCTION_ENTRYPOINT_ADDRESS(f) \
|
|
|
|
(reinterpret_cast<v8::internal::Address*>( \
|
|
|
|
&(reinterpret_cast<intptr_t*>(f)[0])))
|
|
|
|
#else
|
|
|
|
#define USES_FUNCTION_DESCRIPTORS 0
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
2010-11-15 13:23:30 +00:00
|
|
|
// -----------------------------------------------------------------------------
|
|
|
|
// Forward declarations for frequently used classes
|
|
|
|
// (sorted alphabetically)
|
2010-08-31 08:05:42 +00:00
|
|
|
|
2010-11-15 13:23:30 +00:00
|
|
|
class FreeStoreAllocationPolicy;
|
|
|
|
template <typename T, class P = FreeStoreAllocationPolicy> class List;
|
2009-11-13 12:32:57 +00:00
|
|
|
|
2011-10-27 13:08:51 +00:00
|
|
|
// -----------------------------------------------------------------------------
|
|
|
|
// Declarations for use in both the preparser and the rest of V8.
|
|
|
|
|
|
|
|
// The Strict Mode (ECMA-262 5th edition, 4.2.2).
|
2014-03-11 14:41:22 +00:00
|
|
|
|
2016-08-18 15:56:47 +00:00
|
|
|
// The language mode of a compilation unit: sloppy vs. strict mode
// (ECMA-262 5th edition, 4.2.2). LANGUAGE_END is a sentinel/count,
// not a real mode.
enum LanguageMode : uint32_t { SLOPPY, STRICT, LANGUAGE_END };
|
2015-02-05 14:11:34 +00:00
|
|
|
|
2015-05-21 14:31:41 +00:00
|
|
|
inline std::ostream& operator<<(std::ostream& os, const LanguageMode& mode) {
|
2015-04-27 09:12:58 +00:00
|
|
|
switch (mode) {
|
2016-03-10 12:43:51 +00:00
|
|
|
case SLOPPY: return os << "sloppy";
|
|
|
|
case STRICT: return os << "strict";
|
|
|
|
default: UNREACHABLE();
|
2015-04-27 09:12:58 +00:00
|
|
|
}
|
2016-03-10 12:43:51 +00:00
|
|
|
return os;
|
2015-04-27 09:12:58 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-02-05 14:11:34 +00:00
|
|
|
// Returns true when |language_mode| selects sloppy-mode semantics.
inline bool is_sloppy(LanguageMode language_mode) {
  return SLOPPY == language_mode;
}
|
|
|
|
|
|
|
|
|
2015-02-04 09:34:05 +00:00
|
|
|
// Returns true when |language_mode| selects strict-mode semantics,
// i.e. anything other than SLOPPY.
inline bool is_strict(LanguageMode language_mode) {
  return SLOPPY != language_mode;
}
|
|
|
|
|
2015-02-05 14:11:34 +00:00
|
|
|
|
2015-02-04 09:34:05 +00:00
|
|
|
inline bool is_valid_language_mode(int language_mode) {
|
2016-03-10 12:43:51 +00:00
|
|
|
return language_mode == SLOPPY || language_mode == STRICT;
|
2015-02-05 14:11:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-03-10 12:43:51 +00:00
|
|
|
// Builds a LanguageMode from a single strictness bit: false -> SLOPPY,
// true -> STRICT (relies on SLOPPY == 0 and STRICT == 1).
inline LanguageMode construct_language_mode(bool strict_bit) {
  return strict_bit ? STRICT : SLOPPY;
}
|
2011-10-27 13:08:51 +00:00
|
|
|
|
2016-06-30 09:26:30 +00:00
|
|
|
// This constant is used as an undefined value when passing source positions.
|
|
|
|
const int kNoSourcePosition = -1;
|
2015-02-05 14:11:34 +00:00
|
|
|
|
2016-07-19 08:22:04 +00:00
|
|
|
// This constant is used to indicate missing deoptimization information.
|
|
|
|
const int kNoDeoptimizationId = -1;
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// Mask for the sign bit in a smi.
|
|
|
|
const intptr_t kSmiSignMask = kIntptrSignBit;
|
|
|
|
|
|
|
|
const int kObjectAlignmentBits = kPointerSizeLog2;
|
|
|
|
const intptr_t kObjectAlignment = 1 << kObjectAlignmentBits;
|
|
|
|
const intptr_t kObjectAlignmentMask = kObjectAlignment - 1;
|
|
|
|
|
|
|
|
// Desired alignment for pointers.
|
|
|
|
const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
|
|
|
|
const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
|
|
|
|
|
|
|
|
// Desired alignment for double values.
|
|
|
|
const intptr_t kDoubleAlignment = 8;
|
|
|
|
const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
|
|
|
|
|
2015-06-02 22:56:00 +00:00
|
|
|
// Desired alignment for 128 bit SIMD values.
|
|
|
|
const intptr_t kSimd128Alignment = 16;
|
|
|
|
const intptr_t kSimd128AlignmentMask = kSimd128Alignment - 1;
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// Desired alignment for generated code is 32 bytes (to improve cache line
|
|
|
|
// utilization).
|
|
|
|
const int kCodeAlignmentBits = 5;
|
|
|
|
const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
|
|
|
|
const intptr_t kCodeAlignmentMask = kCodeAlignment - 1;
|
|
|
|
|
2014-07-28 18:54:06 +00:00
|
|
|
// The owner field of a page is tagged with the page header tag. We need that
|
|
|
|
// to find out if a slot is part of a large object. If we mask out the lower
|
|
|
|
// 0xfffff bits (1M pages), go to the owner offset, and see that this field
|
|
|
|
// is tagged with the page header tag, we can just look up the owner.
|
|
|
|
// Otherwise, we know that we are somewhere (not within the first 1M) in a
|
|
|
|
// large object.
|
|
|
|
const int kPageHeaderTag = 3;
|
|
|
|
const int kPageHeaderTagSize = 2;
|
|
|
|
const intptr_t kPageHeaderTagMask = (1 << kPageHeaderTagSize) - 1;
|
2014-07-15 12:22:38 +00:00
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
// Zap-value: The value used for zapping dead objects.
|
|
|
|
// Should be a recognizable hex value tagged as a failure.
|
|
|
|
#ifdef V8_HOST_ARCH_64_BIT
|
|
|
|
const Address kZapValue =
|
|
|
|
reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeef));
|
|
|
|
const Address kHandleZapValue =
|
|
|
|
reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddeaf));
|
|
|
|
const Address kGlobalHandleZapValue =
|
|
|
|
reinterpret_cast<Address>(V8_UINT64_C(0x1baffed00baffedf));
|
|
|
|
const Address kFromSpaceZapValue =
|
|
|
|
reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdaf));
|
|
|
|
const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
|
|
|
|
const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeef);
|
|
|
|
const uint64_t kFreeListZapValue = 0xfeed1eaffeed1eaf;
|
|
|
|
#else
|
|
|
|
const Address kZapValue = reinterpret_cast<Address>(0xdeadbeef);
|
|
|
|
const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddeaf);
|
|
|
|
const Address kGlobalHandleZapValue = reinterpret_cast<Address>(0xbaffedf);
|
|
|
|
const Address kFromSpaceZapValue = reinterpret_cast<Address>(0xbeefdaf);
|
|
|
|
const uint32_t kSlotsZapValue = 0xbeefdeef;
|
|
|
|
const uint32_t kDebugZapValue = 0xbadbaddb;
|
|
|
|
const uint32_t kFreeListZapValue = 0xfeed1eaf;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
const int kCodeZapValue = 0xbadc0de;
|
2014-11-03 17:23:55 +00:00
|
|
|
const uint32_t kPhantomReferenceZap = 0xca11bac;
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
// On Intel architecture, cache line size is 64 bytes.
|
|
|
|
// On ARM it may be less (32 bytes), but as far this constant is
|
|
|
|
// used for aligning data, it doesn't hurt to align on a greater value.
|
|
|
|
#define PROCESSOR_CACHE_LINE_SIZE 64
|
|
|
|
|
|
|
|
// Constants relevant to double precision floating point numbers.
|
|
|
|
// If looking only at the top 32 bits, the QNaN mask is bits 19 to 30.
|
|
|
|
const uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);
|
|
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------------
|
|
|
|
// Forward declarations for frequently used classes
|
|
|
|
|
|
|
|
class AccessorInfo;
|
|
|
|
class Allocation;
|
|
|
|
class Arguments;
|
|
|
|
class Assembler;
|
|
|
|
class Code;
|
|
|
|
class CodeGenerator;
|
|
|
|
class CodeStub;
|
|
|
|
class Context;
|
|
|
|
class Debug;
|
|
|
|
class DebugInfo;
|
|
|
|
class Descriptor;
|
|
|
|
class DescriptorArray;
|
|
|
|
class TransitionArray;
|
|
|
|
class ExternalReference;
|
|
|
|
class FixedArray;
|
|
|
|
class FunctionTemplateInfo;
|
|
|
|
class MemoryChunk;
|
|
|
|
class SeededNumberDictionary;
|
|
|
|
class UnseededNumberDictionary;
|
|
|
|
class NameDictionary;
|
2015-06-01 15:43:24 +00:00
|
|
|
class GlobalDictionary;
|
2014-05-26 11:28:08 +00:00
|
|
|
template <typename T> class MaybeHandle;
|
|
|
|
template <typename T> class Handle;
|
|
|
|
class Heap;
|
|
|
|
class HeapObject;
|
|
|
|
class IC;
|
|
|
|
class InterceptorInfo;
|
|
|
|
class Isolate;
|
|
|
|
class JSReceiver;
|
|
|
|
class JSArray;
|
|
|
|
class JSFunction;
|
|
|
|
class JSObject;
|
|
|
|
class LargeObjectSpace;
|
|
|
|
class MacroAssembler;
|
|
|
|
class Map;
|
|
|
|
class MapSpace;
|
|
|
|
class MarkCompactCollector;
|
|
|
|
class NewSpace;
|
|
|
|
class Object;
|
|
|
|
class OldSpace;
|
2015-08-21 10:25:10 +00:00
|
|
|
class ParameterCount;
|
2014-05-26 11:28:08 +00:00
|
|
|
class Foreign;
|
|
|
|
class Scope;
|
2016-08-05 14:30:54 +00:00
|
|
|
class DeclarationScope;
|
2016-08-18 08:50:55 +00:00
|
|
|
class ModuleScope;
|
2014-05-26 11:28:08 +00:00
|
|
|
class ScopeInfo;
|
|
|
|
class Script;
|
|
|
|
class Smi;
|
|
|
|
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
|
2015-09-01 15:19:57 +00:00
|
|
|
class SplayTree;
|
2014-05-26 11:28:08 +00:00
|
|
|
class String;
|
2014-12-10 20:02:48 +00:00
|
|
|
class Symbol;
|
2014-05-26 11:28:08 +00:00
|
|
|
class Name;
|
|
|
|
class Struct;
|
2015-10-01 13:48:05 +00:00
|
|
|
class TypeFeedbackVector;
|
2014-05-26 11:28:08 +00:00
|
|
|
class Variable;
|
|
|
|
class RelocInfo;
|
|
|
|
class Deserializer;
|
|
|
|
class MessageLocation;
|
|
|
|
|
|
|
|
typedef bool (*WeakSlotCallback)(Object** pointer);
|
|
|
|
|
|
|
|
typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------------
|
|
|
|
// Miscellaneous
|
|
|
|
|
|
|
|
// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
|
|
|
|
// consecutive.
|
2014-12-22 08:51:57 +00:00
|
|
|
// Keep this enum in sync with the ObjectSpace enum in v8.h
|
2014-05-26 11:28:08 +00:00
|
|
|
enum AllocationSpace {
|
2015-04-07 11:31:57 +00:00
|
|
|
NEW_SPACE, // Semispaces collected with copying collector.
|
|
|
|
OLD_SPACE, // May contain pointers to new space.
|
|
|
|
CODE_SPACE, // No pointers to new space, marked executable.
|
|
|
|
MAP_SPACE, // Only and all map objects.
|
|
|
|
LO_SPACE, // Promoted large objects.
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
FIRST_SPACE = NEW_SPACE,
|
|
|
|
LAST_SPACE = LO_SPACE,
|
2015-04-07 11:31:57 +00:00
|
|
|
FIRST_PAGED_SPACE = OLD_SPACE,
|
2015-04-10 13:54:09 +00:00
|
|
|
LAST_PAGED_SPACE = MAP_SPACE
|
2014-05-26 11:28:08 +00:00
|
|
|
};
|
|
|
|
const int kSpaceTagSize = 3;
|
|
|
|
const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;
|
|
|
|
|
2015-06-02 22:56:00 +00:00
|
|
|
enum AllocationAlignment {
|
|
|
|
kWordAligned,
|
|
|
|
kDoubleAligned,
|
|
|
|
kDoubleUnaligned,
|
|
|
|
kSimd128Unaligned
|
|
|
|
};
|
2014-05-26 11:28:08 +00:00
|
|
|
|
2016-06-22 07:50:34 +00:00
|
|
|
// Possible outcomes for decisions: a tri-state that can also express
// "not decidable".
enum class Decision : uint8_t { kUnknown, kTrue, kFalse };

// Hashes a Decision by its underlying integral value, so equal decisions
// hash equally.
inline size_t hash_value(Decision decision) {
  return static_cast<size_t>(static_cast<uint8_t>(decision));
}
|
|
|
|
|
|
|
|
// Streams a Decision as "Unknown", "True" or "False".
inline std::ostream& operator<<(std::ostream& os, Decision decision) {
  if (decision == Decision::kUnknown) return os << "Unknown";
  if (decision == Decision::kTrue) return os << "True";
  if (decision == Decision::kFalse) return os << "False";
  UNREACHABLE();
  return os;
}
|
|
|
|
|
2016-05-03 13:55:16 +00:00
|
|
|
// Supported write barrier modes, ordered from cheapest (no barrier at
// all) to most general (full barrier).
enum WriteBarrierKind : uint8_t {
  kNoWriteBarrier,
  kMapWriteBarrier,
  kPointerWriteBarrier,
  kFullWriteBarrier
};

// Hashes a WriteBarrierKind by its underlying integral value.
inline size_t hash_value(WriteBarrierKind kind) {
  return static_cast<size_t>(kind);
}
|
|
|
|
|
|
|
|
inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
|
|
|
|
switch (kind) {
|
|
|
|
case kNoWriteBarrier:
|
|
|
|
return os << "NoWriteBarrier";
|
|
|
|
case kMapWriteBarrier:
|
|
|
|
return os << "MapWriteBarrier";
|
|
|
|
case kPointerWriteBarrier:
|
|
|
|
return os << "PointerWriteBarrier";
|
|
|
|
case kFullWriteBarrier:
|
|
|
|
return os << "FullWriteBarrier";
|
|
|
|
}
|
|
|
|
UNREACHABLE();
|
|
|
|
return os;
|
|
|
|
}
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// A flag that indicates whether objects should be pretenured when
|
|
|
|
// allocated (allocated directly into the old generation) or not
|
|
|
|
// (allocated in the young generation if the object size and type
|
|
|
|
// allows).
|
|
|
|
enum PretenureFlag { NOT_TENURED, TENURED };
|
|
|
|
|
2015-05-21 14:31:41 +00:00
|
|
|
inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
|
|
|
|
switch (flag) {
|
|
|
|
case NOT_TENURED:
|
|
|
|
return os << "NotTenured";
|
|
|
|
case TENURED:
|
|
|
|
return os << "Tenured";
|
|
|
|
}
|
|
|
|
UNREACHABLE();
|
|
|
|
return os;
|
|
|
|
}
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
enum MinimumCapacity {
|
|
|
|
USE_DEFAULT_MINIMUM_CAPACITY,
|
|
|
|
USE_CUSTOM_MINIMUM_CAPACITY
|
|
|
|
};
|
|
|
|
|
|
|
|
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };
|
|
|
|
|
|
|
|
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
|
|
|
|
|
|
|
|
enum VisitMode {
|
|
|
|
VISIT_ALL,
|
|
|
|
VISIT_ALL_IN_SCAVENGE,
|
|
|
|
VISIT_ALL_IN_SWEEP_NEWSPACE,
|
2016-03-17 10:32:36 +00:00
|
|
|
VISIT_ONLY_STRONG,
|
2016-03-17 13:15:56 +00:00
|
|
|
VISIT_ONLY_STRONG_FOR_SERIALIZATION,
|
|
|
|
VISIT_ONLY_STRONG_ROOT_LIST,
|
2014-05-26 11:28:08 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
// Flag indicating whether code is built into the VM (one of the natives files).
|
2016-02-05 12:33:20 +00:00
|
|
|
enum NativesFlag { NOT_NATIVES_CODE, EXTENSION_CODE, NATIVES_CODE };
|
2014-05-26 11:28:08 +00:00
|
|
|
|
2015-08-11 07:34:10 +00:00
|
|
|
// JavaScript defines two kinds of 'nil'.
|
|
|
|
enum NilValue { kNullValue, kUndefinedValue };
|
2014-05-26 11:28:08 +00:00
|
|
|
|
2015-03-09 14:51:13 +00:00
|
|
|
// ParseRestriction is used to restrict the set of valid statements in a
|
|
|
|
// unit of compilation. Restriction violations cause a syntax error.
|
|
|
|
enum ParseRestriction {
|
|
|
|
NO_PARSE_RESTRICTION, // All expressions are allowed.
|
|
|
|
ONLY_SINGLE_FUNCTION_LITERAL // Only a single FunctionLiteral expression.
|
|
|
|
};
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
// and grow forward, the relocation information starts at the end of
// the buffer and grows backward. A constant pool may exist at the
// end of the instructions.
//
// |<--------------- buffer_size ----------------------------------->|
// |<------------- instr_size ---------->|        |<-- reloc_size -->|
// |               |<- const_pool_size ->|                           |
// +=====================================+========+==================+
// |  instructions             |  data   |  free  |    reloc info    |
// +=====================================+========+==================+
// ^
// |
// buffer
struct CodeDesc {
  byte* buffer;            // Start of the backing store (see diagram above).
  int buffer_size;         // Total capacity of the buffer in bytes.
  int instr_size;          // Bytes occupied by instructions (incl. data).
  int reloc_size;          // Bytes of relocation info at the buffer's end.
  int constant_pool_size;  // Bytes of the constant pool trailing the
                           // instructions, if any.
  byte* unwinding_info;       // Unwinding information; presumably may be
                              // null when absent -- TODO confirm ownership.
  int unwinding_info_size;    // Size of unwinding_info in bytes.
  Assembler* origin;       // The Assembler that produced this description.
};
|
|
|
|
|
|
|
|
|
|
|
|
// Callback function used for checking constraints when copying/relocating
|
|
|
|
// objects. Returns true if an object can be copied/relocated from its
|
|
|
|
// old_addr to a new_addr.
|
|
|
|
typedef bool (*ConstraintCallback)(Address new_addr, Address old_addr);
|
|
|
|
|
|
|
|
|
|
|
|
// Callback function on inline caches, used for iterating over inline caches
|
|
|
|
// in compiled code.
|
|
|
|
typedef void (*InlineCacheCallback)(Code* code, Address ic);
|
|
|
|
|
|
|
|
|
|
|
|
// State for inline cache call sites. Aliased as IC::State.
enum InlineCacheState {
  // Has never been executed.
  UNINITIALIZED,
  // Has been executed but monomorphic state has been delayed.
  PREMONOMORPHIC,
  // Has been executed and only one receiver type has been seen.
  MONOMORPHIC,
  // Check failed due to prototype (or map deprecation).
  RECOMPUTE_HANDLER,
  // Multiple receiver types have been seen.
  POLYMORPHIC,
  // Many receiver types have been seen.
  MEGAMORPHIC,
  // A generic handler is installed and no extra typefeedback is recorded.
  GENERIC,
};
|
|
|
|
|
2014-07-18 13:50:21 +00:00
|
|
|
enum CacheHolderFlag {
|
|
|
|
kCacheOnPrototype,
|
|
|
|
kCacheOnPrototypeReceiverIsDictionary,
|
|
|
|
kCacheOnPrototypeReceiverIsPrimitive,
|
|
|
|
kCacheOnReceiver
|
2014-05-26 11:28:08 +00:00
|
|
|
};
|
|
|
|
|
2016-06-08 14:43:22 +00:00
|
|
|
enum WhereToStart { kStartAtReceiver, kStartAtPrototype };
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
// The Store Buffer (GC).
|
|
|
|
typedef enum {
|
|
|
|
kStoreBufferFullEvent,
|
|
|
|
kStoreBufferStartScanningPagesEvent,
|
|
|
|
kStoreBufferScanningPageEvent
|
|
|
|
} StoreBufferEvent;
|
|
|
|
|
|
|
|
|
|
|
|
typedef void (*StoreBufferCallback)(Heap* heap,
|
|
|
|
MemoryChunk* page,
|
|
|
|
StoreBufferEvent event);
|
|
|
|
|
|
|
|
// Union used for customized checking of the IEEE double types
|
|
|
|
// inlined within v8 runtime, rather than going to the underlying
|
|
|
|
// platform headers and libraries
|
|
|
|
union IeeeDoubleLittleEndianArchType {
|
|
|
|
double d;
|
|
|
|
struct {
|
|
|
|
unsigned int man_low :32;
|
|
|
|
unsigned int man_high :20;
|
|
|
|
unsigned int exp :11;
|
|
|
|
unsigned int sign :1;
|
|
|
|
} bits;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
union IeeeDoubleBigEndianArchType {
|
|
|
|
double d;
|
|
|
|
struct {
|
|
|
|
unsigned int sign :1;
|
|
|
|
unsigned int exp :11;
|
|
|
|
unsigned int man_high :20;
|
|
|
|
unsigned int man_low :32;
|
|
|
|
} bits;
|
|
|
|
};
|
|
|
|
|
2016-06-02 15:02:08 +00:00
|
|
|
#if V8_TARGET_LITTLE_ENDIAN
|
|
|
|
typedef IeeeDoubleLittleEndianArchType IeeeDoubleArchType;
|
|
|
|
const int kIeeeDoubleMantissaWordOffset = 0;
|
|
|
|
const int kIeeeDoubleExponentWordOffset = 4;
|
|
|
|
#else
|
|
|
|
typedef IeeeDoubleBigEndianArchType IeeeDoubleArchType;
|
|
|
|
const int kIeeeDoubleMantissaWordOffset = 4;
|
|
|
|
const int kIeeeDoubleExponentWordOffset = 0;
|
|
|
|
#endif
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
// AccessorCallback
|
|
|
|
struct AccessorDescriptor {
|
|
|
|
Object* (*getter)(Isolate* isolate, Object* object, void* data);
|
|
|
|
Object* (*setter)(
|
|
|
|
Isolate* isolate, JSObject* object, Object* value, void* data);
|
|
|
|
void* data;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------------
|
|
|
|
// Macros
|
|
|
|
|
|
|
|
// Testers for tagged values.

// HAS_SMI_TAG(value) is true when the low tag bits of |value| match the
// smi tag (see kSmiTag/kSmiTagMask).
#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag)
|
|
|
|
|
|
|
|
// OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer
// (rounded up to the next kObjectAlignment boundary).
#define OBJECT_POINTER_ALIGN(value) \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)
|
|
|
|
|
|
|
|
// POINTER_SIZE_ALIGN returns the value aligned as a pointer
// (rounded up to the next kPointerAlignment boundary).
#define POINTER_SIZE_ALIGN(value) \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)
|
|
|
|
|
|
|
|
// CODE_POINTER_ALIGN returns the value aligned as a generated code segment
// (rounded up to the next kCodeAlignment boundary).
#define CODE_POINTER_ALIGN(value) \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
|
|
|
|
|
2015-05-07 05:44:47 +00:00
|
|
|
// DOUBLE_POINTER_ALIGN returns the value aligned for double pointers
// (rounded up to the next kDoubleAlignment boundary).
#define DOUBLE_POINTER_ALIGN(value) \
  (((value) + kDoubleAlignmentMask) & ~kDoubleAlignmentMask)
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
|
|
|
|
// CPU feature flags.
// NUMBER_OF_CPU_FEATURES must stay directly after the last distinct
// feature; the ARM aliases at the end deliberately reuse existing values
// and therefore do not contribute new enumerators.
enum CpuFeature {
  // x86
  SSE4_1,
  SSE3,
  SAHF,
  AVX,
  FMA3,
  BMI1,
  BMI2,
  LZCNT,
  POPCNT,
  ATOM,
  // ARM
  // - Standard configurations. The baseline is ARMv6+VFPv2.
  ARMv7,        // ARMv7-A + VFPv3-D32 + NEON
  ARMv7_SUDIV,  // ARMv7-A + VFPv4-D32 + NEON + SUDIV
  ARMv8,        // ARMv8-A (+ all of the above)
  // - Additional tuning flags.
  MOVW_MOVT_IMMEDIATE_LOADS,
  // MIPS, MIPS64
  FPU,
  FP64FPU,
  MIPSr1,
  MIPSr2,
  MIPSr6,
  // ARM64
  ALWAYS_ALIGN_CSP,
  // PPC
  FPR_GPR_MOV,
  LWSYNC,
  ISELECT,
  // S390
  DISTINCT_OPS,
  GENERAL_INSTR_EXT,
  FLOATING_POINT_EXT,

  NUMBER_OF_CPU_FEATURES,

  // ARM feature aliases (based on the standard configurations above).
  VFP3 = ARMv7,
  NEON = ARMv7,
  VFP32DREGS = ARMv7,
  SUDIV = ARMv7_SUDIV
};
|
|
|
|
|
2015-11-09 08:47:59 +00:00
|
|
|
// Defines hints about receiver values based on structural knowledge.
enum class ConvertReceiverMode : unsigned {
  kNullOrUndefined,     // Guaranteed to be null or undefined.
  kNotNullOrUndefined,  // Guaranteed to never be null or undefined.
  kAny                  // No specific knowledge about receiver.
};
|
|
|
|
|
|
|
|
// Hashing support so ConvertReceiverMode can be used as a hash-map key.
inline size_t hash_value(ConvertReceiverMode mode) {
  const unsigned raw = bit_cast<unsigned>(mode);
  return raw;
}
|
|
|
|
|
|
|
|
// Pretty-printing for ConvertReceiverMode.
inline std::ostream& operator<<(std::ostream& os, ConvertReceiverMode mode) {
  switch (mode) {
    case ConvertReceiverMode::kNullOrUndefined:
      return os << "NULL_OR_UNDEFINED";
    case ConvertReceiverMode::kNotNullOrUndefined:
      return os << "NOT_NULL_OR_UNDEFINED";
    case ConvertReceiverMode::kAny:
      return os << "ANY";
  }
  // All enumerators return above; only reachable with a corrupted value.
  UNREACHABLE();
  return os;
}
|
|
|
|
|
2016-01-26 11:07:15 +00:00
|
|
|
// Defines whether tail call optimization is allowed.
enum class TailCallMode : unsigned { kAllow, kDisallow };

// Hashing support so TailCallMode can be used as a hash-map key.
inline size_t hash_value(TailCallMode mode) { return bit_cast<unsigned>(mode); }
|
|
|
|
|
|
|
|
// Pretty-printing for TailCallMode.
inline std::ostream& operator<<(std::ostream& os, TailCallMode mode) {
  switch (mode) {
    case TailCallMode::kAllow:
      return os << "ALLOW_TAIL_CALLS";
    case TailCallMode::kDisallow:
      return os << "DISALLOW_TAIL_CALLS";
  }
  // All enumerators return above; only reachable with a corrupted value.
  UNREACHABLE();
  return os;
}
|
2015-11-09 08:47:59 +00:00
|
|
|
|
2016-07-14 10:25:45 +00:00
|
|
|
// Valid hints for the abstract operation OrdinaryToPrimitive,
// implemented according to ES6, section 7.1.1.
enum class OrdinaryToPrimitiveHint { kNumber, kString };

// Valid hints for the abstract operation ToPrimitive,
// implemented according to ES6, section 7.1.1.
enum class ToPrimitiveHint { kDefault, kNumber, kString };
|
|
|
|
|
[runtime] Optimize and unify rest parameters.
Replace the somewhat awkward RestParamAccessStub, which would always
call into the runtime anyway with a proper FastNewRestParameterStub,
which is basically based on the code that was already there for strict
arguments object materialization. But for rest parameters we could
optimize even further (leading to 8-10x improvements for functions with
rest parameters), by fixing the internal formal parameter count:
Every SharedFunctionInfo has a formal_parameter_count field, which
specifies the number of formal parameters, and is used to decide whether
we need to create an arguments adaptor frame when calling a function
(i.e. if there's a mismatch between the actual and expected parameters).
Previously the formal_parameter_count included the rest parameter, which
was sort of unfortunate, as that meant that calling a function with only
the non-rest parameters still required an arguments adaptor (plus some
other oddities). Now with this CL we fix, so that we do no longer
include the rest parameter in that count. Thereby checking for rest
parameters is very efficient, as we only need to check whether there is
an arguments adaptor frame, and if not create an empty array, otherwise
check whether the arguments adaptor frame has more parameters than
specified by the formal_parameter_count.
The FastNewRestParameterStub is written in a way that it can be directly
used by Ignition as well, and with some tweaks to the TurboFan backends
and the CodeStubAssembler, we should be able to rewrite it as
TurboFanCodeStub in the near future.
Drive-by-fix: Refactor and unify the CreateArgumentsType which was
different in TurboFan and Ignition; now we have a single enum class
which is used in both TurboFan and Ignition.
R=jarin@chromium.org, rmcilroy@chromium.org
TBR=rossberg@chromium.org
BUG=v8:2159
LOG=n
Review URL: https://codereview.chromium.org/1676883002
Cr-Commit-Position: refs/heads/master@{#33809}
2016-02-08 10:08:21 +00:00
|
|
|
// Defines specifics about arguments object or rest parameter creation.
enum class CreateArgumentsType : uint8_t {
  kMappedArguments,    // Sloppy-mode arguments mapped to formal parameters.
  kUnmappedArguments,  // Strict-mode (unmapped) arguments.
  kRestParameter       // '...rest' parameter array.
};
|
|
|
|
|
|
|
|
// Hashing support so CreateArgumentsType can be used as a hash-map key.
inline size_t hash_value(CreateArgumentsType type) {
  const uint8_t raw = bit_cast<uint8_t>(type);
  return raw;
}
|
|
|
|
|
|
|
|
// Pretty-printing for CreateArgumentsType.
inline std::ostream& operator<<(std::ostream& os, CreateArgumentsType type) {
  switch (type) {
    case CreateArgumentsType::kMappedArguments:
      return os << "MAPPED_ARGUMENTS";
    case CreateArgumentsType::kUnmappedArguments:
      return os << "UNMAPPED_ARGUMENTS";
    case CreateArgumentsType::kRestParameter:
      return os << "REST_PARAMETER";
  }
  // All enumerators return above; only reachable with a corrupted value.
  UNREACHABLE();
  return os;
}
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// Used to specify if a macro instruction must perform a smi check on tagged
// values.
enum SmiCheckType {
  DONT_DO_SMI_CHECK,
  DO_SMI_CHECK
};
|
|
|
|
|
2016-08-24 08:49:43 +00:00
|
|
|
// Kinds of lexical scopes recognized by the parser/scope analysis.
enum ScopeType : uint8_t {
  EVAL_SCOPE,      // The top-level scope for an eval source.
  FUNCTION_SCOPE,  // The top-level scope for a function.
  MODULE_SCOPE,    // The scope introduced by a module literal
  SCRIPT_SCOPE,    // The top-level scope for a script or a top-level eval.
  CATCH_SCOPE,     // The scope introduced by catch.
  BLOCK_SCOPE,     // The scope introduced by a new block.
  WITH_SCOPE       // The scope introduced by with.
};
|
|
|
|
|
2015-02-20 19:09:18 +00:00
|
|
|
// Bit pattern of the "hole" NaN used to mark holes in double arrays.
// The mips architecture prior to revision 5 has inverted encoding for sNaN.
// The x87 FPU converts an sNaN to a qNaN automatically when loading it from
// memory. Use the mips sNaN, which is an unused qNaN in the x87 port, as the
// sNaN to work around this issue for some test cases.
#if (V8_TARGET_ARCH_MIPS && !defined(_MIPS_ARCH_MIPS32R6) && \
     (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR))) || \
    (V8_TARGET_ARCH_MIPS64 && !defined(_MIPS_ARCH_MIPS64R6) && \
     (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR))) || \
    (V8_TARGET_ARCH_X87)
const uint32_t kHoleNanUpper32 = 0xFFFF7FFF;
const uint32_t kHoleNanLower32 = 0xFFFF7FFF;
#else
const uint32_t kHoleNanUpper32 = 0xFFF7FFFF;
const uint32_t kHoleNanLower32 = 0xFFF7FFFF;
#endif

// The full 64-bit hole-NaN pattern assembled from the two words above.
const uint64_t kHoleNanInt64 =
    (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
|
|
|
|
|
|
|
|
|
2015-09-29 07:41:03 +00:00
|
|
|
// ES6 section 20.1.2.6 Number.MAX_SAFE_INTEGER
// The largest integer n such that n and n+1 are both representable exactly
// as doubles.
const double kMaxSafeInteger = 9007199254740991.0;  // 2^53-1
|
|
|
|
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// The order of this enum has to be kept in sync with the predicates below.
enum VariableMode : uint8_t {
  // User declared variables:
  VAR,  // declared via 'var', and 'function' declarations

  LET,  // declared via 'let' declarations (first lexical)

  CONST,  // declared via 'const' declarations (last lexical)

  // Variables introduced by the compiler:
  TEMPORARY,  // temporary variables (not user-visible), stack-allocated
              // unless the scope as a whole has forced context allocation

  DYNAMIC,  // always require dynamic lookup (we don't know
            // the declaration)

  DYNAMIC_GLOBAL,  // requires dynamic lookup, but we know that the
                   // variable is global unless it has been shadowed
                   // by an eval-introduced variable

  DYNAMIC_LOCAL,  // requires dynamic lookup, but we know that the
                  // variable is local and where it is unless it
                  // has been shadowed by an eval-introduced
                  // variable

  kLastVariableMode = DYNAMIC_LOCAL
};
|
|
|
|
|
|
|
|
// True for the compiler-introduced modes that require dynamic lookup
// (DYNAMIC, DYNAMIC_GLOBAL, DYNAMIC_LOCAL). Relies on the enum ordering.
inline bool IsDynamicVariableMode(VariableMode mode) {
  return DYNAMIC <= mode && mode <= DYNAMIC_LOCAL;
}
|
|
|
|
|
|
|
|
|
|
|
|
// True for modes that stem from an explicit user declaration
// (VAR, LET, CONST), as opposed to compiler-introduced modes.
inline bool IsDeclaredVariableMode(VariableMode mode) {
  // VAR is the first enumerator, so checking the upper bound suffices.
  STATIC_ASSERT(VAR == 0);
  return mode <= CONST;
}
|
|
|
|
|
|
|
|
|
|
|
|
// True for the lexically scoped declaration modes (LET and CONST).
// Relies on LET..CONST being contiguous in the enum.
inline bool IsLexicalVariableMode(VariableMode mode) {
  return LET <= mode && mode <= CONST;
}
|
|
|
|
|
2016-08-25 08:46:08 +00:00
|
|
|
// Where the storage of a variable lives after scope analysis/allocation.
enum VariableLocation : uint8_t {
  // Before and during variable allocation, a variable whose location is
  // not yet determined. After allocation, a variable looked up as a
  // property on the global object (and possibly absent). name() is the
  // variable name, index() is invalid.
  UNALLOCATED,

  // A slot in the parameter section on the stack. index() is the
  // parameter index, counting left-to-right. The receiver is index -1;
  // the first parameter is index 0.
  PARAMETER,

  // A slot in the local section on the stack. index() is the variable
  // index in the stack frame, starting at 0.
  LOCAL,

  // An indexed slot in a heap context. index() is the variable index in
  // the context object on the heap, starting at 0. scope() is the
  // corresponding scope.
  CONTEXT,

  // A named slot in a heap context. name() is the variable name in the
  // context object on the heap, with lookup starting at the current
  // context. index() is invalid.
  LOOKUP,

  // A named slot in a module's export table.
  MODULE,

  kLastVariableLocation = MODULE
};
|
2015-07-06 16:36:28 +00:00
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// ES6 Draft Rev3 10.2 specifies declarative environment records with mutable
// and immutable bindings that can be in two states: initialized and
// uninitialized. In ES5 only immutable bindings have these two states. When
// accessing a binding, it needs to be checked for initialization. However in
// the following cases the binding is initialized immediately after creation
// so the initialization check can always be skipped:
// 1. Var declared local variables.
//      var foo;
// 2. A local variable introduced by a function declaration.
//      function foo() {}
// 3. Parameters
//      function x(foo) {}
// 4. Catch bound variables.
//      try {} catch (foo) {}
// 5. Function variables of named function expressions.
//      var x = function foo() {}
// 6. Implicit binding of 'this'.
// 7. Implicit binding of 'arguments' in functions.
//
// ES5 specified object environment records which are introduced by ES elements
// such as Program and WithStatement that associate identifier bindings with the
// properties of some object. In the specification only mutable bindings exist
// (which may be non-writable) and have no distinct initialization step. However
// V8 allows const declarations in global code with distinct creation and
// initialization steps which are represented by non-writable properties in the
// global object. As a result also these bindings need to be checked for
// initialization.
//
// The following enum specifies a flag that indicates if the binding needs a
// distinct initialization step (kNeedsInitialization) or if the binding is
// immediately initialized upon creation (kCreatedInitialized).
enum InitializationFlag : uint8_t { kNeedsInitialization, kCreatedInitialized };
|
2014-07-30 13:54:45 +00:00
|
|
|
|
2016-08-25 08:46:08 +00:00
|
|
|
// Records whether a variable might be assigned after its declaration.
enum MaybeAssignedFlag : uint8_t { kNotAssigned, kMaybeAssigned };
|
2014-07-30 13:54:45 +00:00
|
|
|
|
2015-02-20 21:19:43 +00:00
|
|
|
// Serialized in PreparseData, so numeric values should not be changed.
enum ParseErrorType { kSyntaxError = 0, kReferenceError = 1 };
|
|
|
|
|
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// Controls whether -0.0 is treated like +0.0 or causes failure/deopt.
enum MinusZeroMode {
  TREAT_MINUS_ZERO_AS_ZERO,
  FAIL_ON_MINUS_ZERO
};
|
|
|
|
|
2014-09-10 16:39:42 +00:00
|
|
|
|
2014-09-24 08:49:32 +00:00
|
|
|
// Distinguishes signed from unsigned integer interpretations.
enum Signedness { kSigned, kUnsigned };
|
|
|
|
|
2016-08-01 13:25:39 +00:00
|
|
|
// Kinds of JavaScript functions. This is a bit field: the compound kinds
// below are ORs of the single-bit flags, so the Is* predicates that follow
// can test membership with a mask.
enum FunctionKind : uint16_t {
  kNormalFunction = 0,
  kArrowFunction = 1 << 0,
  kGeneratorFunction = 1 << 1,
  kConciseMethod = 1 << 2,
  kConciseGeneratorMethod = kGeneratorFunction | kConciseMethod,
  kDefaultConstructor = 1 << 3,
  kSubclassConstructor = 1 << 4,
  kBaseConstructor = 1 << 5,
  kGetterFunction = 1 << 6,
  kSetterFunction = 1 << 7,
  kAsyncFunction = 1 << 8,
  kAccessorFunction = kGetterFunction | kSetterFunction,
  kDefaultBaseConstructor = kDefaultConstructor | kBaseConstructor,
  kDefaultSubclassConstructor = kDefaultConstructor | kSubclassConstructor,
  kClassConstructor =
      kBaseConstructor | kSubclassConstructor | kDefaultConstructor,
  kAsyncArrowFunction = kArrowFunction | kAsyncFunction,
  kAsyncConciseMethod = kAsyncFunction | kConciseMethod
};
|
|
|
|
|
|
|
|
// A FunctionKind value is valid only if it is one of the named kinds above;
// arbitrary bit combinations are not valid kinds.
inline bool IsValidFunctionKind(FunctionKind kind) {
  return kind == FunctionKind::kNormalFunction ||
         kind == FunctionKind::kArrowFunction ||
         kind == FunctionKind::kGeneratorFunction ||
         kind == FunctionKind::kConciseMethod ||
         kind == FunctionKind::kConciseGeneratorMethod ||
         kind == FunctionKind::kGetterFunction ||
         kind == FunctionKind::kSetterFunction ||
         kind == FunctionKind::kAccessorFunction ||
         kind == FunctionKind::kDefaultBaseConstructor ||
         kind == FunctionKind::kDefaultSubclassConstructor ||
         kind == FunctionKind::kBaseConstructor ||
         kind == FunctionKind::kSubclassConstructor ||
         kind == FunctionKind::kAsyncFunction ||
         kind == FunctionKind::kAsyncArrowFunction ||
         kind == FunctionKind::kAsyncConciseMethod;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Tests the kArrowFunction bit of |kind|.
inline bool IsArrowFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kArrowFunction) != 0;
}
|
|
|
|
|
|
|
|
|
|
|
|
// Tests the kGeneratorFunction bit of |kind|.
inline bool IsGeneratorFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kGeneratorFunction) != 0;
}
|
|
|
|
|
2016-05-16 23:17:13 +00:00
|
|
|
// Tests the kAsyncFunction bit of |kind|.
inline bool IsAsyncFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kAsyncFunction) != 0;
}
|
2014-09-10 16:39:42 +00:00
|
|
|
|
2016-06-13 17:19:47 +00:00
|
|
|
// Resumable functions are those whose activation can be suspended and
// resumed: generators and async functions.
inline bool IsResumableFunction(FunctionKind kind) {
  if (IsGeneratorFunction(kind)) return true;
  return IsAsyncFunction(kind);
}
|
|
|
|
|
2014-09-10 16:39:42 +00:00
|
|
|
// Tests the kConciseMethod bit of |kind|.
inline bool IsConciseMethod(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kConciseMethod) != 0;
}
|
2014-11-07 16:39:00 +00:00
|
|
|
|
2016-02-19 02:50:58 +00:00
|
|
|
// Tests the kGetterFunction bit of |kind|.
inline bool IsGetterFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kGetterFunction) != 0;
}
|
|
|
|
|
|
|
|
// Tests the kSetterFunction bit of |kind|.
inline bool IsSetterFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kSetterFunction) != 0;
}
|
2014-11-07 16:39:00 +00:00
|
|
|
|
2015-02-05 23:34:16 +00:00
|
|
|
// Tests the kAccessorFunction mask (getter or setter bit) of |kind|.
inline bool IsAccessorFunction(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kAccessorFunction) != 0;
}
|
|
|
|
|
|
|
|
|
2014-11-07 16:39:00 +00:00
|
|
|
// Tests the kDefaultConstructor bit of |kind|.
inline bool IsDefaultConstructor(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kDefaultConstructor) != 0;
}
|
|
|
|
|
2015-02-05 23:34:16 +00:00
|
|
|
|
2015-02-06 10:34:50 +00:00
|
|
|
// Tests the kBaseConstructor bit of |kind|.
inline bool IsBaseConstructor(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kBaseConstructor) != 0;
}
|
|
|
|
|
|
|
|
|
2015-02-03 17:42:41 +00:00
|
|
|
// Tests the kSubclassConstructor bit of |kind|.
inline bool IsSubclassConstructor(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kSubclassConstructor) != 0;
}
|
2015-02-06 10:34:50 +00:00
|
|
|
|
|
|
|
|
2015-09-24 06:50:01 +00:00
|
|
|
// Tests the kClassConstructor mask (base, subclass or default constructor
// bit) of |kind|.
inline bool IsClassConstructor(FunctionKind kind) {
  DCHECK(IsValidFunctionKind(kind));
  return (kind & FunctionKind::kClassConstructor) != 0;
}
|
2015-03-10 16:47:50 +00:00
|
|
|
|
|
|
|
|
2015-11-24 17:16:40 +00:00
|
|
|
// Whether functions of this kind may be invoked as constructors (with
// 'new'): accessors, concise methods, arrow functions, generators and
// async functions are not constructable.
// NOTE(review): |mode| is currently unused here; kept for signature
// compatibility with callers — confirm before removing.
inline bool IsConstructable(FunctionKind kind, LanguageMode mode) {
  return !(IsAccessorFunction(kind) || IsConciseMethod(kind) ||
           IsArrowFunction(kind) || IsGeneratorFunction(kind) ||
           IsAsyncFunction(kind));
}
|
|
|
|
|
2016-07-13 07:58:40 +00:00
|
|
|
// Distinguishes callables statically known to be JSFunctions from
// arbitrary callables.
enum class CallableType : unsigned { kJSFunction, kAny };

// Hashing support so CallableType can be used as a hash-map key.
inline size_t hash_value(CallableType type) { return bit_cast<unsigned>(type); }
|
|
|
|
|
|
|
|
// Pretty-printing for CallableType.
inline std::ostream& operator<<(std::ostream& os, CallableType function_type) {
  switch (function_type) {
    case CallableType::kJSFunction:
      return os << "JSFunction";
    case CallableType::kAny:
      return os << "Any";
  }
  // All enumerators return above; only reachable with a corrupted value.
  UNREACHABLE();
  return os;
}
|
2015-11-24 17:16:40 +00:00
|
|
|
|
Reland of "[heap] Parallel newspace evacuation, semispace copy, and compaction \o/"
This reverts commit 85ba94f28ce4b5d64e4c148efb1fee85bdb6579b.
All parallelism can be turned off using --predictable, or --noparallel-compaction.
This patch completely parallelizes
- semispace copy: from space -> to space (within newspace)
- newspace evacuation: newspace -> oldspace
- oldspace compaction: oldspace -> oldspace
Previously newspace has been handled sequentially (semispace copy, newspace
evacuation) before compacting oldspace in parallel. However, on a high level
there are no dependencies between those two actions, hence we parallelize them
altogether. We base the number of evacuation tasks on the overall set of
to-be-processed pages (newspace + oldspace compaction pages).
Some low-level details:
- The hard cap on number of tasks has been lifted
- We cache store buffer entries locally before merging them back into the global
StoreBuffer in a finalization phase.
- We cache AllocationSite operations locally before merging them back into the
global pretenuring storage in a finalization phase.
- AllocationSite might be compacted while they would be needed for newspace
evacuation. To mitigate any problems we defer checking allocation sites for
newspace till merging locally buffered data.
CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_asan_rel,v8_linux64_tsan_rel,v8_mac64_asan_rel
BUG=chromium:524425
LOG=N
R=hpayer@chromium.org, ulan@chromium.org
Review URL: https://codereview.chromium.org/1640563004
Cr-Commit-Position: refs/heads/master@{#33552}
2016-01-27 13:23:54 +00:00
|
|
|
// Hashes a heap address. All objects are at least pointer aligned, so the
// low kPointerSizeLog2 bits carry no information and are shifted out.
inline uint32_t ObjectHash(Address address) {
  const uintptr_t raw = bit_cast<uintptr_t>(address);
  return static_cast<uint32_t>(raw >> kPointerSizeLog2);
}
|
|
|
|
|
2016-08-10 14:33:37 +00:00
|
|
|
// Type feedback is encoded in such a way that, we can combine the feedback
// at different points by performing an 'OR' operation. Type feedback moves
// to a more generic type when we combine feedback.
// kSignedSmall -> kNumber -> kAny
// The bit patterns nest (0x01 within 0x3 within 0x7), which is what makes
// the OR-combination monotone.
class BinaryOperationFeedback {
 public:
  enum { kNone = 0x00, kSignedSmall = 0x01, kNumber = 0x3, kAny = 0x7 };
};
|
|
|
|
|
2016-08-30 10:21:02 +00:00
|
|
|
// Type feedback for compare operations, with the same OR-combinable nested
// bit encoding as BinaryOperationFeedback.
// TODO(epertoso): consider unifying this with BinaryOperationFeedback.
class CompareOperationFeedback {
 public:
  enum { kNone = 0x00, kSignedSmall = 0x01, kNumber = 0x3, kAny = 0x7 };
};
|
|
|
|
|
2015-09-30 13:46:56 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2014-05-26 11:28:08 +00:00
|
|
|
// Shorthand alias so internal code can refer to v8::internal as i::.
namespace i = v8::internal;
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
#endif // V8_GLOBALS_H_
|