Reland "Initial switch to Chromium-style CHECK_* and DCHECK_* macros.".
R=svenpanne@chromium.org Review URL: https://codereview.chromium.org/877753007 Cr-Commit-Position: refs/heads/master@{#26346}
This commit is contained in:
parent
491eb81780
commit
c65ae4f10c
@@ -185,6 +185,7 @@
// V8_HAS_DECLSPEC_ALIGN - __declspec(align(n)) supported
// V8_HAS_DECLSPEC_DEPRECATED - __declspec(deprecated) supported
// V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported
// V8_HAS_DECLSPEC_SELECTANY - __declspec(selectany) supported
// V8_HAS___FINAL - __final supported in non-C++11 mode
// V8_HAS___FORCEINLINE - __forceinline supported
//

@@ -289,6 +290,7 @@
# define V8_HAS_DECLSPEC_ALIGN 1
# define V8_HAS_DECLSPEC_DEPRECATED 1
# define V8_HAS_DECLSPEC_NOINLINE 1
# define V8_HAS_DECLSPEC_SELECTANY 1

# define V8_HAS___FORCEINLINE 1

@@ -4996,7 +4996,7 @@ void v8::Object::SetInternalField(int index, v8::Handle<Value> value) {
if (!InternalFieldOK(obj, index, location)) return;
i::Handle<i::Object> val = Utils::OpenHandle(*value);
obj->SetInternalField(index, *val);
DCHECK_EQ(value, GetInternalField(index));
DCHECK(value->Equals(GetInternalField(index)));
}

@@ -7357,6 +7357,11 @@ void HeapSnapshot::Serialize(OutputStream* stream,
}

// static
STATIC_CONST_MEMBER_DEFINITION const SnapshotObjectId
HeapProfiler::kUnknownObjectId;

int HeapProfiler::GetSnapshotCount() {
return reinterpret_cast<i::HeapProfiler*>(this)->GetSnapshotsCount();
}

@@ -561,7 +561,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(0, Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
__ and_(r2, lhs, Operand(rhs));
__ JumpIfNotSmi(r2, &not_smis);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:

@@ -3813,7 +3813,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK(args->length() == 2);
DCHECK_NE(NULL, args->at(1)->AsLiteral());
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

VisitForAccumulatorValue(args->at(0)); // Load the object.

@@ -4161,7 +4161,7 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK_EQ(2, args->length());
DCHECK_NE(NULL, args->at(0)->AsLiteral());
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

Handle<FixedArray> jsfunction_result_caches(

@@ -944,7 +944,7 @@ class MacroAssembler: public Assembler {
ldr(type, FieldMemOperand(obj, HeapObject::kMapOffset), cond);
ldrb(type, FieldMemOperand(type, Map::kInstanceTypeOffset), cond);
tst(type, Operand(kIsNotStringMask), cond);
DCHECK_EQ(0, kStringTag);
DCHECK_EQ(0u, kStringTag);
return eq;
}

@@ -3522,7 +3522,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK(args->length() == 2);
DCHECK_NE(NULL, args->at(1)->AsLiteral());
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

VisitForAccumulatorValue(args->at(0)); // Load the object.

@@ -3868,7 +3868,7 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK_EQ(2, args->length());
DCHECK_NE(NULL, args->at(0)->AsLiteral());
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

Handle<FixedArray> jsfunction_result_caches(

@@ -2820,7 +2820,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
__ Sdiv(result, dividend, divisor);

if (hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
DCHECK_EQ(NULL, instr->temp());
DCHECK(!instr->temp());
return;
}

@@ -3936,7 +3936,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(
Cmp(index, index_type == kIndexIsSmi ? scratch : Operand::UntagSmi(scratch));
Check(lt, kIndexIsTooLarge);

DCHECK_EQ(0, Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
Cmp(index, 0);
Check(ge, kIndexIsNegative);
}
@@ -51,4 +51,27 @@
#define WARN_UNUSED_RESULT /* NOT SUPPORTED */
#endif

// The C++ standard requires that static const members have an out-of-class
// definition (in a single compilation unit), but MSVC chokes on this (when
// language extensions, which are required, are enabled). (You're only likely to
// notice the need for a definition if you take the address of the member or,
// more commonly, pass it to a function that takes it as a reference argument --
// probably an STL function.) This macro makes MSVC do the right thing. See
// http://msdn.microsoft.com/en-us/library/34h23df8(v=vs.100).aspx for more
// information. Use like:
//
// In .h file:
// struct Foo {
//   static const int kBar = 5;
// };
//
// In .cc file:
// STATIC_CONST_MEMBER_DEFINITION const int Foo::kBar;
#if V8_HAS_DECLSPEC_SELECTANY
#define STATIC_CONST_MEMBER_DEFINITION __declspec(selectany)
#else
#define STATIC_CONST_MEMBER_DEFINITION
#endif

#endif // V8_BASE_COMPILER_SPECIFIC_H_
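For illustration only, a minimal sketch of the case the comment above describes (Foo and Clamp are made-up names; this assumes the new header is on the include path): passing a static const member to a function that binds it to a const reference ODR-uses it, so the out-of-class definition is needed, and __declspec(selectany) keeps MSVC happy when language extensions are on.

#include <algorithm>
#include "src/base/compiler-specific.h"

struct Foo {
  static const int kBar = 5;  // in-class initializer only
};

// Out-of-class definition. Without it, std::max(x, Foo::kBar) below can fail
// to link, because std::max binds its arguments to const references.
STATIC_CONST_MEMBER_DEFINITION const int Foo::kBar;

int Clamp(int x) { return std::max(x, Foo::kBar); }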
@@ -10,14 +10,45 @@
#elif V8_OS_QNX
# include <backtrace.h>
#endif // V8_LIBC_GLIBC || V8_OS_BSD
#include <stdio.h>
#include <stdlib.h>

#include <cstdio>
#include <cstdlib>

#include "src/base/platform/platform.h"

namespace v8 {
namespace base {

// Explicit instantiations for commonly used comparisons.
#define DEFINE_MAKE_CHECK_OP_STRING(type) \
template std::string* MakeCheckOpString<type, type>( \
type const&, type const&, char const*);
DEFINE_MAKE_CHECK_OP_STRING(int)
DEFINE_MAKE_CHECK_OP_STRING(long)                // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(long long)           // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned long)       // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned long long)  // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(char const*)
DEFINE_MAKE_CHECK_OP_STRING(void const*)
#undef DEFINE_MAKE_CHECK_OP_STRING

// Explicit instantiations for floating point checks.
#define DEFINE_CHECK_OP_IMPL(NAME) \
template std::string* Check##NAME##Impl<float, float>( \
float const& lhs, float const& rhs, char const* msg); \
template std::string* Check##NAME##Impl<double, double>( \
double const& lhs, double const& rhs, char const* msg);
DEFINE_CHECK_OP_IMPL(EQ)
DEFINE_CHECK_OP_IMPL(NE)
DEFINE_CHECK_OP_IMPL(LE)
DEFINE_CHECK_OP_IMPL(LT)
DEFINE_CHECK_OP_IMPL(GE)
DEFINE_CHECK_OP_IMPL(GT)
#undef DEFINE_CHECK_OP_IMPL

// Attempts to dump a backtrace (if supported).
void DumpBacktrace() {
#if V8_LIBC_GLIBC || V8_OS_BSD

@@ -68,7 +99,8 @@ void DumpBacktrace() {
#endif // V8_LIBC_GLIBC || V8_OS_BSD
}

} } // namespace v8::base
} // namespace base
} // namespace v8

// Contains protection against recursive calls (faults while handling faults).
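The explicit instantiations added above pair with the extern template declarations in logging.h: the header suppresses implicit instantiation in every translation unit, and logging.cc provides the single definition everyone links against. A standalone sketch of the same idiom, with illustrative names that are not part of this change:

// describe.h
#include <string>
template <typename T>
std::string Describe(T const& value) { return "value=" + std::to_string(value); }
// Suppress implicit instantiation for int; the definition lives in describe.cc.
extern template std::string Describe<int>(int const&);

// describe.cc
// #include "describe.h"
// The one explicit instantiation that every other translation unit reuses.
template std::string Describe<int>(int const&);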
@@ -5,8 +5,9 @@
#ifndef V8_BASE_LOGGING_H_
#define V8_BASE_LOGGING_H_

#include <stdint.h>
#include <string.h>
#include <cstring>
#include <sstream>
#include <string>

#include "src/base/build_config.h"

@@ -31,186 +32,124 @@ extern "C" void V8_Fatal(const char* file, int line, const char* format, ...);
#endif

// The CHECK macro checks that the given condition is true; if not, it
// prints a message to stderr and aborts.
namespace v8 {
namespace base {

// CHECK dies with a fatal error if condition is not true. It is *not*
// controlled by DEBUG, so the check will be executed regardless of
// compilation mode.
//
// We make sure CHECK et al. always evaluates their arguments, as
// doing CHECK(FunctionWithSideEffect()) is a common idiom.
#define CHECK(condition) \
do { \
if (V8_UNLIKELY(!(condition))) { \
V8_Fatal(__FILE__, __LINE__, "CHECK(%s) failed", #condition); \
V8_Fatal(__FILE__, __LINE__, "Check failed: %s.", #condition); \
} \
} while (0)

// Helper function used by the CHECK_EQ function when given int
// arguments. Should not be called directly.
inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source, int expected,
const char* value_source, int value) {
if (V8_UNLIKELY(expected != value)) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %i\n# Found: %i",
expected_source, value_source, expected, value);
}
}
#ifdef DEBUG

// Helper macro for binary operators.
// Don't use this macro directly in your code, use CHECK_EQ et al below.
#define CHECK_OP(name, op, lhs, rhs) \
do { \
if (std::string* _msg = ::v8::base::Check##name##Impl( \
(lhs), (rhs), #lhs " " #op " " #rhs)) { \
V8_Fatal(__FILE__, __LINE__, "Check failed: %s.", _msg->c_str()); \
delete _msg; \
} \
} while (0)

// Helper function used by the CHECK_EQ function when given int64_t
// arguments. Should not be called directly.
inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source,
int64_t expected,
const char* value_source,
int64_t value) {
if (V8_UNLIKELY(expected != value)) {
// Print int64_t values in hex, as two int32s,
// to avoid platform-dependencies.
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n#"
" Expected: 0x%08x%08x\n# Found: 0x%08x%08x",
expected_source, value_source,
static_cast<uint32_t>(expected >> 32),
static_cast<uint32_t>(expected),
static_cast<uint32_t>(value >> 32),
static_cast<uint32_t>(value));
}
}
#else

// Make all CHECK functions discard their log strings to reduce code
// bloat for official release builds.

#define CHECK_OP(name, op, lhs, rhs) CHECK((lhs)op(rhs))

// 32-bit AIX defines intptr_t as long int.
#if V8_OS_AIX && V8_HOST_ARCH_32_BIT
// Helper function used by the CHECK_EQ function when given intptr_t
// arguments. Should not be called directly.
inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source, intptr_t expected,
const char* value_source, intptr_t value) {
if (expected != value) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n#"
" Expected: 0x%lx\n# Found: 0x%lx",
expected_source, value_source, expected, value);
}
}
#endif

// Helper function used by the CHECK_NE function when given int
// arguments. Should not be called directly.
inline void CheckNonEqualsHelper(const char* file,
int line,
const char* unexpected_source,
int unexpected,
const char* value_source,
int value) {
if (V8_UNLIKELY(unexpected == value)) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %i",
unexpected_source, value_source, value);
}
// Build the error message string. This is separate from the "Impl"
// function template because it is not performance critical and so can
// be out of line, while the "Impl" code should be inline. Caller
// takes ownership of the returned string.
template <typename Lhs, typename Rhs>
std::string* MakeCheckOpString(Lhs const& lhs, Rhs const& rhs,
char const* msg) {
std::ostringstream ss;
ss << msg << " (" << lhs << " vs. " << rhs << ")";
return new std::string(ss.str());
}

// Helper function used by the CHECK function when given string
// arguments. Should not be called directly.
inline void CheckEqualsHelper(const char* file,
int line,
const char* expected_source,
const char* expected,
const char* value_source,
const char* value) {
if (V8_UNLIKELY((expected == NULL && value != NULL) ||
(expected != NULL && value == NULL) ||
(expected != NULL && value != NULL &&
strcmp(expected, value) != 0))) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %s\n# Found: %s",
expected_source, value_source, expected, value);
}
}
// Commonly used instantiations of MakeCheckOpString<>. Explicitly instantiated
// in logging.cc.
#define DEFINE_MAKE_CHECK_OP_STRING(type) \
extern template std::string* MakeCheckOpString<type, type>( \
type const&, type const&, char const*);
DEFINE_MAKE_CHECK_OP_STRING(int)
DEFINE_MAKE_CHECK_OP_STRING(long)                // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(long long)           // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned long)       // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(unsigned long long)  // NOLINT(runtime/int)
DEFINE_MAKE_CHECK_OP_STRING(char const*)
DEFINE_MAKE_CHECK_OP_STRING(void const*)
#undef DEFINE_MAKE_CHECK_OP_STRING

inline void CheckNonEqualsHelper(const char* file,
int line,
const char* expected_source,
const char* expected,
const char* value_source,
const char* value) {
if (V8_UNLIKELY(expected == value || (expected != NULL && value != NULL &&
strcmp(expected, value) == 0))) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %s",
expected_source, value_source, value);
}
}
// Helper functions for CHECK_OP macro.
// The (int, int) specialization works around the issue that the compiler
// will not instantiate the template version of the function on values of
// unnamed enum type - see comment below.
// The (float, float) and (double, double) instantiations are explicitly
// externalized to ensure proper 32/64-bit comparisons on x86.
#define DEFINE_CHECK_OP_IMPL(NAME, op) \
template <typename Lhs, typename Rhs> \
V8_INLINE std::string* Check##NAME##Impl(Lhs const& lhs, Rhs const& rhs, \
char const* msg) { \
return V8_LIKELY(lhs op rhs) ? nullptr : MakeCheckOpString(lhs, rhs, msg); \
} \
V8_INLINE std::string* Check##NAME##Impl(int lhs, int rhs, \
char const* msg) { \
return V8_LIKELY(lhs op rhs) ? nullptr : MakeCheckOpString(lhs, rhs, msg); \
} \
extern template std::string* Check##NAME##Impl<float, float>( \
float const& lhs, float const& rhs, char const* msg); \
extern template std::string* Check##NAME##Impl<double, double>( \
double const& lhs, double const& rhs, char const* msg);
DEFINE_CHECK_OP_IMPL(EQ, ==)
DEFINE_CHECK_OP_IMPL(NE, !=)
DEFINE_CHECK_OP_IMPL(LE, <=)
DEFINE_CHECK_OP_IMPL(LT, < )
DEFINE_CHECK_OP_IMPL(GE, >=)
DEFINE_CHECK_OP_IMPL(GT, > )
#undef DEFINE_CHECK_OP_IMPL

#define CHECK_EQ(lhs, rhs) CHECK_OP(EQ, ==, lhs, rhs)
#define CHECK_NE(lhs, rhs) CHECK_OP(NE, !=, lhs, rhs)
#define CHECK_LE(lhs, rhs) CHECK_OP(LE, <=, lhs, rhs)
#define CHECK_LT(lhs, rhs) CHECK_OP(LT, <, lhs, rhs)
#define CHECK_GE(lhs, rhs) CHECK_OP(GE, >=, lhs, rhs)
#define CHECK_GT(lhs, rhs) CHECK_OP(GT, >, lhs, rhs)
#define CHECK_NULL(val) CHECK((val) == nullptr)
#define CHECK_NOT_NULL(val) CHECK((val) != nullptr)
#define CHECK_IMPLIES(lhs, rhs) CHECK(!(lhs) || (rhs))

// Helper function used by the CHECK function when given pointer
// arguments. Should not be called directly.
inline void CheckEqualsHelper(const char* file,
int line,
const char* expected_source,
const void* expected,
const char* value_source,
const void* value) {
if (V8_UNLIKELY(expected != value)) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %p\n# Found: %p",
expected_source, value_source,
expected, value);
}
}

inline void CheckNonEqualsHelper(const char* file,
int line,
const char* expected_source,
const void* expected,
const char* value_source,
const void* value) {
if (V8_UNLIKELY(expected == value)) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %p",
expected_source, value_source, value);
}
}

inline void CheckNonEqualsHelper(const char* file,
int line,
const char* expected_source,
int64_t expected,
const char* value_source,
int64_t value) {
if (V8_UNLIKELY(expected == value)) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %f\n# Found: %f",
expected_source, value_source, expected, value);
}
}

#define CHECK_EQ(expected, value) CheckEqualsHelper(__FILE__, __LINE__, \
#expected, expected, #value, value)

#define CHECK_NE(unexpected, value) CheckNonEqualsHelper(__FILE__, __LINE__, \
#unexpected, unexpected, #value, value)

#define CHECK_GT(a, b) CHECK((a) > (b))
#define CHECK_GE(a, b) CHECK((a) >= (b))
#define CHECK_LT(a, b) CHECK((a) < (b))
#define CHECK_LE(a, b) CHECK((a) <= (b))

namespace v8 {
namespace base {

// Exposed for making debugging easier (to see where your function is being
// called, just add a call to DumpBacktrace).
void DumpBacktrace();

} } // namespace v8::base
} // namespace base
} // namespace v8

// The DCHECK macro is equivalent to CHECK except that it only
// generates code in debug builds.
// TODO(bmeurer): DCHECK_RESULT(expr) must die!
#ifdef DEBUG
#define DCHECK_RESULT(expr) CHECK(expr)
#define DCHECK(condition) CHECK(condition)

@@ -219,6 +158,9 @@ void DumpBacktrace();
#define DCHECK_GE(v1, v2) CHECK_GE(v1, v2)
#define DCHECK_LT(v1, v2) CHECK_LT(v1, v2)
#define DCHECK_LE(v1, v2) CHECK_LE(v1, v2)
#define DCHECK_NULL(val) CHECK_NULL(val)
#define DCHECK_NOT_NULL(val) CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2) CHECK_IMPLIES(v1, v2)
#else
#define DCHECK_RESULT(expr) (expr)
#define DCHECK(condition) ((void) 0)

@@ -227,8 +169,9 @@ void DumpBacktrace();
#define DCHECK_GE(v1, v2) ((void) 0)
#define DCHECK_LT(v1, v2) ((void) 0)
#define DCHECK_LE(v1, v2) ((void) 0)
#define DCHECK_NULL(val) ((void) 0)
#define DCHECK_NOT_NULL(val) ((void) 0)
#define DCHECK_IMPLIES(v1, v2) ((void) 0)
#endif

#define DCHECK_NOT_NULL(p) DCHECK_NE(NULL, p)

#endif // V8_BASE_LOGGING_H_
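To make the control flow of the new machinery concrete, here is a small self-contained sketch (simplified stand-ins, not the V8 implementation; the real macro calls V8_Fatal rather than printing and continuing): the Impl function returns nullptr on success, or a heap-allocated message built by MakeCheckOpString on failure, which the macro then reports and deletes.

#include <iostream>
#include <sstream>
#include <string>

// Simplified stand-ins for the machinery above.
template <typename Lhs, typename Rhs>
std::string* MakeCheckOpString(Lhs const& lhs, Rhs const& rhs, char const* msg) {
  std::ostringstream ss;
  ss << msg << " (" << lhs << " vs. " << rhs << ")";
  return new std::string(ss.str());
}

template <typename Lhs, typename Rhs>
std::string* CheckEQImpl(Lhs const& lhs, Rhs const& rhs, char const* msg) {
  return (lhs == rhs) ? nullptr : MakeCheckOpString(lhs, rhs, msg);
}

#define CHECK_EQ(lhs, rhs)                                                  \
  do {                                                                      \
    if (std::string* _msg = CheckEQImpl((lhs), (rhs), #lhs " == " #rhs)) {  \
      std::cerr << "Check failed: " << *_msg << "." << std::endl;           \
      delete _msg;                                                          \
    }                                                                       \
  } while (0)

int main() {
  CHECK_EQ(2 + 2, 4);  // passes silently
  CHECK_EQ(1, 2);      // prints: Check failed: 1 == 2 (1 vs. 2).
}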
@@ -182,7 +182,7 @@ void ConditionVariable::NativeHandle::Post(Event* event, bool result) {

// Remove the event from the wait list.
for (Event** wep = &waitlist_;; wep = &(*wep)->next_) {
DCHECK_NE(NULL, *wep);
DCHECK(*wep);
if (*wep == event) {
*wep = event->next_;
break;

@@ -13,7 +13,8 @@
#include <mach/mach_time.h>
#endif

#include <string.h>
#include <cstring>
#include <ostream>

#if V8_OS_WIN
#include "src/base/lazy-instance.h"

@@ -355,6 +356,11 @@ double Time::ToJsTime() const {
}

std::ostream& operator<<(std::ostream& os, const Time& time) {
return os << time.ToJsTime();
}

#if V8_OS_WIN

class TickClock {

@@ -5,7 +5,8 @@
#ifndef V8_BASE_PLATFORM_TIME_H_
#define V8_BASE_PLATFORM_TIME_H_

#include <time.h>
#include <ctime>
#include <iosfwd>
#include <limits>

#include "src/base/macros.h"

@@ -280,6 +281,8 @@ class Time FINAL {
int64_t us_;
};

std::ostream& operator<<(std::ostream&, const Time&);

inline Time operator+(const TimeDelta& delta, const Time& time) {
return time + delta;
}

@@ -1491,7 +1491,7 @@ static Handle<JSObject> ResolveBuiltinIdHolder(Handle<Context> native_context,
.ToHandleChecked());
}
const char* inner = period_pos + 1;
DCHECK_EQ(NULL, strchr(inner, '.'));
DCHECK(!strchr(inner, '.'));
Vector<const char> property(holder_expr,
static_cast<int>(period_pos - holder_expr));
Handle<String> property_string = factory->InternalizeUtf8String(property);
@@ -4,85 +4,6 @@

#include "src/checks.h"

#include "src/v8.h"

namespace v8 {
namespace internal {

intptr_t HeapObjectTagMask() { return kHeapObjectTagMask; }

} } // namespace v8::internal

static bool CheckEqualsStrict(volatile double* exp, volatile double* val) {
v8::internal::DoubleRepresentation exp_rep(*exp);
v8::internal::DoubleRepresentation val_rep(*val);
if (std::isnan(exp_rep.value) && std::isnan(val_rep.value)) return true;
return exp_rep.bits == val_rep.bits;
}

void CheckEqualsHelper(const char* file, int line, const char* expected_source,
double expected, const char* value_source,
double value) {
// Force values to 64 bit memory to truncate 80 bit precision on IA32.
volatile double* exp = new double[1];
*exp = expected;
volatile double* val = new double[1];
*val = value;
if (!CheckEqualsStrict(exp, val)) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %f\n# Found: %f",
expected_source, value_source, *exp, *val);
}
delete[] exp;
delete[] val;
}

void CheckNonEqualsHelper(const char* file, int line,
const char* expected_source, double expected,
const char* value_source, double value) {
// Force values to 64 bit memory to truncate 80 bit precision on IA32.
volatile double* exp = new double[1];
*exp = expected;
volatile double* val = new double[1];
*val = value;
if (CheckEqualsStrict(exp, val)) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %f\n# Found: %f",
expected_source, value_source, *exp, *val);
}
delete[] exp;
delete[] val;
}

void CheckEqualsHelper(const char* file,
int line,
const char* expected_source,
v8::Handle<v8::Value> expected,
const char* value_source,
v8::Handle<v8::Value> value) {
if (!expected->Equals(value)) {
v8::String::Utf8Value value_str(value);
v8::String::Utf8Value expected_str(expected);
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %s\n# Found: %s",
expected_source, value_source, *expected_str, *value_str);
}
}

void CheckNonEqualsHelper(const char* file,
int line,
const char* unexpected_source,
v8::Handle<v8::Value> unexpected,
const char* value_source,
v8::Handle<v8::Value> value) {
if (unexpected->Equals(value)) {
v8::String::Utf8Value value_str(value);
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %s",
unexpected_source, value_source, *value_str);
}
}
namespace internal {} // namespace internal
} // namespace v8

src/checks.h
@@ -5,6 +5,7 @@
#ifndef V8_CHECKS_H_
#define V8_CHECKS_H_

#include "include/v8.h"
#include "src/base/logging.h"

namespace v8 {

@@ -14,8 +15,6 @@ template <class T> class Handle;

namespace internal {

intptr_t HeapObjectTagMask();

#ifdef ENABLE_SLOW_DCHECKS
#define SLOW_DCHECK(condition) \
CHECK(!v8::internal::FLAG_enable_slow_asserts || (condition))

@@ -27,30 +26,11 @@ const bool FLAG_enable_slow_asserts = false;

} } // namespace v8::internal

void CheckNonEqualsHelper(const char* file, int line,
const char* expected_source, double expected,
const char* value_source, double value);

void CheckEqualsHelper(const char* file, int line, const char* expected_source,
double expected, const char* value_source, double value);

void CheckNonEqualsHelper(const char* file, int line,
const char* unexpected_source,
v8::Handle<v8::Value> unexpected,
const char* value_source,
v8::Handle<v8::Value> value);

void CheckEqualsHelper(const char* file,
int line,
const char* expected_source,
v8::Handle<v8::Value> expected,
const char* value_source,
v8::Handle<v8::Value> value);

#define DCHECK_TAG_ALIGNED(address) \
DCHECK((reinterpret_cast<intptr_t>(address) & HeapObjectTagMask()) == 0)
DCHECK((reinterpret_cast<intptr_t>(address) & \
::v8::internal::kHeapObjectTagMask) == 0)

#define DCHECK_SIZE_TAG_ALIGNED(size) DCHECK((size & HeapObjectTagMask()) == 0)
#define DCHECK_SIZE_TAG_ALIGNED(size) \
DCHECK((size & ::v8::internal::kHeapObjectTagMask) == 0)

#endif // V8_CHECKS_H_
@@ -208,7 +208,7 @@ CompilationInfo::~CompilationInfo() {
// Check that no dependent maps have been added or added dependent maps have
// been rolled back or committed.
for (int i = 0; i < DependentCode::kGroupCount; i++) {
DCHECK_EQ(NULL, dependencies_[i]);
DCHECK(!dependencies_[i]);
}
#endif // DEBUG
}

@@ -745,7 +745,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,

// Materialize a full 32-bit 1 or 0 value. The result register is always the
// last output of the instruction.
DCHECK_NE(0, instr->OutputCount());
DCHECK_NE(0u, instr->OutputCount());
Register reg = i.OutputRegister(instr->OutputCount() - 1);
Condition cc = FlagsConditionToCondition(condition);
__ mov(reg, Operand(0));

@@ -233,8 +233,8 @@ void VisitBinop(InstructionSelector* selector, Node* node,
outputs[output_count++] = g.DefineAsRegister(cont->result());
}

DCHECK_NE(0, input_count);
DCHECK_NE(0, output_count);
DCHECK_NE(0u, input_count);
DCHECK_NE(0u, output_count);
DCHECK_GE(arraysize(inputs), input_count);
DCHECK_GE(arraysize(outputs), output_count);
DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

@@ -448,8 +448,8 @@ void EmitBic(InstructionSelector* selector, Node* node, Node* left,

void EmitUbfx(InstructionSelector* selector, Node* node, Node* left,
uint32_t lsb, uint32_t width) {
DCHECK_LE(1, width);
DCHECK_LE(width, 32 - lsb);
DCHECK_LE(1u, width);
DCHECK_LE(width, 32u - lsb);
ArmOperandGenerator g(selector);
selector->Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(left),
g.TempImmediate(lsb), g.TempImmediate(width));

@@ -481,7 +481,7 @@ void InstructionSelector::VisitWord32And(Node* node) {
uint32_t msb = base::bits::CountLeadingZeros32(value);
// Try to interpret this AND as UBFX.
if (IsSupported(ARMv7) && width != 0 && msb + width == 32) {
DCHECK_EQ(0, base::bits::CountTrailingZeros32(value));
DCHECK_EQ(0u, base::bits::CountTrailingZeros32(value));
if (m.left().IsWord32Shr()) {
Int32BinopMatcher mleft(m.left().node());
if (mleft.right().IsInRange(0, 31)) {

@@ -550,10 +550,11 @@ void InstructionSelector::VisitWord32Xor(Node* node) {
}

namespace {

template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
TryMatchShift try_match_shift,
FlagsContinuation* cont) {
void VisitShift(InstructionSelector* selector, Node* node,
TryMatchShift try_match_shift, FlagsContinuation* cont) {
ArmOperandGenerator g(selector);
InstructionCode opcode = kArmMov;
InstructionOperand* inputs[4];

@@ -573,8 +574,8 @@ static inline void VisitShift(InstructionSelector* selector, Node* node,
outputs[output_count++] = g.DefineAsRegister(cont->result());
}

DCHECK_NE(0, input_count);
DCHECK_NE(0, output_count);
DCHECK_NE(0u, input_count);
DCHECK_NE(0u, output_count);
DCHECK_GE(arraysize(inputs), input_count);
DCHECK_GE(arraysize(outputs), output_count);
DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

@@ -586,12 +587,14 @@ static inline void VisitShift(InstructionSelector* selector, Node* node,

template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
void VisitShift(InstructionSelector* selector, Node* node,
TryMatchShift try_match_shift) {
FlagsContinuation cont;
VisitShift(selector, node, try_match_shift, &cont);
}

} // namespace

void InstructionSelector::VisitWord32Shl(Node* node) {
VisitShift(this, node, TryMatchLSL);

@@ -603,7 +606,7 @@ void InstructionSelector::VisitWord32Shr(Node* node) {
Int32BinopMatcher m(node);
if (IsSupported(ARMv7) && m.left().IsWord32And() &&
m.right().IsInRange(0, 31)) {
int32_t lsb = m.right().Value();
uint32_t lsb = m.right().Value();
Int32BinopMatcher mleft(m.left().node());
if (mleft.right().HasValue()) {
uint32_t value = (mleft.right().Value() >> lsb) << lsb;

@@ -1123,7 +1126,7 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
outputs[output_count++] = g.DefineAsRegister(cont->result());
}

DCHECK_NE(0, input_count);
DCHECK_NE(0u, input_count);
DCHECK_GE(arraysize(inputs), input_count);
DCHECK_GE(arraysize(outputs), output_count);

@@ -846,7 +846,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,

// Materialize a full 64-bit 1 or 0 value. The result register is always the
// last output of the instruction.
DCHECK_NE(0, instr->OutputCount());
DCHECK_NE(0u, instr->OutputCount());
Register reg = i.OutputRegister(instr->OutputCount() - 1);
Condition cc = FlagsConditionToCondition(condition);
__ Cset(reg, cc);

@@ -215,8 +215,8 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
outputs[output_count++] = g.DefineAsRegister(cont->result());
}

DCHECK_NE(0, input_count);
DCHECK_NE(0, output_count);
DCHECK_NE(0u, input_count);
DCHECK_NE(0u, output_count);
DCHECK_GE(arraysize(inputs), input_count);
DCHECK_GE(arraysize(outputs), output_count);

@@ -507,7 +507,7 @@ void InstructionSelector::VisitWord32And(Node* node) {
uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
// The mask must be contiguous, and occupy the least-significant bits.
DCHECK_EQ(0, base::bits::CountTrailingZeros32(mask));
DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));

// Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
// significant bits.

@@ -544,7 +544,7 @@ void InstructionSelector::VisitWord64And(Node* node) {
uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
// The mask must be contiguous, and occupy the least-significant bits.
DCHECK_EQ(0, base::bits::CountTrailingZeros64(mask));
DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));

// Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
// significant bits.

@@ -628,7 +628,7 @@ void InstructionSelector::VisitWord32Shr(Node* node) {
Arm64OperandGenerator g(this);
Int32BinopMatcher m(node);
if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
int32_t lsb = m.right().Value();
uint32_t lsb = m.right().Value();
Int32BinopMatcher mleft(m.left().node());
if (mleft.right().HasValue()) {
uint32_t mask = (mleft.right().Value() >> lsb) << lsb;

@@ -653,7 +653,7 @@ void InstructionSelector::VisitWord64Shr(Node* node) {
Arm64OperandGenerator g(this);
Int64BinopMatcher m(node);
if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
int64_t lsb = m.right().Value();
uint64_t lsb = m.right().Value();
Int64BinopMatcher mleft(m.left().node());
if (mleft.right().HasValue()) {
// Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is

@@ -281,7 +281,7 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
for (int i = 0; i < deopt_count; i++) {
DeoptimizationState* deoptimization_state = deoptimization_states_[i];
data->SetAstId(i, deoptimization_state->bailout_id());
CHECK_NE(NULL, deoptimization_states_[i]);
CHECK(deoptimization_states_[i]);
data->SetTranslationIndex(
i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
data->SetArgumentsStackHeight(i, Smi::FromInt(0));

@@ -296,7 +296,7 @@ class ControlReducerImpl {
for (size_t j = 0; j < nodes.size(); j++) {
Node* node = nodes[j];
for (Node* const input : node->inputs()) {
CHECK_NE(NULL, input);
CHECK(input);
}
for (Node* const use : node->uses()) {
CHECK(marked.IsReachableFromEnd(use));

@@ -319,7 +319,7 @@ class ControlReducerImpl {

// Recurse on an input if necessary.
for (Node* const input : node->inputs()) {
CHECK_NE(NULL, input);
DCHECK(input);
if (Recurse(input)) return;
}

@@ -768,7 +768,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
// Materialize a full 32-bit 1 or 0 value. The result register is always the
// last output of the instruction.
Label check;
DCHECK_NE(0, instr->OutputCount());
DCHECK_NE(0u, instr->OutputCount());
Register reg = i.OutputRegister(instr->OutputCount() - 1);
Condition cc = no_condition;
switch (condition) {

@@ -370,8 +370,8 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
outputs[output_count++] = g.DefineAsRegister(cont->result());
}

DCHECK_NE(0, input_count);
DCHECK_NE(0, output_count);
DCHECK_NE(0u, input_count);
DCHECK_NE(0u, output_count);
DCHECK_GE(arraysize(inputs), input_count);
DCHECK_GE(arraysize(outputs), output_count);

@@ -40,7 +40,7 @@ void InstructionSelector::SelectInstructions() {
BasicBlockVector* blocks = schedule()->rpo_order();
for (auto const block : *blocks) {
if (!block->IsLoopHeader()) continue;
DCHECK_LE(2, block->PredecessorCount());
DCHECK_LE(2u, block->PredecessorCount());
for (Node* const phi : *block) {
if (phi->opcode() != IrOpcode::kPhi) continue;

@@ -342,7 +342,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
if (use->opcode() != IrOpcode::kProjection) continue;
size_t const index = ProjectionIndexOf(use->op());
DCHECK_LT(index, buffer->output_nodes.size());
DCHECK_EQ(nullptr, buffer->output_nodes[index]);
DCHECK(!buffer->output_nodes[index]);
buffer->output_nodes[index] = use;
}
}

@@ -435,7 +435,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) {
buffer->pushed_nodes.resize(stack_index + 1, NULL);
}
DCHECK_EQ(NULL, buffer->pushed_nodes[stack_index]);
DCHECK(!buffer->pushed_nodes[stack_index]);
buffer->pushed_nodes[stack_index] = *iter;
pushed_count++;
} else {

@@ -450,7 +450,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,

void InstructionSelector::VisitBlock(BasicBlock* block) {
DCHECK_EQ(NULL, current_block_);
DCHECK(!current_block_);
current_block_ = block;
int current_block_end = static_cast<int>(instructions_.size());

@@ -447,7 +447,7 @@ InstructionBlocks* InstructionSequence::InstructionBlocksFor(
size_t rpo_number = 0;
for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
DCHECK_EQ(NULL, (*blocks)[rpo_number]);
DCHECK(!(*blocks)[rpo_number]);
DCHECK((*it)->GetRpoNumber().ToSize() == rpo_number);
(*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
}

@@ -520,7 +520,7 @@ class Instruction : public ZoneObject {

void set_pointer_map(PointerMap* map) {
DCHECK(NeedsPointerMap());
DCHECK_EQ(NULL, pointer_map_);
DCHECK(!pointer_map_);
pointer_map_ = map;
}

@@ -84,7 +84,7 @@ class Inlinee {

// Counts only formal parameters.
size_t formal_parameters() {
DCHECK_GE(total_parameters(), 3);
DCHECK_GE(total_parameters(), 3u);
return total_parameters() - 3;
}

@@ -176,7 +176,7 @@ class CopyVisitor : public NullNodeVisitor {
if (copy == NULL) {
copy = GetSentinel(original);
}
DCHECK_NE(NULL, copy);
DCHECK(copy);
return copy;
}

@@ -193,7 +193,7 @@ class CopyVisitor : public NullNodeVisitor {
Node* sentinel = sentinels_[id];
if (sentinel == NULL) continue;
Node* copy = copies_[id];
DCHECK_NE(NULL, copy);
DCHECK(copy);
sentinel->ReplaceUses(copy);
}
}

@@ -268,7 +268,7 @@ PeeledIteration* LoopPeeler::Peel(Graph* graph, CommonOperatorBuilder* common,
}
}
// There should be a merge or a return for each exit.
CHECK_NE(NULL, found);
CHECK(found);
}
// Return nodes, the end merge, and the phis associated with the end merge
// must be duplicated as well.

@@ -103,7 +103,7 @@ Node* MachineOperatorReducer::Int32Div(Node* dividend, int32_t divisor) {

Node* MachineOperatorReducer::Uint32Div(Node* dividend, uint32_t divisor) {
DCHECK_LT(0, divisor);
DCHECK_LT(0u, divisor);
// If the divisor is even, we can avoid using the expensive fixup by shifting
// the dividend upfront.
unsigned const shift = base::bits::CountTrailingZeros32(divisor);

@@ -115,7 +115,7 @@ Node* MachineOperatorReducer::Uint32Div(Node* dividend, uint32_t divisor) {
Node* quotient = graph()->NewNode(machine()->Uint32MulHigh(), dividend,
Uint32Constant(mag.multiplier));
if (mag.add) {
DCHECK_LE(1, mag.shift);
DCHECK_LE(1u, mag.shift);
quotient = Word32Shr(
Int32Add(Word32Shr(Int32Sub(dividend, quotient), 1), quotient),
mag.shift - 1);
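As a worked illustration of the mag.add fixup path above (the constants below are the textbook magic numbers for unsigned division by 7 and are assumptions for illustration, not values taken from this change): when the true magic multiplier needs 33 bits, the quotient is recovered from the multiply-high result by folding in half of the remainder term before the final shift, exactly the Shr(Add(Shr(Sub(n, q), 1), q), shift - 1) shape used above.

#include <cassert>
#include <cstdint>

// Unsigned division by 7 via multiply-high, mirroring the mag.add fixup.
uint32_t DivideBy7(uint32_t n) {
  const uint32_t kMagic = 0x24924925u;  // low 32 bits of ceil(2^35 / 7)
  const unsigned kShift = 3;
  uint32_t hi = static_cast<uint32_t>((static_cast<uint64_t>(n) * kMagic) >> 32);
  // The "add" fixup: the full multiplier needs 33 bits, so add back (n - hi) / 2
  // before shifting by (kShift - 1) instead of kShift.
  return (((n - hi) >> 1) + hi) >> (kShift - 1);
}

int main() {
  for (uint32_t n = 0; n < 1000000; ++n) assert(DivideBy7(n) == n / 7);
}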
@@ -520,7 +520,7 @@ Reduction MachineOperatorReducer::ReduceInt32Div(Node* node) {
Node* quotient = dividend;
if (base::bits::IsPowerOfTwo32(Abs(divisor))) {
uint32_t const shift = WhichPowerOf2Abs(divisor);
DCHECK_NE(0, shift);
DCHECK_NE(0u, shift);
if (shift > 1) {
quotient = Word32Sar(quotient, 31);
}
@@ -83,11 +83,11 @@ static MoveOperands* PrepareInsertAfter(ParallelMove* left, MoveOperands* move,
for (auto curr = move_ops->begin(); curr != move_ops->end(); ++curr) {
if (curr->IsEliminated()) continue;
if (curr->destination()->Equals(move->source())) {
DCHECK_EQ(nullptr, replacement);
DCHECK(!replacement);
replacement = curr;
if (to_eliminate != nullptr) break;
} else if (curr->destination()->Equals(move->destination())) {
DCHECK_EQ(nullptr, to_eliminate);
DCHECK(!to_eliminate);
to_eliminate = curr;
if (replacement != nullptr) break;
}

@@ -134,7 +134,7 @@ void Node::ReplaceUses(Node* replace_to) {
use->from->GetInputRecordPtr(use->input_index)->to = replace_to;
}
if (!replace_to->last_use_) {
DCHECK_EQ(nullptr, replace_to->first_use_);
DCHECK(!replace_to->first_use_);
replace_to->first_use_ = first_use_;
replace_to->last_use_ = last_use_;
} else if (first_use_) {

@@ -40,18 +40,18 @@ bool OsrHelper::Deconstruct(JSGraph* jsgraph, CommonOperatorBuilder* common,

if (osr_loop_entry == nullptr) {
// No OSR entry found, do nothing.
CHECK_NE(nullptr, osr_normal_entry);
CHECK(osr_normal_entry);
return true;
}

for (Node* use : osr_loop_entry->uses()) {
if (use->opcode() == IrOpcode::kLoop) {
CHECK_EQ(nullptr, osr_loop); // should be only one OSR loop.
CHECK(!osr_loop); // should be only one OSR loop.
osr_loop = use; // found the OSR loop.
}
}

CHECK_NE(nullptr, osr_loop); // Should have found the OSR loop.
CHECK(osr_loop); // Should have found the OSR loop.

// Analyze the graph to determine how deeply nested the OSR loop is.
LoopTree* loop_tree = LoopFinder::BuildLoopTree(graph, tmp_zone);

@@ -145,19 +145,19 @@ class PipelineData {

LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
DCHECK_EQ(nullptr, loop_assignment_);
DCHECK(!loop_assignment_);
loop_assignment_ = loop_assignment;
}

Node* context_node() const { return context_node_; }
void set_context_node(Node* context_node) {
DCHECK_EQ(nullptr, context_node_);
DCHECK(!context_node_);
context_node_ = context_node;
}

Schedule* schedule() const { return schedule_; }
void set_schedule(Schedule* schedule) {
DCHECK_EQ(nullptr, schedule_);
DCHECK(!schedule_);
schedule_ = schedule;
}

@@ -194,7 +194,7 @@ class PipelineData {
}

void InitializeInstructionSequence() {
DCHECK_EQ(nullptr, sequence_);
DCHECK(!sequence_);
InstructionBlocks* instruction_blocks =
InstructionSequence::InstructionBlocksFor(instruction_zone(),
schedule());

@@ -205,8 +205,8 @@ class PipelineData {
void InitializeRegisterAllocator(Zone* local_zone,
const RegisterConfiguration* config,
const char* debug_name) {
DCHECK_EQ(nullptr, register_allocator_);
DCHECK_EQ(nullptr, frame_);
DCHECK(!register_allocator_);
DCHECK(!frame_);
frame_ = new (instruction_zone()) Frame();
register_allocator_ = new (instruction_zone())
RegisterAllocator(config, local_zone, frame(), sequence(), debug_name);

@@ -20,7 +20,7 @@ static void VerifyGapEmpty(const GapInstruction* gap) {
i <= GapInstruction::LAST_INNER_POSITION; i++) {
GapInstruction::InnerPosition inner_pos =
static_cast<GapInstruction::InnerPosition>(i);
CHECK_EQ(NULL, gap->GetParallelMove(inner_pos));
CHECK(!gap->GetParallelMove(inner_pos));
}
}

@@ -432,14 +432,14 @@ class OperandMap : public ZoneObject {
for (; p != nullptr; p = p->first_pred_phi) {
if (p->virtual_register == v->use_vreg) break;
}
CHECK_NE(nullptr, p);
CHECK(p);
}
// Mark the use.
it->second->use_vreg = use_vreg;
return;
}
// Use of a phi value without definition.
CHECK(false);
UNREACHABLE();
}

private:

@@ -183,7 +183,7 @@ void LiveRange::SetSpillOperand(InstructionOperand* operand) {

void LiveRange::SetSpillRange(SpillRange* spill_range) {
DCHECK(HasNoSpillType() || HasSpillRange());
DCHECK_NE(spill_range, nullptr);
DCHECK(spill_range);
spill_type_ = SpillType::kSpillRange;
spill_range_ = spill_range;
}

@@ -266,7 +266,7 @@ class CFGBuilder : public ZoneObject {
// single-exit region that makes up a minimal component to be scheduled.
if (IsSingleEntrySingleExitRegion(node, exit)) {
Trace("Found SESE at #%d:%s\n", node->id(), node->op()->mnemonic());
DCHECK_EQ(NULL, component_entry_);
DCHECK(!component_entry_);
component_entry_ = node;
continue;
}

@@ -276,7 +276,7 @@ class CFGBuilder : public ZoneObject {
Queue(node->InputAt(i));
}
}
DCHECK_NE(NULL, component_entry_);
DCHECK(component_entry_);

for (NodeVector::iterator i = control_.begin(); i != control_.end(); ++i) {
ConnectBlocks(*i); // Connect block to its predecessor/successors.

@@ -370,16 +370,16 @@ class CFGBuilder : public ZoneObject {
buffer[1] = NULL;
for (Node* use : node->uses()) {
if (use->opcode() == true_opcode) {
DCHECK_EQ(NULL, buffer[0]);
DCHECK(!buffer[0]);
buffer[0] = use;
}
if (use->opcode() == false_opcode) {
DCHECK_EQ(NULL, buffer[1]);
DCHECK(!buffer[1]);
buffer[1] = use;
}
}
DCHECK_NE(NULL, buffer[0]);
DCHECK_NE(NULL, buffer[1]);
DCHECK(buffer[0]);
DCHECK(buffer[1]);
}

void CollectSuccessorBlocks(Node* node, BasicBlock** buffer,

@@ -448,7 +448,7 @@ class CFGBuilder : public ZoneObject {
}

void TraceConnect(Node* node, BasicBlock* block, BasicBlock* succ) {
DCHECK_NE(NULL, block);
DCHECK(block);
if (succ == NULL) {
Trace("Connect #%d:%s, B%d -> end\n", node->id(), node->op()->mnemonic(),
block->id().ToInt());

@@ -533,7 +533,7 @@ class SpecialRPONumberer : public ZoneObject {
// that is for the graph spanned between the schedule's start and end blocks.
void ComputeSpecialRPO() {
DCHECK(schedule_->end()->SuccessorCount() == 0);
DCHECK_EQ(NULL, order_); // Main order does not exist yet.
DCHECK(!order_); // Main order does not exist yet.
ComputeAndInsertSpecialRPO(schedule_->start(), schedule_->end());
}

@@ -541,7 +541,7 @@ class SpecialRPONumberer : public ZoneObject {
// that is for the graph spanned between the given {entry} and {end} blocks,
// then updates the existing ordering with this new information.
void UpdateSpecialRPO(BasicBlock* entry, BasicBlock* end) {
DCHECK_NE(NULL, order_); // Main order to be updated is present.
DCHECK(order_); // Main order to be updated is present.
ComputeAndInsertSpecialRPO(entry, end);
}

@@ -763,8 +763,8 @@ void Verifier::Visitor::Check(Node* node) {

void Verifier::Run(Graph* graph, Typing typing) {
CHECK_NE(NULL, graph->start());
CHECK_NE(NULL, graph->end());
CHECK_NOT_NULL(graph->start());
CHECK_NOT_NULL(graph->end());
Zone zone;
Visitor visitor(&zone, typing);
for (Node* node : AllNodes(&zone, graph).live) visitor.Check(node);

@@ -868,10 +868,10 @@ void ScheduleVerifier::Run(Schedule* schedule) {
BasicBlock* dom = block->dominator();
if (b == 0) {
// All blocks except start should have a dominator.
CHECK_EQ(NULL, dom);
CHECK_NULL(dom);
} else {
// Check that the immediate dominator appears somewhere before the block.
CHECK_NE(NULL, dom);
CHECK_NOT_NULL(dom);
CHECK_LT(dom->rpo_number(), block->rpo_number());
}
}

@@ -106,7 +106,7 @@ Zone* ZonePool::NewEmptyZone() {
zone = new Zone();
}
used_.push_back(zone);
DCHECK_EQ(0, zone->allocation_size());
DCHECK_EQ(0u, zone->allocation_size());
return zone;
}

@@ -129,7 +129,7 @@ void ZonePool::ReturnZone(Zone* zone) {
delete zone;
} else {
zone->DeleteAll();
DCHECK_EQ(0, zone->allocation_size());
DCHECK_EQ(0u, zone->allocation_size());
unused_.push_back(zone);
}
}
@@ -110,7 +110,7 @@ size_t Deoptimizer::GetMaxDeoptTableSize() {

Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
Deoptimizer* result = isolate->deoptimizer_data()->current_;
CHECK_NE(result, NULL);
CHECK_NOT_NULL(result);
result->DeleteFrameDescriptions();
isolate->deoptimizer_data()->current_ = NULL;
return result;

@@ -901,7 +901,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
bool is_bottommost = (0 == frame_index);
bool is_topmost = (output_count_ - 1 == frame_index);
CHECK(frame_index >= 0 && frame_index < output_count_);
CHECK_EQ(output_[frame_index], NULL);
CHECK_NULL(output_[frame_index]);
output_[frame_index] = output_frame;

// The top address for the bottommost output frame can be computed from

@@ -1060,7 +1060,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
}
CHECK_EQ(0, output_offset);
CHECK_EQ(0u, output_offset);

// Compute this frame's PC, state, and continuation.
Code* non_optimized_code = function->shared()->code();

@@ -1382,7 +1382,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
top_address + output_offset, output_offset, value);
}

CHECK_EQ(0, output_offset);
CHECK_EQ(0u, output_offset);

intptr_t pc = reinterpret_cast<intptr_t>(
construct_stub->instruction_start() +

@@ -1429,7 +1429,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,

// A frame for an accessor stub can not be the topmost or bottommost one.
CHECK(frame_index > 0 && frame_index < output_count_ - 1);
CHECK_EQ(output_[frame_index], NULL);
CHECK_NULL(output_[frame_index]);
output_[frame_index] = output_frame;

// The top address of the frame is computed from the previous frame's top and

@@ -1522,7 +1522,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
DoTranslateCommand(iterator, frame_index, output_offset);
}

CHECK_EQ(output_offset, 0);
CHECK_EQ(0u, output_offset);

Smi* offset = is_setter_stub_frame ?
isolate_->heap()->setter_stub_deopt_pc_offset() :

@@ -1735,7 +1735,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
}
}

CHECK_EQ(output_frame_offset, 0);
CHECK_EQ(0u, output_frame_offset);

if (!arg_count_known) {
CHECK_GE(arguments_length_offset, 0);

@@ -95,11 +95,10 @@ class Deoptimizer : public Malloced {
SOFT,
// This last bailout type is not really a bailout, but used by the
// debugger to deoptimize stack frames to allow inspection.
DEBUGGER
DEBUGGER,
kBailoutTypesWithCodeEntry = SOFT + 1
};

static const int kBailoutTypesWithCodeEntry = SOFT + 1;

struct Reason {
Reason(int r, const char* m, const char* d)
: raw_position(r), mnemonic(m), detail(d) {}
@ -5458,7 +5458,7 @@ bool Heap::CreateHeapObjects() {
|
||||
|
||||
// Create initial objects
|
||||
CreateInitialObjects();
|
||||
CHECK_EQ(0, gc_count_);
|
||||
CHECK_EQ(0u, gc_count_);
|
||||
|
||||
set_native_contexts_list(undefined_value());
|
||||
set_array_buffers_list(undefined_value());
|
||||
|
@ -1276,13 +1276,13 @@ class AllocationInfo {
|
||||
|
||||
INLINE(void set_top(Address top)) {
|
||||
SLOW_DCHECK(top == NULL ||
|
||||
(reinterpret_cast<intptr_t>(top) & HeapObjectTagMask()) == 0);
|
||||
(reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0);
|
||||
top_ = top;
|
||||
}
|
||||
|
||||
INLINE(Address top()) const {
|
||||
SLOW_DCHECK(top_ == NULL ||
|
||||
(reinterpret_cast<intptr_t>(top_) & HeapObjectTagMask()) == 0);
|
||||
(reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0);
|
||||
return top_;
|
||||
}
|
||||
|
||||
@ -1290,13 +1290,13 @@ class AllocationInfo {
|
||||
|
||||
INLINE(void set_limit(Address limit)) {
|
||||
SLOW_DCHECK(limit == NULL ||
|
||||
(reinterpret_cast<intptr_t>(limit) & HeapObjectTagMask()) == 0);
|
||||
(reinterpret_cast<intptr_t>(limit) & kHeapObjectTagMask) == 0);
|
||||
limit_ = limit;
|
||||
}
|
||||
|
||||
INLINE(Address limit()) const {
|
||||
SLOW_DCHECK(limit_ == NULL ||
|
||||
(reinterpret_cast<intptr_t>(limit_) & HeapObjectTagMask()) ==
|
||||
(reinterpret_cast<intptr_t>(limit_) & kHeapObjectTagMask) ==
|
||||
0);
|
||||
return limit_;
|
||||
}
|
||||
|
@ -373,7 +373,7 @@ class HCheckTable : public ZoneObject {
|
||||
instr->DeleteAndReplaceWith(entry->check_);
|
||||
INC_STAT(redundant_);
|
||||
} else if (entry->state_ == HCheckTableEntry::UNCHECKED_STABLE) {
|
||||
DCHECK_EQ(NULL, entry->check_);
|
||||
DCHECK_NULL(entry->check_);
|
||||
TRACE(("Marking redundant CheckMaps #%d at B%d as stability check\n",
|
||||
instr->id(), instr->block()->block_id()));
|
||||
instr->set_maps(entry->maps_->Copy(graph->zone()));
|
||||
@ -684,14 +684,14 @@ class HCheckTable : public ZoneObject {
|
||||
bool compact = false;
|
||||
for (int i = 0; i < size_; i++) {
|
||||
HCheckTableEntry* entry = &entries_[i];
|
||||
DCHECK(entry->object_ != NULL);
|
||||
DCHECK_NOT_NULL(entry->object_);
|
||||
if (phase_->aliasing_->MayAlias(entry->object_, object)) {
|
||||
entry->object_ = NULL;
|
||||
compact = true;
|
||||
}
|
||||
}
|
||||
if (compact) Compact();
|
||||
DCHECK(Find(object) == NULL);
|
||||
DCHECK_NULL(Find(object));
|
||||
}
|
||||
|
||||
void Compact() {
|
||||
|
@ -8721,7 +8721,7 @@ bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
|
||||
case kCallApiGetter:
|
||||
// Receiver and prototype chain cannot have changed.
|
||||
DCHECK_EQ(0, argc);
|
||||
DCHECK_EQ(NULL, receiver);
|
||||
DCHECK_NULL(receiver);
|
||||
// Receiver is on expression stack.
|
||||
receiver = Pop();
|
||||
Add<HPushArguments>(receiver);
|
||||
@ -8731,7 +8731,7 @@ bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
is_store = true;
// Receiver and prototype chain cannot have changed.
DCHECK_EQ(1, argc);
DCHECK_EQ(NULL, receiver);
DCHECK_NULL(receiver);
// Receiver and value are on expression stack.
HValue* value = Pop();
receiver = Pop();

@ -11812,7 +11812,7 @@ void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {

void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
DCHECK(call->arguments()->length() == 2);
DCHECK_NE(NULL, call->arguments()->at(1)->AsLiteral());
DCHECK_NOT_NULL(call->arguments()->at(1)->AsLiteral());
Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
HValue* date = Pop();

@ -1706,7 +1706,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be equal if the other is a HeapNumber. If so, use the slow case.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(0, Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
__ mov(ecx, Immediate(kSmiTagMask));
__ and_(ecx, eax);
__ test(ecx, edx);

@ -3708,7 +3708,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK(args->length() == 2);
DCHECK_NE(NULL, args->at(1)->AsLiteral());
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

VisitForAccumulatorValue(args->at(0)); // Load the object.

@ -4064,7 +4064,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK_EQ(2, args->length());
DCHECK_NE(NULL, args->at(0)->AsLiteral());
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

Handle<FixedArray> jsfunction_result_caches(

@ -4387,7 +4387,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
Immediate(to_map));
// Write barrier.
DCHECK_NE(instr->temp(), NULL);
DCHECK_NOT_NULL(instr->temp());
__ RecordWriteForMap(object_reg, to_map, new_map_reg,
ToRegister(instr->temp()),
kDontSaveFPRegs);

@ -347,7 +347,7 @@ void NamedLoadHandlerCompiler::GenerateLoadPostInterceptor(
case LookupIterator::ACCESSOR:
Handle<ExecutableAccessorInfo> info =
Handle<ExecutableAccessorInfo>::cast(it->GetAccessors());
DCHECK_NE(NULL, info->getter());
DCHECK_NOT_NULL(info->getter());
GenerateLoadCallback(reg, info);
}
}

@ -36,6 +36,14 @@ std::ostream& operator<<(std::ostream& os, const CallICState& s) {
}


// static
STATIC_CONST_MEMBER_DEFINITION const int BinaryOpICState::FIRST_TOKEN;


// static
STATIC_CONST_MEMBER_DEFINITION const int BinaryOpICState::LAST_TOKEN;


BinaryOpICState::BinaryOpICState(Isolate* isolate, ExtraICState extra_ic_state)
: isolate_(isolate) {
op_ =

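These out-of-class definitions become necessary because the Chromium-style CHECK_EQ/DCHECK_EQ take their operands by const reference, which ODR-uses a static const data member; an in-class declaration with an initializer is then no longer enough on its own to link. A minimal, hypothetical illustration of the underlying C++ rule (STATIC_CONST_MEMBER_DEFINITION itself is assumed to expand to an MSVC-friendly attribute such as __declspec(selectany), or to nothing elsewhere):

// Sketch only; the names below are illustrative.
struct Limits {
  static const int kMax = 8;  // in-class declaration with initializer
};
const int Limits::kMax;       // out-of-class definition, needed once
                              // CHECK_EQ(8, Limits::kMax) binds a reference
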
@ -2550,7 +2550,7 @@ MaybeHandle<Object> BinaryOpIC::Transition(
target = stub.GetCode();

// Sanity check the generic stub.
DCHECK_EQ(NULL, target->FindFirstAllocationSite());
DCHECK_NULL(target->FindFirstAllocationSite());
}
set_target(*target);

@ -30,7 +30,7 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
: kPointerSizeLog2 == StubCache::kCacheIndexShift);
ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;

DCHECK_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
DCHECK_EQ(3u * kPointerSize, sizeof(StubCache::Entry));
// The offset register holds the entry offset times four (due to masking
// and shifting optimizations).
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));

@ -1560,7 +1560,7 @@ Isolate::ThreadDataTable::~ThreadDataTable() {
// TODO(svenpanne) The assertion below would fire if an embedder does not
// cleanly dispose all Isolates before disposing v8, so we are conservative
// and leave it out for now.
// DCHECK_EQ(NULL, list_);
// DCHECK_NULL(list_);
}

@ -3446,14 +3446,14 @@ int ChoiceNode::GreedyLoopTextLengthForAlternative(


void LoopChoiceNode::AddLoopAlternative(GuardedAlternative alt) {
DCHECK_EQ(loop_node_, NULL);
DCHECK_NULL(loop_node_);
AddAlternative(alt);
loop_node_ = alt.node();
}


void LoopChoiceNode::AddContinueAlternative(GuardedAlternative alt) {
DCHECK_EQ(continue_node_, NULL);
DCHECK_NULL(continue_node_);
AddAlternative(alt);
continue_node_ = alt.node();
}

@ -3473,7 +3473,7 @@ void LoopChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
macro_assembler->GoTo(trace->loop_label());
return;
}
DCHECK(trace->stop_node() == NULL);
DCHECK_NULL(trace->stop_node());
if (!trace->is_trivial()) {
trace->Flush(compiler, this);
return;

@ -5294,8 +5294,8 @@ void CharacterRange::Split(ZoneList<CharacterRange>* base,
ZoneList<CharacterRange>** included,
ZoneList<CharacterRange>** excluded,
Zone* zone) {
DCHECK_EQ(NULL, *included);
DCHECK_EQ(NULL, *excluded);
DCHECK_NULL(*included);
DCHECK_NULL(*excluded);
DispatchTable table(zone);
for (int i = 0; i < base->length(); i++)
table.AddRange(base->at(i), CharacterRangeSplitter::kInBase, zone);

@ -239,7 +239,7 @@ class CharacterRange {
public:
CharacterRange() : from_(0), to_(0) { }
// For compatibility with the CHECK_OK macro
CharacterRange(void* null) { DCHECK_EQ(NULL, null); } //NOLINT
CharacterRange(void* null) { DCHECK_NULL(null); } // NOLINT
CharacterRange(uc16 from, uc16 to) : from_(from), to_(to) { }
static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges,
Zone* zone);

@ -271,7 +271,7 @@ PerfBasicLogger::PerfBasicLogger()
CHECK_NE(size, -1);
perf_output_handle_ =
base::OS::FOpen(perf_dump_name.start(), base::OS::LogFileOpenMode);
CHECK_NE(perf_output_handle_, NULL);
CHECK_NOT_NULL(perf_output_handle_);
setvbuf(perf_output_handle_, NULL, _IOFBF, kLogBufferSize);
}

@ -3795,7 +3795,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
||||
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK(args->length() == 2);
|
||||
DCHECK_NE(NULL, args->at(1)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
|
||||
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
|
||||
|
||||
VisitForAccumulatorValue(args->at(0)); // Load the object.
|
||||
@ -4161,7 +4161,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK_EQ(2, args->length());
|
||||
|
||||
DCHECK_NE(NULL, args->at(0)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
|
||||
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
|
||||
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
|
@ -3794,7 +3794,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
||||
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK(args->length() == 2);
|
||||
DCHECK_NE(NULL, args->at(1)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
|
||||
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
|
||||
|
||||
VisitForAccumulatorValue(args->at(0)); // Load the object.
|
||||
@ -4161,7 +4161,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK_EQ(2, args->length());
|
||||
|
||||
DCHECK_NE(NULL, args->at(0)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
|
||||
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
|
||||
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
|
@ -207,7 +207,7 @@ void HeapObject::VerifyHeapPointer(Object* p) {
|
||||
void Symbol::SymbolVerify() {
|
||||
CHECK(IsSymbol());
|
||||
CHECK(HasHashCode());
|
||||
CHECK_GT(Hash(), 0);
|
||||
CHECK_GT(Hash(), 0u);
|
||||
CHECK(name()->IsUndefined() || name()->IsString());
|
||||
CHECK(flags()->IsSmi());
|
||||
}
|
||||
|
@ -2951,7 +2951,7 @@ int LinearSearch(T* array, Name* name, int len, int valid_entries,
|
||||
return T::kNotFound;
|
||||
} else {
|
||||
DCHECK(len >= valid_entries);
|
||||
DCHECK_EQ(NULL, out_insertion_index); // Not supported here.
|
||||
DCHECK_NULL(out_insertion_index); // Not supported here.
|
||||
for (int number = 0; number < valid_entries; number++) {
|
||||
Name* entry = array->GetKey(number);
|
||||
uint32_t current_hash = entry->Hash();
|
||||
@ -3392,6 +3392,12 @@ CAST_ACCESSOR(WeakFixedArray)
|
||||
CAST_ACCESSOR(WeakHashTable)
|
||||
|
||||
|
||||
// static
|
||||
template <class Traits>
|
||||
STATIC_CONST_MEMBER_DEFINITION const InstanceType
|
||||
FixedTypedArray<Traits>::kInstanceType;
|
||||
|
||||
|
||||
template <class Traits>
|
||||
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
|
||||
SLOW_DCHECK(object->IsHeapObject() &&
|
||||
|
@ -9589,7 +9589,7 @@ FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
|
||||
FixedArray* code_map = FixedArray::cast(optimized_code_map());
|
||||
if (!bound()) {
|
||||
FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
|
||||
DCHECK_NE(NULL, cached_literals);
|
||||
DCHECK_NOT_NULL(cached_literals);
|
||||
return cached_literals;
|
||||
}
|
||||
return NULL;
|
||||
@ -9600,7 +9600,7 @@ Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
|
||||
DCHECK(index > kEntriesStart);
|
||||
FixedArray* code_map = FixedArray::cast(optimized_code_map());
|
||||
Code* code = Code::cast(code_map->get(index));
|
||||
DCHECK_NE(NULL, code);
|
||||
DCHECK_NOT_NULL(code);
|
||||
return code;
|
||||
}
|
||||
|
||||
|
@ -102,7 +102,7 @@ OptimizingCompilerThread::~OptimizingCompilerThread() {
|
||||
if (FLAG_concurrent_osr) {
|
||||
#ifdef DEBUG
|
||||
for (int i = 0; i < osr_buffer_capacity_; i++) {
|
||||
CHECK_EQ(NULL, osr_buffer_[i]);
|
||||
CHECK_NULL(osr_buffer_[i]);
|
||||
}
|
||||
#endif
|
||||
DeleteArray(osr_buffer_);
|
||||
@ -178,7 +178,7 @@ OptimizedCompileJob* OptimizingCompilerThread::NextInput(StopFlag* flag) {
|
||||
return NULL;
|
||||
}
|
||||
OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)];
|
||||
DCHECK_NE(NULL, job);
|
||||
DCHECK_NOT_NULL(job);
|
||||
input_queue_shift_ = InputQueueIndex(1);
|
||||
input_queue_length_--;
|
||||
if (flag) {
|
||||
@ -189,7 +189,7 @@ OptimizedCompileJob* OptimizingCompilerThread::NextInput(StopFlag* flag) {
|
||||
|
||||
|
||||
void OptimizingCompilerThread::CompileNext(OptimizedCompileJob* job) {
|
||||
DCHECK_NE(NULL, job);
|
||||
DCHECK_NOT_NULL(job);
|
||||
|
||||
// The function may have already been optimized by OSR. Simply continue.
|
||||
OptimizedCompileJob::Status status = job->OptimizeGraph();
|
||||
|
@ -57,7 +57,7 @@ PerfJitLogger::PerfJitLogger() : perf_output_handle_(NULL), code_index_(0) {
|
||||
CHECK_NE(size, -1);
|
||||
perf_output_handle_ =
|
||||
base::OS::FOpen(perf_dump_name.start(), base::OS::LogFileOpenMode);
|
||||
CHECK_NE(perf_output_handle_, NULL);
|
||||
CHECK_NOT_NULL(perf_output_handle_);
|
||||
setvbuf(perf_output_handle_, NULL, _IOFBF, kLogBufferSize);
|
||||
|
||||
LogWriteHeader();
|
||||
|
@ -3753,7 +3753,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
||||
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK(args->length() == 2);
|
||||
DCHECK_NE(NULL, args->at(1)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
|
||||
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
|
||||
|
||||
VisitForAccumulatorValue(args->at(0)); // Load the object.
|
||||
@ -4089,7 +4089,7 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
|
||||
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK_EQ(2, args->length());
|
||||
DCHECK_NE(NULL, args->at(0)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
|
||||
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
|
||||
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
|
@ -851,7 +851,7 @@ RUNTIME_FUNCTION(Runtime_ArrayConcat) {
|
||||
case FAST_HOLEY_ELEMENTS:
|
||||
case FAST_ELEMENTS:
|
||||
case DICTIONARY_ELEMENTS:
|
||||
DCHECK_EQ(0, length);
|
||||
DCHECK_EQ(0u, length);
|
||||
break;
|
||||
default:
|
||||
UNREACHABLE();
|
||||
|
@ -95,12 +95,12 @@ void ExternalReferenceTable::Add(Address address,
|
||||
TypeCode type,
|
||||
uint16_t id,
|
||||
const char* name) {
|
||||
DCHECK_NE(NULL, address);
|
||||
DCHECK_NOT_NULL(address);
|
||||
ExternalReferenceEntry entry;
|
||||
entry.address = address;
|
||||
entry.code = EncodeExternal(type, id);
|
||||
entry.name = name;
|
||||
DCHECK_NE(0, entry.code);
|
||||
DCHECK_NE(0u, entry.code);
|
||||
// Assert that the code is added in ascending order to rule out duplicates.
|
||||
DCHECK((size() == 0) || (code(size() - 1) < entry.code));
|
||||
refs_.Add(entry);
|
||||
@ -647,10 +647,10 @@ bool Deserializer::ReserveSpace() {
|
||||
|
||||
|
||||
void Deserializer::Initialize(Isolate* isolate) {
|
||||
DCHECK_EQ(NULL, isolate_);
|
||||
DCHECK_NE(NULL, isolate);
|
||||
DCHECK_NULL(isolate_);
|
||||
DCHECK_NOT_NULL(isolate);
|
||||
isolate_ = isolate;
|
||||
DCHECK_EQ(NULL, external_reference_decoder_);
|
||||
DCHECK_NULL(external_reference_decoder_);
|
||||
external_reference_decoder_ = new ExternalReferenceDecoder(isolate);
|
||||
}
|
||||
|
||||
@ -659,7 +659,7 @@ void Deserializer::Deserialize(Isolate* isolate) {
|
||||
Initialize(isolate);
|
||||
if (!ReserveSpace()) FatalProcessOutOfMemory("deserializing context");
|
||||
// No active threads.
|
||||
DCHECK_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
|
||||
DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
|
||||
// No active handles.
|
||||
DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
|
||||
isolate_->heap()->IterateSmiRoots(this);
|
||||
@ -942,7 +942,7 @@ Address Deserializer::Allocate(int space_index, int size) {
|
||||
} else {
|
||||
DCHECK(space_index < kNumberOfPreallocatedSpaces);
|
||||
Address address = high_water_[space_index];
|
||||
DCHECK_NE(NULL, address);
|
||||
DCHECK_NOT_NULL(address);
|
||||
high_water_[space_index] += size;
|
||||
#ifdef DEBUG
|
||||
// Assert that the current reserved chunk is still big enough.
|
||||
@ -1383,7 +1383,7 @@ Serializer::~Serializer() {
|
||||
void StartupSerializer::SerializeStrongReferences() {
|
||||
Isolate* isolate = this->isolate();
|
||||
// No active threads.
|
||||
CHECK_EQ(NULL, isolate->thread_manager()->FirstThreadStateInUse());
|
||||
CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
|
||||
// No active or weak handles.
|
||||
CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
|
||||
CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
|
||||
|
@ -275,7 +275,7 @@ class BackReferenceMap : public AddressMapBase {
|
||||
|
||||
void Add(HeapObject* obj, BackReference b) {
|
||||
DCHECK(b.is_valid());
|
||||
DCHECK_EQ(NULL, LookupEntry(map_, obj, false));
|
||||
DCHECK_NULL(LookupEntry(map_, obj, false));
|
||||
HashMap::Entry* entry = LookupEntry(map_, obj, true);
|
||||
SetValue(entry, b.bitfield());
|
||||
}
|
||||
@ -307,7 +307,7 @@ class HotObjectsList {
|
||||
}
|
||||
|
||||
HeapObject* Get(int index) {
|
||||
DCHECK_NE(NULL, circular_queue_[index]);
|
||||
DCHECK_NOT_NULL(circular_queue_[index]);
|
||||
return circular_queue_[index];
|
||||
}
|
||||
|
||||
|
@ -49,7 +49,7 @@ class Unique {
|
||||
// TODO(titzer): other immortable immovable objects are also fine.
|
||||
DCHECK(!AllowHeapAllocation::IsAllowed() || handle->IsMap());
|
||||
raw_address_ = reinterpret_cast<Address>(*handle);
|
||||
DCHECK_NE(raw_address_, NULL); // Non-null should imply non-zero address.
|
||||
DCHECK_NOT_NULL(raw_address_); // Non-null should imply non-zero address.
|
||||
}
|
||||
handle_ = handle;
|
||||
}
|
||||
|
src/v8.h
@ -73,7 +73,7 @@ class V8 : public AllStatic {
|
||||
}
|
||||
|
||||
static void SetArrayBufferAllocator(v8::ArrayBuffer::Allocator *allocator) {
|
||||
CHECK_EQ(NULL, array_buffer_allocator_);
|
||||
CHECK_NULL(array_buffer_allocator_);
|
||||
array_buffer_allocator_ = allocator;
|
||||
}
|
||||
|
||||
|
@ -803,7 +803,7 @@ int DisassemblerX64::ShiftInstruction(byte* data) {
|
||||
UnimplementedInstruction();
|
||||
return count + 1;
|
||||
}
|
||||
DCHECK_NE(NULL, mnem);
|
||||
DCHECK_NOT_NULL(mnem);
|
||||
AppendToBuffer("%s%c ", mnem, operand_size_code());
|
||||
}
|
||||
count += PrintRightOperand(data + count);
|
||||
|
@ -3706,7 +3706,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
||||
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK(args->length() == 2);
|
||||
DCHECK_NE(NULL, args->at(1)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
|
||||
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
|
||||
|
||||
VisitForAccumulatorValue(args->at(0)); // Load the object.
|
||||
@ -4058,7 +4058,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK_EQ(2, args->length());
|
||||
|
||||
DCHECK_NE(NULL, args->at(0)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
|
||||
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
|
||||
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
|
@ -2198,7 +2198,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
|
||||
Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
|
||||
#endif
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
DCHECK_EQ(0, Smi::FromInt(0));
|
||||
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
|
||||
movl(kScratchRegister, Immediate(kSmiTagMask));
|
||||
andp(kScratchRegister, src1);
|
||||
testl(kScratchRegister, src2);
|
||||
|
@ -3648,7 +3648,7 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
DCHECK(args->length() == 2);
DCHECK_NE(NULL, args->at(1)->AsLiteral());
DCHECK_NOT_NULL(args->at(1)->AsLiteral());
Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

VisitForAccumulatorValue(args->at(0)); // Load the object.

@ -4003,7 +4003,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
ZoneList<Expression*>* args = expr->arguments();
|
||||
DCHECK_EQ(2, args->length());
|
||||
|
||||
DCHECK_NE(NULL, args->at(0)->AsLiteral());
|
||||
DCHECK_NOT_NULL(args->at(0)->AsLiteral());
|
||||
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
|
||||
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
|
@ -103,7 +103,6 @@
|
||||
'test-bignum.cc',
|
||||
'test-bignum-dtoa.cc',
|
||||
'test-bit-vector.cc',
|
||||
'test-checks.cc',
|
||||
'test-circular-queue.cc',
|
||||
'test-compiler.cc',
|
||||
'test-constantpool.cc',
|
||||
|
@ -440,7 +440,7 @@ static inline void ExpectString(const char* code, const char* expected) {
|
||||
v8::Local<v8::Value> result = CompileRun(code);
|
||||
CHECK(result->IsString());
|
||||
v8::String::Utf8Value utf8(result);
|
||||
CHECK_EQ(expected, *utf8);
|
||||
CHECK_EQ(0, strcmp(expected, *utf8));
|
||||
}
|
||||
|
||||
|
||||
@ -557,7 +557,7 @@ class HeapObjectsTracker {
|
||||
public:
|
||||
HeapObjectsTracker() {
|
||||
heap_profiler_ = i::Isolate::Current()->heap_profiler();
|
||||
CHECK_NE(NULL, heap_profiler_);
|
||||
CHECK_NOT_NULL(heap_profiler_);
|
||||
heap_profiler_->StartHeapObjectsTracking(true);
|
||||
}
|
||||
|
||||
|
@ -373,9 +373,9 @@ void Int32BinopInputShapeTester::RunRight(
|
||||
TEST(ParametersEqual) {
|
||||
RawMachineAssemblerTester<int32_t> m(kMachInt32, kMachInt32);
|
||||
Node* p1 = m.Parameter(1);
|
||||
CHECK_NE(NULL, p1);
|
||||
CHECK(p1);
|
||||
Node* p0 = m.Parameter(0);
|
||||
CHECK_NE(NULL, p0);
|
||||
CHECK(p0);
|
||||
CHECK_EQ(p0, m.Parameter(0));
|
||||
CHECK_EQ(p1, m.Parameter(1));
|
||||
}
|
||||
@ -561,7 +561,7 @@ TEST(RunBinopTester) {
|
||||
Float64BinopTester bt(&m);
|
||||
bt.AddReturn(bt.param0);
|
||||
|
||||
FOR_FLOAT64_INPUTS(i) { CHECK_EQ(*i, bt.call(*i, 9.0)); }
|
||||
FOR_FLOAT64_INPUTS(i) { CheckDoubleEq(*i, bt.call(*i, 9.0)); }
|
||||
}
|
||||
|
||||
{
|
||||
@ -569,7 +569,7 @@ TEST(RunBinopTester) {
|
||||
Float64BinopTester bt(&m);
|
||||
bt.AddReturn(bt.param1);
|
||||
|
||||
FOR_FLOAT64_INPUTS(i) { CHECK_EQ(*i, bt.call(-11.25, *i)); }
|
||||
FOR_FLOAT64_INPUTS(i) { CheckDoubleEq(*i, bt.call(-11.25, *i)); }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -332,6 +332,16 @@ class Int32BinopInputShapeTester {
void RunLeft(RawMachineAssemblerTester<int32_t>* m);
void RunRight(RawMachineAssemblerTester<int32_t>* m);
};

// TODO(bmeurer): Drop this crap once we switch to GTest/Gmock.
static inline void CheckDoubleEq(volatile double x, volatile double y) {
if (std::isnan(x)) {
CHECK(std::isnan(y));
} else {
CHECK_EQ(x, y);
}
}

} // namespace compiler
} // namespace internal
} // namespace v8

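The CheckDoubleEq helper added above handles the one case where a plain CHECK_EQ on doubles misbehaves: NaN compares unequal to itself, so tests whose expected result may be NaN need the explicit isnan branch. A hedged usage sketch with illustrative values only:

// Illustrative only.
double nan = std::numeric_limits<double>::quiet_NaN();
CheckDoubleEq(nan, nan);   // passes: both operands are NaN
CheckDoubleEq(1.5, 1.5);   // passes: falls through to CHECK_EQ
// CHECK_EQ(nan, nan) would fail, since NaN != NaN.
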
@ -36,7 +36,7 @@ class FunctionTester : public InitializedHandleScope {
|
||||
const uint32_t supported_flags = CompilationInfo::kContextSpecializing |
|
||||
CompilationInfo::kInliningEnabled |
|
||||
CompilationInfo::kTypingEnabled;
|
||||
CHECK_EQ(0, flags_ & ~supported_flags);
|
||||
CHECK_EQ(0u, flags_ & ~supported_flags);
|
||||
}
|
||||
|
||||
explicit FunctionTester(Graph* graph)
|
||||
|
@ -21,7 +21,7 @@ MachineCallHelper::MachineCallHelper(Isolate* isolate,
|
||||
|
||||
void MachineCallHelper::InitParameters(GraphBuilder* builder,
|
||||
CommonOperatorBuilder* common) {
|
||||
DCHECK_EQ(NULL, parameters_);
|
||||
DCHECK(!parameters_);
|
||||
graph_ = builder->graph();
|
||||
int param_count = static_cast<int>(parameter_count());
|
||||
if (param_count == 0) return;
|
||||
@ -46,7 +46,7 @@ byte* MachineCallHelper::Generate() {
|
||||
|
||||
|
||||
Node* MachineCallHelper::Parameter(size_t index) {
|
||||
DCHECK_NE(NULL, parameters_);
|
||||
DCHECK(parameters_);
|
||||
DCHECK(index < parameter_count());
|
||||
return parameters_[index];
|
||||
}
|
||||
|
@ -24,7 +24,7 @@ class BasicBlockProfilerTest : public RawMachineAssemblerTester<int32_t> {
|
||||
void ResetCounts() { isolate()->basic_block_profiler()->ResetCounts(); }
|
||||
|
||||
void Expect(size_t size, uint32_t* expected) {
|
||||
CHECK_NE(NULL, isolate()->basic_block_profiler());
|
||||
CHECK(isolate()->basic_block_profiler());
|
||||
const BasicBlockProfiler::DataList* l =
|
||||
isolate()->basic_block_profiler()->data_list();
|
||||
CHECK_NE(0, static_cast<int>(l->size()));
|
||||
|
@ -242,13 +242,13 @@ TEST(RunChangeTaggedToFloat64) {
|
||||
{
|
||||
Handle<Object> number = t.factory()->NewNumber(input);
|
||||
t.Call(*number);
|
||||
CHECK_EQ(input, result);
|
||||
CheckDoubleEq(input, result);
|
||||
}
|
||||
|
||||
{
|
||||
Handle<HeapNumber> number = t.factory()->NewHeapNumber(input);
|
||||
t.Call(*number);
|
||||
CHECK_EQ(input, result);
|
||||
CheckDoubleEq(input, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -221,7 +221,7 @@ TEST(Trim1_dead) {
|
||||
CHECK(IsUsedBy(T.start, T.p0));
|
||||
T.Trim();
|
||||
CHECK(!IsUsedBy(T.start, T.p0));
|
||||
CHECK_EQ(NULL, T.p0->InputAt(0));
|
||||
CHECK(!T.p0->InputAt(0));
|
||||
}
|
||||
|
||||
|
||||
@ -252,9 +252,9 @@ TEST(Trim2_dead) {
|
||||
CHECK(!IsUsedBy(T.one, phi));
|
||||
CHECK(!IsUsedBy(T.half, phi));
|
||||
CHECK(!IsUsedBy(T.start, phi));
|
||||
CHECK_EQ(NULL, phi->InputAt(0));
|
||||
CHECK_EQ(NULL, phi->InputAt(1));
|
||||
CHECK_EQ(NULL, phi->InputAt(2));
|
||||
CHECK(!phi->InputAt(0));
|
||||
CHECK(!phi->InputAt(1));
|
||||
CHECK(!phi->InputAt(2));
|
||||
}
|
||||
|
||||
|
||||
@ -274,7 +274,7 @@ TEST(Trim_chain1) {
|
||||
T.Trim();
|
||||
for (int i = 0; i < kDepth; i++) {
|
||||
CHECK(!IsUsedBy(live[i], dead[i]));
|
||||
CHECK_EQ(NULL, dead[i]->InputAt(0));
|
||||
CHECK(!dead[i]->InputAt(0));
|
||||
CHECK_EQ(i == 0 ? T.start : live[i - 1], live[i]->InputAt(0));
|
||||
}
|
||||
}
|
||||
@ -354,9 +354,9 @@ TEST(Trim_cycle2) {
|
||||
CHECK(!IsUsedBy(loop, phi));
|
||||
CHECK(!IsUsedBy(T.one, phi));
|
||||
CHECK(!IsUsedBy(T.half, phi));
|
||||
CHECK_EQ(NULL, phi->InputAt(0));
|
||||
CHECK_EQ(NULL, phi->InputAt(1));
|
||||
CHECK_EQ(NULL, phi->InputAt(2));
|
||||
CHECK(!phi->InputAt(0));
|
||||
CHECK(!phi->InputAt(1));
|
||||
CHECK(!phi->InputAt(2));
|
||||
}
|
||||
|
||||
|
||||
@ -365,8 +365,8 @@ void CheckTrimConstant(ControlReducerTester* T, Node* k) {
|
||||
CHECK(IsUsedBy(k, phi));
|
||||
T->Trim();
|
||||
CHECK(!IsUsedBy(k, phi));
|
||||
CHECK_EQ(NULL, phi->InputAt(0));
|
||||
CHECK_EQ(NULL, phi->InputAt(1));
|
||||
CHECK(!phi->InputAt(0));
|
||||
CHECK(!phi->InputAt(1));
|
||||
}
|
||||
|
||||
|
||||
@ -954,7 +954,7 @@ TEST(CMergeReduce_dead_chain1) {
|
||||
R.graph.SetEnd(end);
|
||||
R.ReduceGraph();
|
||||
CHECK(merge->IsDead());
|
||||
CHECK_EQ(NULL, end->InputAt(0)); // end dies.
|
||||
CHECK(!end->InputAt(0)); // end dies.
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -135,7 +135,7 @@ TEST(InstructionBasic) {
|
||||
for (auto block : *blocks) {
|
||||
CHECK_EQ(block->rpo_number(), R.BlockAt(block)->rpo_number().ToInt());
|
||||
CHECK_EQ(block->id().ToInt(), R.BlockAt(block)->id().ToInt());
|
||||
CHECK_EQ(NULL, block->loop_end());
|
||||
CHECK(!block->loop_end());
|
||||
}
|
||||
}
|
||||
|
||||
@ -278,7 +278,7 @@ TEST(InstructionAddGapMove) {
|
||||
R.code->AddGapMove(index, op1, op2);
|
||||
GapInstruction* gap = R.code->GapAt(index);
|
||||
ParallelMove* move = gap->GetParallelMove(GapInstruction::START);
|
||||
CHECK_NE(NULL, move);
|
||||
CHECK(move);
|
||||
const ZoneList<MoveOperands>* move_operands = move->move_operands();
|
||||
CHECK_EQ(1, move_operands->length());
|
||||
MoveOperands* cur = &move_operands->at(0);
|
||||
|
@ -103,10 +103,10 @@ TEST(MinusZeroConstant) {
|
||||
double zero_value = OpParameter<double>(zero);
|
||||
double minus_zero_value = OpParameter<double>(minus_zero);
|
||||
|
||||
CHECK_EQ(0.0, zero_value);
|
||||
CHECK_NE(-0.0, zero_value);
|
||||
CHECK_EQ(-0.0, minus_zero_value);
|
||||
CHECK_NE(0.0, minus_zero_value);
|
||||
CHECK(bit_cast<uint64_t>(0.0) == bit_cast<uint64_t>(zero_value));
|
||||
CHECK(bit_cast<uint64_t>(-0.0) != bit_cast<uint64_t>(zero_value));
|
||||
CHECK(bit_cast<uint64_t>(0.0) != bit_cast<uint64_t>(minus_zero_value));
|
||||
CHECK(bit_cast<uint64_t>(-0.0) == bit_cast<uint64_t>(minus_zero_value));
|
||||
}
|
||||
|
||||
|
||||
|
@ -797,7 +797,7 @@ TEST(RemoveToNumberEffects) {
|
||||
}
|
||||
}
|
||||
|
||||
CHECK_EQ(NULL, effect_use); // should have done all cases above.
|
||||
CHECK(!effect_use); // should have done all cases above.
|
||||
}
|
||||
|
||||
|
||||
|
@ -62,7 +62,7 @@ TEST(TestLinkageJSFunctionIncoming) {
|
||||
Linkage linkage(info.zone(), &info);
|
||||
|
||||
CallDescriptor* descriptor = linkage.GetIncomingDescriptor();
|
||||
CHECK_NE(NULL, descriptor);
|
||||
CHECK(descriptor);
|
||||
|
||||
CHECK_EQ(1 + i, static_cast<int>(descriptor->JSParameterCount()));
|
||||
CHECK_EQ(1, static_cast<int>(descriptor->ReturnCount()));
|
||||
@ -78,7 +78,7 @@ TEST(TestLinkageCodeStubIncoming) {
|
||||
Linkage linkage(info.zone(), &info);
|
||||
// TODO(titzer): test linkage creation with a bonafide code stub.
|
||||
// this just checks current behavior.
|
||||
CHECK_EQ(NULL, linkage.GetIncomingDescriptor());
|
||||
CHECK(!linkage.GetIncomingDescriptor());
|
||||
}
|
||||
|
||||
|
||||
@ -91,7 +91,7 @@ TEST(TestLinkageJSCall) {
|
||||
for (int i = 0; i < 32; i++) {
|
||||
CallDescriptor* descriptor =
|
||||
linkage.GetJSCallDescriptor(i, CallDescriptor::kNoFlags);
|
||||
CHECK_NE(NULL, descriptor);
|
||||
CHECK(descriptor);
|
||||
CHECK_EQ(i, static_cast<int>(descriptor->JSParameterCount()));
|
||||
CHECK_EQ(1, static_cast<int>(descriptor->ReturnCount()));
|
||||
CHECK_EQ(Operator::kNoProperties, descriptor->properties());
|
||||
|
@ -136,7 +136,7 @@ class LoopFinderTester : HandleAndZoneScope {
|
||||
void CheckLoop(Node** header, int header_count, Node** body, int body_count) {
|
||||
LoopTree* tree = GetLoopTree();
|
||||
LoopTree::Loop* loop = tree->ContainingLoop(header[0]);
|
||||
CHECK_NE(NULL, loop);
|
||||
CHECK(loop);
|
||||
|
||||
CHECK(header_count == static_cast<int>(loop->HeaderSize()));
|
||||
for (int i = 0; i < header_count; i++) {
|
||||
@ -164,7 +164,7 @@ class LoopFinderTester : HandleAndZoneScope {
|
||||
Node* header = chain[i];
|
||||
// Each header should be in a loop.
|
||||
LoopTree::Loop* loop = tree->ContainingLoop(header);
|
||||
CHECK_NE(NULL, loop);
|
||||
CHECK(loop);
|
||||
// Check parentage.
|
||||
LoopTree::Loop* parent =
|
||||
i == 0 ? NULL : tree->ContainingLoop(chain[i - 1]);
|
||||
|
@ -37,18 +37,18 @@ struct TestHelper : public HandleAndZoneScope {
|
||||
|
||||
Scope* scope = info.function()->scope();
|
||||
AstValueFactory* factory = info.ast_value_factory();
|
||||
CHECK_NE(NULL, scope);
|
||||
CHECK(scope);
|
||||
|
||||
if (result == NULL) {
|
||||
AstLoopAssignmentAnalyzer analyzer(main_zone(), &info);
|
||||
result = analyzer.Analyze();
|
||||
CHECK_NE(NULL, result);
|
||||
CHECK(result);
|
||||
}
|
||||
|
||||
const i::AstRawString* name = factory->GetOneByteString(var_name);
|
||||
|
||||
i::Variable* var = scope->Lookup(name);
|
||||
CHECK_NE(NULL, var);
|
||||
CHECK(var);
|
||||
|
||||
if (var->location() == Variable::UNALLOCATED) {
|
||||
CHECK_EQ(0, expected);
|
||||
|
@ -100,7 +100,7 @@ class ReducerTester : public HandleAndZoneScope {
|
||||
// the {expect} value.
|
||||
template <typename T>
|
||||
void CheckFoldBinop(volatile T expect, Node* a, Node* b) {
|
||||
CHECK_NE(NULL, binop);
|
||||
CHECK(binop);
|
||||
Node* n = CreateBinopNode(a, b);
|
||||
MachineOperatorReducer reducer(&jsgraph);
|
||||
Reduction reduction = reducer.Reduce(n);
|
||||
@ -112,7 +112,7 @@ class ReducerTester : public HandleAndZoneScope {
|
||||
// Check that the reduction of this binop applied to {a} and {b} yields
|
||||
// the {expect} node.
|
||||
void CheckBinop(Node* expect, Node* a, Node* b) {
|
||||
CHECK_NE(NULL, binop);
|
||||
CHECK(binop);
|
||||
Node* n = CreateBinopNode(a, b);
|
||||
MachineOperatorReducer reducer(&jsgraph);
|
||||
Reduction reduction = reducer.Reduce(n);
|
||||
@ -124,7 +124,7 @@ class ReducerTester : public HandleAndZoneScope {
|
||||
// this binop applied to {left_expect} and {right_expect}.
|
||||
void CheckFoldBinop(Node* left_expect, Node* right_expect, Node* left,
|
||||
Node* right) {
|
||||
CHECK_NE(NULL, binop);
|
||||
CHECK(binop);
|
||||
Node* n = CreateBinopNode(left, right);
|
||||
MachineOperatorReducer reducer(&jsgraph);
|
||||
Reduction reduction = reducer.Reduce(n);
|
||||
@ -139,7 +139,7 @@ class ReducerTester : public HandleAndZoneScope {
|
||||
template <typename T>
|
||||
void CheckFoldBinop(volatile T left_expect, const Operator* op_expect,
|
||||
Node* right_expect, Node* left, Node* right) {
|
||||
CHECK_NE(NULL, binop);
|
||||
CHECK(binop);
|
||||
Node* n = CreateBinopNode(left, right);
|
||||
MachineOperatorReducer reducer(&jsgraph);
|
||||
Reduction r = reducer.Reduce(n);
|
||||
@ -154,7 +154,7 @@ class ReducerTester : public HandleAndZoneScope {
|
||||
template <typename T>
|
||||
void CheckFoldBinop(Node* left_expect, const Operator* op_expect,
|
||||
volatile T right_expect, Node* left, Node* right) {
|
||||
CHECK_NE(NULL, binop);
|
||||
CHECK(binop);
|
||||
Node* n = CreateBinopNode(left, right);
|
||||
MachineOperatorReducer reducer(&jsgraph);
|
||||
Reduction r = reducer.Reduce(n);
|
||||
@ -723,133 +723,6 @@ TEST(ReduceLoadStore) {
|
||||
}
|
||||
|
||||
|
||||
static void CheckNans(ReducerTester* R) {
|
||||
Node* x = R->Parameter();
|
||||
std::vector<double> nans = ValueHelper::nan_vector();
|
||||
for (std::vector<double>::const_iterator pl = nans.begin(); pl != nans.end();
|
||||
++pl) {
|
||||
for (std::vector<double>::const_iterator pr = nans.begin();
|
||||
pr != nans.end(); ++pr) {
|
||||
Node* nan1 = R->Constant<double>(*pl);
|
||||
Node* nan2 = R->Constant<double>(*pr);
|
||||
R->CheckBinop(nan1, x, nan1); // x op NaN => NaN
|
||||
R->CheckBinop(nan1, nan1, x); // NaN op x => NaN
|
||||
R->CheckBinop(nan1, nan2, nan1); // NaN op NaN => NaN
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
TEST(ReduceFloat64Add) {
|
||||
ReducerTester R;
|
||||
R.binop = R.machine.Float64Add();
|
||||
|
||||
FOR_FLOAT64_INPUTS(pl) {
|
||||
FOR_FLOAT64_INPUTS(pr) {
|
||||
double x = *pl, y = *pr;
|
||||
R.CheckFoldBinop<double>(x + y, x, y);
|
||||
}
|
||||
}
|
||||
|
||||
FOR_FLOAT64_INPUTS(i) {
|
||||
Double tmp(*i);
|
||||
if (!tmp.IsSpecial() || tmp.IsInfinite()) {
|
||||
// Don't check NaNs as they are reduced more.
|
||||
R.CheckPutConstantOnRight(*i);
|
||||
}
|
||||
}
|
||||
|
||||
CheckNans(&R);
|
||||
}
|
||||
|
||||
|
||||
TEST(ReduceFloat64Sub) {
|
||||
ReducerTester R;
|
||||
R.binop = R.machine.Float64Sub();
|
||||
|
||||
FOR_FLOAT64_INPUTS(pl) {
|
||||
FOR_FLOAT64_INPUTS(pr) {
|
||||
double x = *pl, y = *pr;
|
||||
R.CheckFoldBinop<double>(x - y, x, y);
|
||||
}
|
||||
}
|
||||
|
||||
Node* zero = R.Constant<double>(0.0);
|
||||
Node* x = R.Parameter();
|
||||
|
||||
R.CheckBinop(x, x, zero); // x - 0.0 => x
|
||||
|
||||
CheckNans(&R);
|
||||
}
|
||||
|
||||
|
||||
TEST(ReduceFloat64Mul) {
|
||||
ReducerTester R;
|
||||
R.binop = R.machine.Float64Mul();
|
||||
|
||||
FOR_FLOAT64_INPUTS(pl) {
|
||||
FOR_FLOAT64_INPUTS(pr) {
|
||||
double x = *pl, y = *pr;
|
||||
R.CheckFoldBinop<double>(x * y, x, y);
|
||||
}
|
||||
}
|
||||
|
||||
double inf = V8_INFINITY;
|
||||
R.CheckPutConstantOnRight(-inf);
|
||||
R.CheckPutConstantOnRight(-0.1);
|
||||
R.CheckPutConstantOnRight(0.1);
|
||||
R.CheckPutConstantOnRight(inf);
|
||||
|
||||
Node* x = R.Parameter();
|
||||
Node* one = R.Constant<double>(1.0);
|
||||
|
||||
R.CheckBinop(x, x, one); // x * 1.0 => x
|
||||
R.CheckBinop(x, one, x); // 1.0 * x => x
|
||||
|
||||
CheckNans(&R);
|
||||
}
|
||||
|
||||
|
||||
TEST(ReduceFloat64Div) {
|
||||
ReducerTester R;
|
||||
R.binop = R.machine.Float64Div();
|
||||
|
||||
FOR_FLOAT64_INPUTS(pl) {
|
||||
FOR_FLOAT64_INPUTS(pr) {
|
||||
double x = *pl, y = *pr;
|
||||
R.CheckFoldBinop<double>(x / y, x, y);
|
||||
}
|
||||
}
|
||||
|
||||
Node* x = R.Parameter();
|
||||
Node* one = R.Constant<double>(1.0);
|
||||
|
||||
R.CheckBinop(x, x, one); // x / 1.0 => x
|
||||
|
||||
CheckNans(&R);
|
||||
}
|
||||
|
||||
|
||||
TEST(ReduceFloat64Mod) {
|
||||
ReducerTester R;
|
||||
R.binop = R.machine.Float64Mod();
|
||||
|
||||
FOR_FLOAT64_INPUTS(pl) {
|
||||
FOR_FLOAT64_INPUTS(pr) {
|
||||
double x = *pl, y = *pr;
|
||||
R.CheckFoldBinop<double>(modulo(x, y), x, y);
|
||||
}
|
||||
}
|
||||
|
||||
Node* x = R.Parameter();
|
||||
Node* zero = R.Constant<double>(0.0);
|
||||
|
||||
R.CheckFoldBinop<double>(std::numeric_limits<double>::quiet_NaN(), x, zero);
|
||||
|
||||
CheckNans(&R);
|
||||
}
|
||||
|
||||
|
||||
// TODO(titzer): test MachineOperatorReducer for Word64And
|
||||
// TODO(titzer): test MachineOperatorReducer for Word64Or
|
||||
// TODO(titzer): test MachineOperatorReducer for Word64Xor
|
||||
@ -870,3 +743,8 @@ TEST(ReduceFloat64Mod) {
|
||||
// TODO(titzer): test MachineOperatorReducer for ChangeInt32ToFloat64
|
||||
// TODO(titzer): test MachineOperatorReducer for ChangeFloat64ToInt32
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Compare
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Add
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Sub
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Mul
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Div
|
||||
// TODO(titzer): test MachineOperatorReducer for Float64Mod
|
||||
|
@ -17,7 +17,7 @@ TEST(Int32Constant_back_to_back) {
|
||||
|
||||
for (int i = -2000000000; i < 2000000000; i += 3315177) {
|
||||
Node** pos = cache.Find(graph.zone(), i);
|
||||
CHECK_NE(NULL, pos);
|
||||
CHECK(pos);
|
||||
for (int j = 0; j < 3; j++) {
|
||||
Node** npos = cache.Find(graph.zone(), i);
|
||||
CHECK_EQ(pos, npos);
|
||||
@ -80,7 +80,7 @@ TEST(Int64Constant_back_to_back) {
|
||||
|
||||
for (int64_t i = -2000000000; i < 2000000000; i += 3315177) {
|
||||
Node** pos = cache.Find(graph.zone(), i);
|
||||
CHECK_NE(NULL, pos);
|
||||
CHECK(pos);
|
||||
for (int j = 0; j < 3; j++) {
|
||||
Node** npos = cache.Find(graph.zone(), i);
|
||||
CHECK_EQ(pos, npos);
|
||||
|
@ -632,15 +632,15 @@ TEST(RemoveAllInputs) {
|
||||
n1->RemoveAllInputs();
|
||||
CHECK_EQ(1, n1->InputCount());
|
||||
CHECK_EQ(1, n0->UseCount());
|
||||
CHECK_EQ(NULL, n1->InputAt(0));
|
||||
CHECK(!n1->InputAt(0));
|
||||
|
||||
CHECK_EQ(1, n1->UseCount());
|
||||
n2->RemoveAllInputs();
|
||||
CHECK_EQ(2, n2->InputCount());
|
||||
CHECK_EQ(0, n0->UseCount());
|
||||
CHECK_EQ(0, n1->UseCount());
|
||||
CHECK_EQ(NULL, n2->InputAt(0));
|
||||
CHECK_EQ(NULL, n2->InputAt(1));
|
||||
CHECK(!n2->InputAt(0));
|
||||
CHECK(!n2->InputAt(1));
|
||||
}
|
||||
|
||||
{
|
||||
@ -653,6 +653,6 @@ TEST(RemoveAllInputs) {
|
||||
n1->RemoveAllInputs();
|
||||
CHECK_EQ(1, n1->InputCount());
|
||||
CHECK_EQ(0, n1->UseCount());
|
||||
CHECK_EQ(NULL, n1->InputAt(0));
|
||||
CHECK(!n1->InputAt(0));
|
||||
}
|
||||
}
|
||||
|
@ -80,14 +80,14 @@ TEST(TestOperator_Print) {
|
||||
Operator op1a(19, NONE, "Another1", 0, 0, 0, 0, 0, 0);
|
||||
Operator op1b(19, FOLD, "Another2", 2, 0, 0, 2, 0, 0);
|
||||
|
||||
CHECK_EQ("Another1", OperatorToString(&op1a).get());
|
||||
CHECK_EQ("Another2", OperatorToString(&op1b).get());
|
||||
CHECK_EQ(0, strcmp("Another1", OperatorToString(&op1a).get()));
|
||||
CHECK_EQ(0, strcmp("Another2", OperatorToString(&op1b).get()));
|
||||
|
||||
Operator op2a(20, NONE, "Flog1", 0, 0, 0, 0, 0, 0);
|
||||
Operator op2b(20, FOLD, "Flog2", 1, 0, 0, 1, 0, 0);
|
||||
|
||||
CHECK_EQ("Flog1", OperatorToString(&op2a).get());
|
||||
CHECK_EQ("Flog2", OperatorToString(&op2b).get());
|
||||
CHECK_EQ(0, strcmp("Flog1", OperatorToString(&op2a).get()));
|
||||
CHECK_EQ(0, strcmp("Flog2", OperatorToString(&op2b).get()));
|
||||
}
|
||||
|
||||
|
||||
@ -148,16 +148,16 @@ TEST(TestOperator1int_Equals) {
|
||||
|
||||
TEST(TestOperator1int_Print) {
|
||||
Operator1<int> op1(12, NONE, "Op1Test", 0, 0, 0, 1, 0, 0, 0);
|
||||
CHECK_EQ("Op1Test[0]", OperatorToString(&op1).get());
|
||||
CHECK_EQ(0, strcmp("Op1Test[0]", OperatorToString(&op1).get()));
|
||||
|
||||
Operator1<int> op2(12, NONE, "Op1Test", 0, 0, 0, 1, 0, 0, 66666666);
|
||||
CHECK_EQ("Op1Test[66666666]", OperatorToString(&op2).get());
|
||||
CHECK_EQ(0, strcmp("Op1Test[66666666]", OperatorToString(&op2).get()));
|
||||
|
||||
Operator1<int> op3(12, NONE, "FooBar", 0, 0, 0, 1, 0, 0, 2347);
|
||||
CHECK_EQ("FooBar[2347]", OperatorToString(&op3).get());
|
||||
CHECK_EQ(0, strcmp("FooBar[2347]", OperatorToString(&op3).get()));
|
||||
|
||||
Operator1<int> op4(12, NONE, "BarFoo", 0, 0, 0, 1, 0, 0, -879);
|
||||
CHECK_EQ("BarFoo[-879]", OperatorToString(&op4).get());
|
||||
CHECK_EQ(0, strcmp("BarFoo[-879]", OperatorToString(&op4).get()));
|
||||
}
|
||||
|
||||
|
||||
@ -179,8 +179,8 @@ TEST(TestOperator1doublePrint) {
|
||||
Operator1<double> op1a(23, NONE, "Canary", 0, 0, 0, 0, 0, 0, 0.5);
|
||||
Operator1<double> op1b(23, FOLD, "Finch", 2, 0, 0, 2, 0, 0, -1.5);
|
||||
|
||||
CHECK_EQ("Canary[0.5]", OperatorToString(&op1a).get());
|
||||
CHECK_EQ("Finch[-1.5]", OperatorToString(&op1b).get());
|
||||
CHECK_EQ(0, strcmp("Canary[0.5]", OperatorToString(&op1a).get()));
|
||||
CHECK_EQ(0, strcmp("Finch[-1.5]", OperatorToString(&op1b).get()));
|
||||
}
|
||||
|
||||
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
#include "src/v8.h"
|
||||
#include "test/cctest/cctest.h"
|
||||
#include "test/cctest/compiler/codegen-tester.h"
|
||||
#include "test/cctest/compiler/graph-builder-tester.h"
|
||||
#include "test/cctest/compiler/value-helper.h"
|
||||
|
||||
@ -58,7 +59,7 @@ class RepresentationChangerTester : public HandleAndZoneScope,
|
||||
void CheckFloat64Constant(Node* n, double expected) {
|
||||
Float64Matcher m(n);
|
||||
CHECK(m.HasValue());
|
||||
CHECK_EQ(expected, m.Value());
|
||||
CheckDoubleEq(expected, m.Value());
|
||||
}
|
||||
|
||||
void CheckFloat32Constant(Node* n, float expected) {
|
||||
@ -77,7 +78,7 @@ class RepresentationChangerTester : public HandleAndZoneScope,
|
||||
NumberMatcher m(n);
|
||||
CHECK_EQ(IrOpcode::kNumberConstant, n->opcode());
|
||||
CHECK(m.HasValue());
|
||||
CHECK_EQ(expected, m.Value());
|
||||
CheckDoubleEq(expected, m.Value());
|
||||
}
|
||||
|
||||
Node* Parameter(int index = 0) {
|
||||
|
@ -15,10 +15,6 @@
#if V8_TURBOFAN_TARGET

using namespace v8::base;

#define CHECK_UINT32_EQ(x, y) \
CHECK_EQ(static_cast<int32_t>(x), static_cast<int32_t>(y))

using namespace v8::internal;
using namespace v8::internal::compiler;

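The CHECK_UINT32_EQ helper is dropped here and the call sites below switch to plain CHECK_EQ, presumably because the operand-typed Chromium-style macros compare unsigned values directly and the int32_t casts are no longer needed. A small, hypothetical illustration:

// Illustrative only.
uint32_t expected = 0x80000000u;
uint32_t actual = 0x80000000u;
CHECK_EQ(expected, actual);  // compares as uint32_t; no static_cast required
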
@ -505,7 +501,7 @@ TEST(RunLoadStoreFloat64Offset) {
|
||||
p1 = *j;
|
||||
p2 = *j - 5;
|
||||
CHECK_EQ(magic, m.Call());
|
||||
CHECK_EQ(p1, p2);
|
||||
CheckDoubleEq(p1, p2);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -763,7 +759,7 @@ TEST(RunInt32AddInBranch) {
|
||||
static const int32_t constant = 987654321;
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32Equal(m.Int32Add(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -781,7 +777,7 @@ TEST(RunInt32AddInBranch) {
|
||||
}
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32NotEqual(m.Int32Add(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -810,7 +806,7 @@ TEST(RunInt32AddInBranch) {
|
||||
m.Return(m.Int32Constant(0 - constant));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i + *j) == 0 ? constant : 0 - constant;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -827,7 +823,7 @@ TEST(RunInt32AddInBranch) {
|
||||
m.Return(m.Int32Constant(0 - constant));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i + *j) != 0 ? constant : 0 - constant;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -885,7 +881,7 @@ TEST(RunInt32AddInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i + *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -897,7 +893,7 @@ TEST(RunInt32AddInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i + *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -908,7 +904,7 @@ TEST(RunInt32AddInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i + *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -919,7 +915,7 @@ TEST(RunInt32AddInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*j + *i) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -971,7 +967,7 @@ TEST(RunInt32SubP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = static_cast<int32_t>(*i - *j);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -984,7 +980,7 @@ TEST(RunInt32SubImm) {
|
||||
m.Return(m.Int32Sub(m.Int32Constant(*i), m.Parameter(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i - *j;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -994,7 +990,7 @@ TEST(RunInt32SubImm) {
|
||||
m.Return(m.Int32Sub(m.Parameter(0), m.Int32Constant(*i)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *j - *i;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1072,8 +1068,8 @@ TEST(RunInt32SubAndWord32ShrP) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
FOR_UINT32_SHIFTS(shift) {
|
||||
// Use uint32_t because signed overflow is UB in C.
|
||||
int32_t expected = *i - (*j >> shift);
|
||||
CHECK_UINT32_EQ(expected, m.Call(*i, *j, shift));
|
||||
uint32_t expected = *i - (*j >> shift);
|
||||
CHECK_EQ(expected, m.Call(*i, *j, shift));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1087,7 +1083,7 @@ TEST(RunInt32SubAndWord32ShrP) {
|
||||
FOR_UINT32_SHIFTS(shift) {
|
||||
FOR_UINT32_INPUTS(k) {
|
||||
// Use uint32_t because signed overflow is UB in C.
|
||||
int32_t expected = (*i >> shift) - *k;
|
||||
uint32_t expected = (*i >> shift) - *k;
|
||||
CHECK_EQ(expected, m.Call(*i, shift, *k));
|
||||
}
|
||||
}
|
||||
@ -1100,7 +1096,7 @@ TEST(RunInt32SubInBranch) {
|
||||
static const int constant = 987654321;
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32Equal(m.Int32Sub(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -1118,7 +1114,7 @@ TEST(RunInt32SubInBranch) {
|
||||
}
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32NotEqual(m.Int32Sub(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -1146,7 +1142,7 @@ TEST(RunInt32SubInBranch) {
|
||||
m.Bind(&blockb);
|
||||
m.Return(m.Int32Constant(0 - constant));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
int32_t expected = (*i - *j) == 0 ? constant : 0 - constant;
|
||||
uint32_t expected = (*i - *j) == 0 ? constant : 0 - constant;
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
@ -1222,7 +1218,7 @@ TEST(RunInt32SubInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i - *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1234,7 +1230,7 @@ TEST(RunInt32SubInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i - *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1245,7 +1241,7 @@ TEST(RunInt32SubInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i - *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1256,7 +1252,7 @@ TEST(RunInt32SubInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*j - *i) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1318,7 +1314,7 @@ TEST(RunInt32MulP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i * *j;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1346,7 +1342,7 @@ TEST(RunInt32MulImm) {
|
||||
m.Return(m.Int32Mul(m.Int32Constant(*i), m.Parameter(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i * *j;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1356,7 +1352,7 @@ TEST(RunInt32MulImm) {
|
||||
m.Return(m.Int32Mul(m.Parameter(0), m.Int32Constant(*i)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *j * *i;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1527,7 +1523,7 @@ TEST(RunUint32DivP) {
|
||||
uint32_t p0 = *i;
|
||||
uint32_t p1 = *j;
|
||||
if (p1 != 0) {
|
||||
uint32_t expected = static_cast<uint32_t>(p0 / p1);
|
||||
int32_t expected = bit_cast<int32_t>(p0 / p1);
|
||||
CHECK_EQ(expected, bt.call(p0, p1));
|
||||
}
|
||||
}
|
||||
@ -1542,7 +1538,7 @@ TEST(RunUint32DivP) {
|
||||
uint32_t p0 = *i;
|
||||
uint32_t p1 = *j;
|
||||
if (p1 != 0) {
|
||||
uint32_t expected = static_cast<uint32_t>(p0 + (p0 / p1));
|
||||
int32_t expected = bit_cast<int32_t>(p0 + (p0 / p1));
|
||||
CHECK_EQ(expected, bt.call(p0, p1));
|
||||
}
|
||||
}
|
||||
@ -1588,7 +1584,7 @@ TEST(RunInt32ModP) {
|
||||
TEST(RunUint32ModP) {
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Int32BinopTester bt(&m);
|
||||
Uint32BinopTester bt(&m);
|
||||
bt.AddReturn(m.Uint32Mod(bt.param0, bt.param1));
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
@ -1603,7 +1599,7 @@ TEST(RunUint32ModP) {
|
||||
}
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Int32BinopTester bt(&m);
|
||||
Uint32BinopTester bt(&m);
|
||||
bt.AddReturn(m.Int32Add(bt.param0, m.Uint32Mod(bt.param0, bt.param1)));
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
@ -1626,7 +1622,7 @@ TEST(RunWord32AndP) {
|
||||
bt.AddReturn(m.Word32And(bt.param0, bt.param1));
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i & *j;
|
||||
int32_t expected = *i & *j;
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
@ -1637,7 +1633,7 @@ TEST(RunWord32AndP) {
|
||||
bt.AddReturn(m.Word32And(bt.param0, m.Word32Not(bt.param1)));
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i & ~(*j);
|
||||
int32_t expected = *i & ~(*j);
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
@ -1648,7 +1644,7 @@ TEST(RunWord32AndP) {
|
||||
bt.AddReturn(m.Word32And(m.Word32Not(bt.param0), bt.param1));
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = ~(*i) & *j;
|
||||
int32_t expected = ~(*i) & *j;
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
@ -1665,7 +1661,7 @@ TEST(RunWord32AndAndWord32ShlP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i << (*j & 0x1f);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1677,7 +1673,7 @@ TEST(RunWord32AndAndWord32ShlP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i << (0x1f & *j);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1693,7 +1689,7 @@ TEST(RunWord32AndAndWord32ShrP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i >> (*j & 0x1f);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1705,7 +1701,7 @@ TEST(RunWord32AndAndWord32ShrP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i >> (0x1f & *j);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1732,7 +1728,7 @@ TEST(RunWord32AndAndWord32SarP) {
|
||||
m.Word32Sar(bt.param0, m.Word32And(m.Int32Constant(0x1f), bt.param1)));
|
||||
FOR_INT32_INPUTS(i) {
|
||||
FOR_INT32_INPUTS(j) {
|
||||
uint32_t expected = *i >> (0x1f & *j);
|
||||
int32_t expected = *i >> (0x1f & *j);
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
@ -1747,7 +1743,7 @@ TEST(RunWord32AndImm) {
|
||||
m.Return(m.Word32And(m.Int32Constant(*i), m.Parameter(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i & *j;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1757,7 +1753,7 @@ TEST(RunWord32AndImm) {
|
||||
m.Return(m.Word32And(m.Int32Constant(*i), m.Word32Not(m.Parameter(0))));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i & ~(*j);
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1768,7 +1764,7 @@ TEST(RunWord32AndInBranch) {
|
||||
static const int constant = 987654321;
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32Equal(m.Word32And(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -1786,7 +1782,7 @@ TEST(RunWord32AndInBranch) {
|
||||
}
|
||||
{
|
||||
RawMachineAssemblerTester<int32_t> m;
|
||||
Uint32BinopTester bt(&m);
|
||||
Int32BinopTester bt(&m);
|
||||
MLabel blocka, blockb;
|
||||
m.Branch(
|
||||
m.Word32NotEqual(m.Word32And(bt.param0, bt.param1), m.Int32Constant(0)),
|
||||
@ -1891,7 +1887,7 @@ TEST(RunWord32AndInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i & *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1903,7 +1899,7 @@ TEST(RunWord32AndInComparison) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i & *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1914,7 +1910,7 @@ TEST(RunWord32AndInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*i & *j) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1925,7 +1921,7 @@ TEST(RunWord32AndInComparison) {
|
||||
m.Int32Constant(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = (*j & *i) == 0;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1940,7 +1936,7 @@ TEST(RunWord32OrP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i | *j;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1951,7 +1947,7 @@ TEST(RunWord32OrP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i | ~(*j);
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1962,7 +1958,7 @@ TEST(RunWord32OrP) {
|
||||
FOR_UINT32_INPUTS(i) {
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = ~(*i) | *j;
|
||||
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
|
||||
CHECK_EQ(expected, bt.call(*i, *j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1976,7 +1972,7 @@ TEST(RunWord32OrImm) {
|
||||
m.Return(m.Word32Or(m.Int32Constant(*i), m.Parameter(0)));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i | *j;
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1986,7 +1982,7 @@ TEST(RunWord32OrImm) {
|
||||
m.Return(m.Word32Or(m.Int32Constant(*i), m.Word32Not(m.Parameter(0))));
|
||||
FOR_UINT32_INPUTS(j) {
|
||||
uint32_t expected = *i | ~(*j);
|
||||
CHECK_UINT32_EQ(expected, m.Call(*j));
|
||||
CHECK_EQ(expected, m.Call(*j));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2113,7 +2109,7 @@ TEST(RunWord32OrInBranch) {
TEST(RunWord32OrInComparison) {
{
RawMachineAssemblerTester<int32_t> m;
Uint32BinopTester bt(&m);
Int32BinopTester bt(&m);
bt.AddReturn(
m.Word32Equal(m.Word32Or(bt.param0, bt.param1), m.Int32Constant(0)));
FOR_UINT32_INPUTS(i) {

@ -2125,7 +2121,7 @@ TEST(RunWord32OrInComparison) {
}
{
RawMachineAssemblerTester<int32_t> m;
Uint32BinopTester bt(&m);
Int32BinopTester bt(&m);
bt.AddReturn(
m.Word32Equal(m.Int32Constant(0), m.Word32Or(bt.param0, bt.param1)));
FOR_UINT32_INPUTS(i) {

@ -2142,7 +2138,7 @@ TEST(RunWord32OrInComparison) {
m.Int32Constant(0)));
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*i | *j) == 0;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2153,7 +2149,7 @@ TEST(RunWord32OrInComparison) {
m.Int32Constant(0)));
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*j | *i) == 0;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2163,11 +2159,11 @@ TEST(RunWord32OrInComparison) {
TEST(RunWord32XorP) {
{
FOR_UINT32_INPUTS(i) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(m.Word32Xor(m.Int32Constant(*i), m.Parameter(0)));
FOR_UINT32_INPUTS(j) {
uint32_t expected = *i ^ *j;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2177,8 +2173,8 @@ TEST(RunWord32XorP) {
bt.AddReturn(m.Word32Xor(bt.param0, bt.param1));
FOR_UINT32_INPUTS(i) {
FOR_UINT32_INPUTS(j) {
int32_t expected = *i ^ *j;
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
uint32_t expected = *i ^ *j;
CHECK_EQ(expected, bt.call(*i, *j));
}
}
}

@ -2210,7 +2206,7 @@ TEST(RunWord32XorP) {
m.Return(m.Word32Xor(m.Int32Constant(*i), m.Word32Not(m.Parameter(0))));
FOR_UINT32_INPUTS(j) {
uint32_t expected = *i ^ ~(*j);
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2233,7 +2229,7 @@ TEST(RunWord32XorInBranch) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*i ^ *j) == 0 ? constant : 0 - constant;
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
CHECK_EQ(expected, bt.call(*i, *j));
}
}
}

@ -2251,7 +2247,7 @@ TEST(RunWord32XorInBranch) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*i ^ *j) != 0 ? constant : 0 - constant;
CHECK_UINT32_EQ(expected, bt.call(*i, *j));
CHECK_EQ(expected, bt.call(*i, *j));
}
}
}

@ -2268,7 +2264,7 @@ TEST(RunWord32XorInBranch) {
m.Return(m.Int32Constant(0 - constant));
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*i ^ *j) == 0 ? constant : 0 - constant;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2286,7 +2282,7 @@ TEST(RunWord32XorInBranch) {
m.Return(m.Int32Constant(0 - constant));
FOR_UINT32_INPUTS(j) {
uint32_t expected = (*i ^ *j) != 0 ? constant : 0 - constant;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}
@ -2342,7 +2338,7 @@ TEST(RunWord32ShlP) {
m.Return(m.Word32Shl(m.Parameter(0), m.Int32Constant(shift)));
FOR_UINT32_INPUTS(j) {
uint32_t expected = *j << shift;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2353,7 +2349,7 @@ TEST(RunWord32ShlP) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = *i << shift;
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}

@ -2369,7 +2365,7 @@ TEST(RunWord32ShlInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == (*i << shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}

@ -2381,31 +2377,31 @@ TEST(RunWord32ShlInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == (*i << shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Int32Constant(0),
m.Word32Shl(m.Parameter(0), m.Int32Constant(shift))));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == (*i << shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Word32Shl(m.Parameter(0), m.Int32Constant(shift)),
m.Int32Constant(0)));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == (*i << shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}

@ -2419,7 +2415,7 @@ TEST(RunWord32ShrP) {
m.Return(m.Word32Shr(m.Parameter(0), m.Int32Constant(shift)));
FOR_UINT32_INPUTS(j) {
uint32_t expected = *j >> shift;
CHECK_UINT32_EQ(expected, m.Call(*j));
CHECK_EQ(expected, m.Call(*j));
}
}
}

@ -2430,10 +2426,10 @@ TEST(RunWord32ShrP) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = *i >> shift;
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
CHECK_EQ(0x00010000, bt.call(0x80000000, 15));
CHECK_EQ(0x00010000u, bt.call(0x80000000, 15));
}
}

@ -2447,7 +2443,7 @@ TEST(RunWord32ShrInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == (*i >> shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}

@ -2459,31 +2455,31 @@ TEST(RunWord32ShrInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == (*i >> shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Int32Constant(0),
m.Word32Shr(m.Parameter(0), m.Int32Constant(shift))));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == (*i >> shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Word32Shr(m.Parameter(0), m.Int32Constant(shift)),
m.Int32Constant(0)));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == (*i >> shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}

@ -2511,7 +2507,7 @@ TEST(RunWord32SarP) {
CHECK_EQ(expected, bt.call(*i, shift));
}
}
CHECK_EQ(0xFFFF0000, bt.call(0x80000000, 15));
CHECK_EQ(bit_cast<int32_t>(0xFFFF0000), bt.call(0x80000000, 15));
}
}
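The last pair of lines above replaces a raw 0xFFFF0000, which is not representable as a positive int32_t, with bit_cast<int32_t>(0xFFFF0000), so the arithmetic-shift result is compared as a signed value without an implicit conversion. V8 ships its own bit_cast; the following is only a memcpy-based sketch of the same idea, with illustrative names.

// bit_cast_sketch.cc -- illustrative stand-in for a bit_cast utility.
#include <cassert>
#include <cstdint>
#include <cstring>

// Reinterpret the bits of one trivially copyable value as another type of
// the same size; memcpy sidesteps undefined behaviour from type punning.
template <typename Dest, typename Source>
Dest bit_cast_sketch(const Source& source) {
  static_assert(sizeof(Dest) == sizeof(Source), "sizes must match");
  Dest dest;
  std::memcpy(&dest, &source, sizeof(dest));
  return dest;
}

int main() {
  // 0xFFFF0000 reinterpreted as int32_t is -65536 on two's-complement targets,
  // which is what an arithmetic shift right of 0x80000000 by 15 produces.
  int32_t expected = bit_cast_sketch<int32_t>(0xFFFF0000u);
  assert(expected == -65536);
  return 0;
}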
@ -2560,7 +2556,7 @@ TEST(RunWord32SarInComparison) {
m.Word32Equal(m.Word32Sar(m.Parameter(0), m.Int32Constant(shift)),
m.Int32Constant(0)));
FOR_INT32_INPUTS(i) {
uint32_t expected = 0 == (*i >> shift);
int32_t expected = 0 == (*i >> shift);
CHECK_EQ(expected, m.Call(*i));
}
}

@ -2586,7 +2582,7 @@ TEST(RunWord32RorP) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = bits::RotateRight32(*i, shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}

@ -2602,7 +2598,7 @@ TEST(RunWord32RorInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == bits::RotateRight32(*i, shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}

@ -2614,31 +2610,31 @@ TEST(RunWord32RorInComparison) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_SHIFTS(shift) {
uint32_t expected = 0 == bits::RotateRight32(*i, shift);
CHECK_UINT32_EQ(expected, bt.call(*i, shift));
CHECK_EQ(expected, bt.call(*i, shift));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Int32Constant(0),
m.Word32Ror(m.Parameter(0), m.Int32Constant(shift))));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == bits::RotateRight32(*i, shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}
{
FOR_UINT32_SHIFTS(shift) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
m.Return(
m.Word32Equal(m.Word32Ror(m.Parameter(0), m.Int32Constant(shift)),
m.Int32Constant(0)));
FOR_UINT32_INPUTS(i) {
uint32_t expected = 0 == bits::RotateRight32(*i, shift);
CHECK_UINT32_EQ(expected, m.Call(*i));
CHECK_EQ(expected, m.Call(*i));
}
}
}
@ -2964,7 +2960,7 @@ TEST(RunFloat64AddP) {
FOR_FLOAT64_INPUTS(pl) {
FOR_FLOAT64_INPUTS(pr) {
double expected = *pl + *pr;
CHECK_EQ(expected, bt.call(*pl, *pr));
CheckDoubleEq(expected, bt.call(*pl, *pr));
}
}
}

@ -2979,7 +2975,7 @@ TEST(RunFloat64SubP) {
FOR_FLOAT64_INPUTS(pl) {
FOR_FLOAT64_INPUTS(pr) {
double expected = *pl - *pr;
CHECK_EQ(expected, bt.call(*pl, *pr));
CheckDoubleEq(expected, bt.call(*pl, *pr));
}
}
}

@ -2999,7 +2995,7 @@ TEST(RunFloat64SubImm1) {
input = *j;
double expected = *i - input;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3019,7 +3015,7 @@ TEST(RunFloat64SubImm2) {
input = *j;
double expected = input - *i;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3034,7 +3030,7 @@ TEST(RunFloat64MulP) {
FOR_FLOAT64_INPUTS(pl) {
FOR_FLOAT64_INPUTS(pr) {
double expected = *pl * *pr;
CHECK_EQ(expected, bt.call(*pl, *pr));
CheckDoubleEq(expected, bt.call(*pl, *pr));
}
}
}

@ -3063,7 +3059,7 @@ TEST(RunFloat64MulAndFloat64AddP) {
volatile double temp = input_a * input_b;
volatile double expected = temp + input_c;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3085,7 +3081,7 @@ TEST(RunFloat64MulAndFloat64AddP) {
volatile double temp = input_b * input_c;
volatile double expected = input_a + temp;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3115,7 +3111,7 @@ TEST(RunFloat64MulAndFloat64SubP) {
volatile double temp = input_b * input_c;
volatile double expected = input_a - temp;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3137,7 +3133,7 @@ TEST(RunFloat64MulImm) {
input = *j;
double expected = *i * input;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3152,7 +3148,7 @@ TEST(RunFloat64MulImm) {
input = *j;
double expected = input * *i;
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, output);
CheckDoubleEq(expected, output);
}
}
}

@ -3168,7 +3164,7 @@ TEST(RunFloat64DivP) {
FOR_FLOAT64_INPUTS(pl) {
FOR_FLOAT64_INPUTS(pr) {
double expected = *pl / *pr;
CHECK_EQ(expected, bt.call(*pl, *pr));
CheckDoubleEq(expected, bt.call(*pl, *pr));
}
}
}

@ -3184,7 +3180,7 @@ TEST(RunFloat64ModP) {
FOR_FLOAT64_INPUTS(j) {
double expected = modulo(*i, *j);
double found = bt.call(*i, *j);
CHECK_EQ(expected, found);
CheckDoubleEq(expected, found);
}
}
}
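The floating-point hunks above swap CHECK_EQ(expected, ...) for a CheckDoubleEq helper. One plausible reading, and it is only an assumption about how such a helper can behave rather than a statement about the test suite's actual definition, is a comparison on the bit pattern of the two doubles: exact equality for ordinary values, and a NaN compares equal to an identically encoded NaN.

// check_double_eq_sketch.cc -- assumed behaviour, not the test suite's helper.
#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// Compare two doubles by their 64-bit encodings.
void CheckDoubleEqSketch(double expected, double actual) {
  uint64_t e, a;
  std::memcpy(&e, &expected, sizeof(e));
  std::memcpy(&a, &actual, sizeof(a));
  assert(e == a);
}

int main() {
  CheckDoubleEqSketch(1.5 + 2.25, 3.75);  // exact for representable results
  double n = std::nan("");
  CheckDoubleEqSketch(n, n);              // NaN equals the same NaN encoding
  return 0;
}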
@ -3223,7 +3219,7 @@ TEST(RunChangeInt32ToFloat64_B) {


TEST(RunChangeUint32ToFloat64_B) {
RawMachineAssemblerTester<int32_t> m(kMachUint32);
RawMachineAssemblerTester<uint32_t> m(kMachUint32);
double output = 0;

Node* convert = m.ChangeUint32ToFloat64(m.Parameter(0));

@ -3404,7 +3400,7 @@ TEST(RunChangeFloat64ToInt32_spilled) {
TEST(RunChangeFloat64ToUint32_spilled) {
RawMachineAssemblerTester<uint32_t> m;
const int kNumInputs = 32;
int32_t magic = 0x786234;
uint32_t magic = 0x786234;
double input[kNumInputs];
uint32_t result[kNumInputs];
Node* input_node[kNumInputs];

@ -3433,9 +3429,9 @@ TEST(RunChangeFloat64ToUint32_spilled) {

for (int i = 0; i < kNumInputs; i++) {
if (i % 2) {
CHECK_UINT32_EQ(result[i], static_cast<uint32_t>(100 + i + 2147483648u));
CHECK_EQ(result[i], static_cast<uint32_t>(100 + i + 2147483648u));
} else {
CHECK_UINT32_EQ(result[i], static_cast<uint32_t>(100 + i));
CHECK_EQ(result[i], static_cast<uint32_t>(100 + i));
}
}
}

@ -3444,7 +3440,7 @@ TEST(RunChangeFloat64ToUint32_spilled) {
TEST(RunTruncateFloat64ToFloat32_spilled) {
RawMachineAssemblerTester<uint32_t> m;
const int kNumInputs = 32;
int32_t magic = 0x786234;
uint32_t magic = 0x786234;
double input[kNumInputs];
float result[kNumInputs];
Node* input_node[kNumInputs];

@ -4368,7 +4364,7 @@ TEST(RunTruncateInt64ToInt32P) {
FOR_UINT32_INPUTS(i) {
FOR_UINT32_INPUTS(j) {
expected = (static_cast<uint64_t>(*j) << 32) | *i;
CHECK_UINT32_EQ(expected, m.Call());
CHECK_EQ(static_cast<int32_t>(expected), m.Call());
}
}
}
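In the RunTruncateInt64ToInt32P hunk the expected value is built in 64 bits and then narrowed with static_cast<int32_t>, so its type matches the int32_t that m.Call() returns. The narrowing keeps only the low 32 bits, which is what the truncation operation under test produces; the small stand-alone illustration below assumes the usual two's-complement wrap-around for the out-of-range cast, and its names are purely illustrative.

// truncate_sketch.cc -- arithmetic illustration only.
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t low = 0x89ABCDEFu;
  uint32_t high = 0x01234567u;
  uint64_t wide = (static_cast<uint64_t>(high) << 32) | low;  // 64-bit expected value
  // Casting to int32_t keeps the low 32 bits (0x89ABCDEF) and reinterprets
  // them as a signed value, matching the generated code's return type.
  int32_t truncated = static_cast<int32_t>(wide);
  std::printf("wide=0x%llx truncated=0x%x\n",
              static_cast<unsigned long long>(wide),
              static_cast<uint32_t>(truncated));
  return 0;
}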
@ -4504,7 +4500,7 @@ TEST(RunTruncateFloat64ToFloat32) {
input = *i;
volatile double expected = DoubleToFloat32(input);
CHECK_EQ(0, m.Call());
CHECK_EQ(expected, actual);
CheckDoubleEq(expected, actual);
}
}

@ -44,7 +44,7 @@ class ValueHelper {

void CheckUint32Constant(int32_t expected, Node* node) {
CHECK_EQ(IrOpcode::kInt32Constant, node->opcode());
CHECK_EQ(expected, OpParameter<uint32_t>(node));
CHECK_EQ(expected, OpParameter<int32_t>(node));
}

void CheckHeapConstant(Object* expected, Node* node) {
@ -150,20 +150,20 @@ static void XGetter(const Info& info, int offset) {
ApiTestFuzzer::Fuzz();
v8::Isolate* isolate = CcTest::isolate();
CHECK_EQ(isolate, info.GetIsolate());
CHECK_EQ(x_receiver, info.This());
CHECK(x_receiver->Equals(info.This()));
info.GetReturnValue().Set(v8_num(x_register[offset]));
}


static void XGetter(Local<String> name,
const v8::PropertyCallbackInfo<v8::Value>& info) {
CHECK_EQ(x_holder, info.Holder());
CHECK(x_holder->Equals(info.Holder()));
XGetter(info, 0);
}


static void XGetter(const v8::FunctionCallbackInfo<v8::Value>& info) {
CHECK_EQ(x_receiver, info.Holder());
CHECK(x_receiver->Equals(info.Holder()));
XGetter(info, 1);
}

@ -172,8 +172,8 @@ template<class Info>
static void XSetter(Local<Value> value, const Info& info, int offset) {
v8::Isolate* isolate = CcTest::isolate();
CHECK_EQ(isolate, info.GetIsolate());
CHECK_EQ(x_holder, info.This());
CHECK_EQ(x_holder, info.Holder());
CHECK(x_holder->Equals(info.This()));
CHECK(x_holder->Equals(info.Holder()));
x_register[offset] = value->Int32Value();
info.GetReturnValue().Set(v8_num(-1));
}
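The accessor hunks above stop comparing v8::Handle values with CHECK_EQ and ask the wrapped values via Equals() instead: two handles can name different locations while still referring to equal JavaScript values, so handle identity is the wrong question for these assertions. A toy stand-in for that distinction follows; it is deliberately not the V8 API, and every name in it is invented for the illustration.

// handle_equals_sketch.cc -- toy types, only to illustrate identity vs. value equality.
#include <cassert>
#include <memory>
#include <string>

struct ToyValue {
  std::string payload;
  bool Equals(const ToyValue& other) const { return payload == other.payload; }
};

// A "handle" here is just a shared_ptr; comparing handles compares identity.
using ToyHandle = std::shared_ptr<ToyValue>;

int main() {
  ToyHandle a = std::make_shared<ToyValue>(ToyValue{"ggggg"});
  ToyHandle b = std::make_shared<ToyValue>(ToyValue{"ggggg"});
  assert(a != b);         // distinct handles: identity differs
  assert(a->Equals(*b));  // but the values they refer to are equal
  return 0;
}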
@ -222,10 +222,10 @@ THREADED_TEST(AccessorIC) {
" result.push(obj[key_1]);"
"}"
"result"));
CHECK_EQ(80, array->Length());
CHECK_EQ(80u, array->Length());
for (int i = 0; i < 80; i++) {
v8::Handle<Value> entry = array->Get(v8::Integer::New(isolate, i));
CHECK_EQ(v8::Integer::New(isolate, i/2), entry);
CHECK(v8::Integer::New(isolate, i / 2)->Equals(entry));
}
}

@ -407,7 +407,7 @@ THREADED_TEST(Regress1054726) {
"for (var i = 0; i < 5; i++) {"
" try { obj.x; } catch (e) { result += e; }"
"}; result"))->Run();
CHECK_EQ(v8_str("ggggg"), result);
CHECK(v8_str("ggggg")->Equals(result));

result = Script::Compile(String::NewFromUtf8(
isolate,

@ -415,7 +415,7 @@ THREADED_TEST(Regress1054726) {
"for (var i = 0; i < 5; i++) {"
" try { obj.x = i; } catch (e) { result += e; }"
"}; result"))->Run();
CHECK_EQ(v8_str("01234"), result);
CHECK(v8_str("01234")->Equals(result));
}
@ -987,10 +987,10 @@ TEST(11) {
Object* dummy = CALL_GENERATED_CODE(f, &i, 0, 0, 0, 0);
USE(dummy);

CHECK_EQ(0xabcd0001, i.a);
CHECK_EQ(static_cast<int32_t>(0xabcd0001), i.a);
CHECK_EQ(static_cast<int32_t>(0xabcd0000) >> 1, i.b);
CHECK_EQ(0x00000000, i.c);
CHECK_EQ(0xffffffff, i.d);
CHECK_EQ(static_cast<int32_t>(0xffffffff), i.d);
}
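TEST(11) shows the other recurring fix in these hunks: hexadecimal literals such as 0xabcd0001 and 0xffffffff do not fit in a signed 32-bit int, so they have unsigned type, and a type-checked CHECK_EQ against an int32_t struct field needs either a static_cast on the literal (as above) or, as in the later hunks, a u suffix that keeps both sides unsigned. The small demonstration below assumes the common 32-bit-int target; the file name and values are illustrative only.

// literal_types_sketch.cc -- illustration of the signed/unsigned literal issue.
#include <cassert>
#include <cstdint>
#include <type_traits>

int main() {
  // On 32-bit-int targets a hex literal above INT_MAX has type unsigned int.
  static_assert(std::is_same<decltype(0xabcd0001), unsigned int>::value,
                "literal is unsigned here");
  // Casting the literal pins the comparison to int32_t on both sides...
  int32_t a = static_cast<int32_t>(0xabcd0001);
  assert(a == -1412628479);
  // ...whereas a 'u' suffix keeps both sides of the comparison unsigned.
  uint32_t d = 0xffffffffu;
  assert(d == 4294967295u);
  return 0;
}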
@ -1129,8 +1129,8 @@ TEST(13) {
CHECK_EQ(14.7610017472335499, t.i);
CHECK_EQ(16.0, t.j);
CHECK_EQ(73.8818412254460241, t.k);
CHECK_EQ(372106121, t.low);
CHECK_EQ(1079146608, t.high);
CHECK_EQ(372106121u, t.low);
CHECK_EQ(1079146608u, t.high);
}
}

@ -1321,22 +1321,22 @@ TEST(15) {
t.dstA7 = 0;
Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
USE(dummy);
CHECK_EQ(0x01020304, t.dst0);
CHECK_EQ(0x11121314, t.dst1);
CHECK_EQ(0x21222324, t.dst2);
CHECK_EQ(0x31323334, t.dst3);
CHECK_EQ(0x41424344, t.dst4);
CHECK_EQ(0x51525354, t.dst5);
CHECK_EQ(0x61626364, t.dst6);
CHECK_EQ(0x71727374, t.dst7);
CHECK_EQ(0x00430044, t.dstA0);
CHECK_EQ(0x00410042, t.dstA1);
CHECK_EQ(0x00830084, t.dstA2);
CHECK_EQ(0x00810082, t.dstA3);
CHECK_EQ(0x00430044, t.dstA4);
CHECK_EQ(0x00410042, t.dstA5);
CHECK_EQ(0x00830084, t.dstA6);
CHECK_EQ(0x00810082, t.dstA7);
CHECK_EQ(0x01020304u, t.dst0);
CHECK_EQ(0x11121314u, t.dst1);
CHECK_EQ(0x21222324u, t.dst2);
CHECK_EQ(0x31323334u, t.dst3);
CHECK_EQ(0x41424344u, t.dst4);
CHECK_EQ(0x51525354u, t.dst5);
CHECK_EQ(0x61626364u, t.dst6);
CHECK_EQ(0x71727374u, t.dst7);
CHECK_EQ(0x00430044u, t.dstA0);
CHECK_EQ(0x00410042u, t.dstA1);
CHECK_EQ(0x00830084u, t.dstA2);
CHECK_EQ(0x00810082u, t.dstA3);
CHECK_EQ(0x00430044u, t.dstA4);
CHECK_EQ(0x00410042u, t.dstA5);
CHECK_EQ(0x00830084u, t.dstA6);
CHECK_EQ(0x00810082u, t.dstA7);
}
}

@ -1406,11 +1406,11 @@ TEST(16) {
t.dst4 = 0;
Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
USE(dummy);
CHECK_EQ(0x12130304, t.dst0);
CHECK_EQ(0x01021213, t.dst1);
CHECK_EQ(0x00010003, t.dst2);
CHECK_EQ(0x00000003, t.dst3);
CHECK_EQ(0x11121313, t.dst4);
CHECK_EQ(0x12130304u, t.dst0);
CHECK_EQ(0x01021213u, t.dst1);
CHECK_EQ(0x00010003u, t.dst2);
CHECK_EQ(0x00000003u, t.dst3);
CHECK_EQ(0x11121313u, t.dst4);
}

@ -1542,10 +1542,10 @@ TEST(udiv) {
#endif
F3 f = FUNCTION_CAST<F3>(code->entry());
Object* dummy;
TEST_UDIV(0, 0, 0);
TEST_UDIV(0, 1024, 0);
TEST_UDIV(5, 10, 2);
TEST_UDIV(3, 10, 3);
TEST_UDIV(0u, 0, 0);
TEST_UDIV(0u, 1024, 0);
TEST_UDIV(5u, 10, 2);
TEST_UDIV(3u, 10, 3);
USE(dummy);
}
}