[gn] Enable stricter build flags
Default to the chromium-internal build config (instead of the more
permissive no_chromium_code config).

BUG=v8:5878
Review-Url: https://codereview.chromium.org/2758563002
Cr-Commit-Position: refs/heads/master@{#43909}
parent 1191e6f6ef
commit 542b41a7cc
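The diff below is reproduced in unified form. Most of it is mechanical: either a warning is disabled wholesale (the /wd lines in the first hunk) or an individual site is rewritten so the stricter config no longer fires. As an illustration of the first warning class — hypothetical code, not from this CL — MSVC's C4267 fires whenever a size_t silently narrows to int:

    #include <vector>

    // Hypothetical example of MSVC C4267 ("conversion from 'size_t' to 'int',
    // possible loss of data"), one of the warnings the stricter config promotes
    // to an error unless /wd4267 is passed or the conversion is made explicit.
    int CountItems(const std::vector<int>& items) {
      // int count = items.size();                 // warns: C4267 on 64-bit
      int count = static_cast<int>(items.size());  // explicit cast, no warning
      return count;
    }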
@@ -463,6 +463,20 @@ config("toolchain") {
 #    cflags += [ "-Wshorten-64-to-32" ]
 #  }
   }
+
+  if (is_win) {
+    cflags += [
+      "/wd4245",  # Conversion with signed/unsigned mismatch.
+      "/wd4267",  # Conversion with possible loss of data.
+      "/wd4324",  # Padding structure due to alignment.
+      "/wd4701",  # Potentially uninitialized local variable.
+      "/wd4702",  # Unreachable code.
+      "/wd4703",  # Potentially uninitialized local pointer variable.
+      "/wd4709",  # Comma operator within array index expr (bugged).
+      "/wd4718",  # Recursive call has no side-effect.
+      "/wd4800",  # Forcing value to bool.
+    ]
+  }
 }
 
 ###############################################################################
@@ -2289,11 +2303,6 @@ v8_source_set("v8_base") {
   sources += [ v8_generated_peephole_source ]
   deps += [ ":run_mkpeephole" ]
-
-  if (is_win) {
-    # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
-    cflags = [ "/wd4267" ]
-  }
 
   if (v8_enable_i18n_support) {
     deps += [ "//third_party/icu" ]
     if (is_win) {
@@ -66,9 +66,8 @@ v8_inspector_js_protocol = v8_path_prefix + "/src/inspector/js_protocol.json"
 #
 
 # Common configs to remove or add in all v8 targets.
-v8_remove_configs = [ "//build/config/compiler:chromium_code" ]
+v8_remove_configs = []
 v8_add_configs = [
-  "//build/config/compiler:no_chromium_code",
   v8_path_prefix + ":features",
   v8_path_prefix + ":toolchain",
 ]
@@ -9398,7 +9398,7 @@ void debug::GetLoadedScripts(v8::Isolate* v8_isolate,
   i::DisallowHeapAllocation no_gc;
   i::Script::Iterator iterator(isolate);
   i::Script* script;
-  while ((script = iterator.Next())) {
+  while ((script = iterator.Next()) != nullptr) {
     if (!script->IsUserJavaScript()) continue;
     if (script->HasValidSource()) {
       i::HandleScope handle_scope(isolate);
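Several hunks below repeat this exact change: a pointer-producing assignment used as a loop condition gains an explicit != nullptr, so the condition is a genuine bool rather than an implicit pointer-to-bool conversion. A minimal sketch of the pattern, with a hypothetical IntIterator standing in for V8's iterators:

    #include <cstdio>

    // Hypothetical iterator mirroring V8's convention: next() yields elements
    // and returns nullptr when exhausted.
    struct IntIterator {
      int* cur;
      int* end;
      int* next() { return cur != end ? cur++ : nullptr; }
    };

    int main() {
      int data[] = {1, 2, 3};
      IntIterator it{data, data + 3};
      int* p;
      // The explicit comparison makes the assignment-as-condition unambiguous
      // to both readers and compilers running at a higher warning level.
      while ((p = it.next()) != nullptr) {
        std::printf("%d\n", *p);
      }
      return 0;
    }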
@@ -38,7 +38,7 @@ class ContextSlotCache {
     for (int i = 0; i < kLength; ++i) {
       keys_[i].data = NULL;
       keys_[i].name = NULL;
-      values_[i] = kNotFound;
+      values_[i] = static_cast<uint32_t>(kNotFound);
     }
   }
 
@@ -214,8 +214,9 @@ class ModuleDescriptor : public ZoneObject {
 
   int AddModuleRequest(const AstRawString* specifier) {
     DCHECK_NOT_NULL(specifier);
+    int module_requests_count = static_cast<int>(module_requests_.size());
     auto it = module_requests_
-                  .insert(std::make_pair(specifier, module_requests_.size()))
+                  .insert(std::make_pair(specifier, module_requests_count))
                   .first;
     return it->second;
   }
@@ -104,7 +104,7 @@ static V8_INLINE void UnlockNativeHandle(PCRITICAL_SECTION cs) {
 
 
 static V8_INLINE bool TryLockNativeHandle(PCRITICAL_SECTION cs) {
-  return TryEnterCriticalSection(cs);
+  return TryEnterCriticalSection(cs) != FALSE;
 }
 
 #endif  // V8_OS_POSIX
@@ -816,6 +816,7 @@ void OS::Guard(void* address, const size_t size) {
 void OS::Unprotect(void* address, const size_t size) {
   LPVOID result = VirtualAlloc(address, size, MEM_COMMIT, PAGE_READWRITE);
   DCHECK_IMPLIES(result != nullptr, GetLastError() == 0);
+  USE(result);
 }
 
 void OS::Sleep(TimeDelta interval) {
@@ -148,7 +148,8 @@ void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) {
   move->SetPending();
 
   // We may need to split moves between FP locations differently.
-  bool is_fp_loc_move = !kSimpleFPAliasing && destination.IsFPLocationOperand();
+  const bool is_fp_loc_move =
+      !kSimpleFPAliasing && destination.IsFPLocationOperand();
 
   // Perform a depth-first traversal of the move graph to resolve dependencies.
   // Any unperformed, unpending move with a source the same as this one's
@@ -158,7 +159,7 @@ void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) {
     if (other->IsEliminated()) continue;
     if (other->IsPending()) continue;
     if (other->source().InterferesWith(destination)) {
-      if (!kSimpleFPAliasing && is_fp_loc_move &&
+      if (is_fp_loc_move &&
           LocationOperand::cast(other->source()).representation() >
               split_rep_) {
         // 'other' must also be an FP location move. Break it into fragments
@@ -213,7 +214,7 @@ void GapResolver::PerformMove(ParallelMove* moves, MoveOperands* move) {
   move->Eliminate();
 
   // Update outstanding moves whose source may now have been moved.
-  if (!kSimpleFPAliasing && is_fp_loc_move) {
+  if (is_fp_loc_move) {
     // We may have to split larger moves.
     for (size_t i = 0; i < moves->size(); ++i) {
       auto other = (*moves)[i];
@@ -1983,8 +1983,8 @@ class InspectorFrontend final : public v8_inspector::V8Inspector::Channel {
     if (callback->IsFunction()) {
       v8::TryCatch try_catch(isolate_);
       Local<Value> args[] = {message};
-      MaybeLocal<Value> result = Local<Function>::Cast(callback)->Call(
-          context, Undefined(isolate_), 1, args);
+      USE(Local<Function>::Cast(callback)->Call(context, Undefined(isolate_), 1,
+                                                args));
 #ifdef DEBUG
       if (try_catch.HasCaught()) {
         Local<Object> exception = Local<Object>::Cast(try_catch.Exception());
@@ -1291,7 +1291,7 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
     HeapIterator iterator(isolate_->heap());
     HeapObject* obj;
 
-    while ((obj = iterator.next())) {
+    while ((obj = iterator.next()) != nullptr) {
       if (obj->IsJSFunction()) {
         JSFunction* function = JSFunction::cast(obj);
         if (!function->Inlines(*shared)) continue;
@@ -1637,7 +1637,7 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
   {
     Script::Iterator iterator(isolate_);
     Script* script;
-    while ((script = iterator.Next())) {
+    while ((script = iterator.Next()) != nullptr) {
       if (script->HasValidSource()) results->set(length++, script);
     }
   }
@@ -144,7 +144,7 @@ void StatisticsExtension::GetCounters(
   HeapObject* obj;
   int reloc_info_total = 0;
   int source_position_table_total = 0;
-  while ((obj = iterator.next())) {
+  while ((obj = iterator.next()) != nullptr) {
     if (obj->IsCode()) {
       Code* code = Code::cast(obj);
       reloc_info_total += code->relocation_info()->Size();
@@ -43,7 +43,7 @@ inline FieldIndex FieldIndex::ForPropertyIndex(Map* map,
 // FieldIndex object from it.
 inline FieldIndex FieldIndex::ForLoadByFieldIndex(Map* map, int orig_index) {
   int field_index = orig_index;
-  int is_inobject = true;
+  bool is_inobject = true;
   bool is_double = field_index & 1;
   int first_inobject_offset = 0;
   field_index >>= 1;
@@ -1125,23 +1125,23 @@ inline bool IsValidFunctionKind(FunctionKind kind) {
 
 inline bool IsArrowFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kArrowFunction;
+  return (kind & FunctionKind::kArrowFunction) != 0;
 }
 
 
 inline bool IsGeneratorFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kGeneratorFunction;
+  return (kind & FunctionKind::kGeneratorFunction) != 0;
 }
 
 inline bool IsModule(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kModule;
+  return (kind & FunctionKind::kModule) != 0;
 }
 
 inline bool IsAsyncFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kAsyncFunction;
+  return (kind & FunctionKind::kAsyncFunction) != 0;
 }
 
 inline bool IsResumableFunction(FunctionKind kind) {
@@ -1150,45 +1150,45 @@ inline bool IsResumableFunction(FunctionKind kind) {
 
 inline bool IsConciseMethod(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kConciseMethod;
+  return (kind & FunctionKind::kConciseMethod) != 0;
 }
 
 inline bool IsGetterFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kGetterFunction;
+  return (kind & FunctionKind::kGetterFunction) != 0;
 }
 
 inline bool IsSetterFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kSetterFunction;
+  return (kind & FunctionKind::kSetterFunction) != 0;
 }
 
 inline bool IsAccessorFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kAccessorFunction;
+  return (kind & FunctionKind::kAccessorFunction) != 0;
 }
 
 
 inline bool IsDefaultConstructor(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kDefaultConstructor;
+  return (kind & FunctionKind::kDefaultConstructor) != 0;
 }
 
 
 inline bool IsBaseConstructor(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kBaseConstructor;
+  return (kind & FunctionKind::kBaseConstructor) != 0;
 }
 
 inline bool IsDerivedConstructor(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kDerivedConstructor;
+  return (kind & FunctionKind::kDerivedConstructor) != 0;
 }
 
 
 inline bool IsClassConstructor(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
-  return kind & FunctionKind::kClassConstructor;
+  return (kind & FunctionKind::kClassConstructor) != 0;
 }
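Every predicate above changes the same way: the bitwise AND yields an integer, and returning it from a bool function narrows implicitly — the pattern MSVC reports as C4800 ("forcing value to bool", one of the warnings listed in the first hunk). Comparing against zero produces the bool directly. A self-contained sketch with a hypothetical FunctionKind-like enum, not the real V8 definition:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical bitmask enum standing in for V8's FunctionKind.
    enum FunctionKind : uint16_t {
      kNormalFunction = 0,
      kArrowFunction = 1 << 0,
      kGeneratorFunction = 1 << 1,
    };

    // Before: implicit int -> bool narrowing in the return (C4800).
    //   bool IsArrowFunction(FunctionKind kind) { return kind & kArrowFunction; }

    // After: the comparison itself produces a bool, so nothing narrows.
    bool IsArrowFunction(FunctionKind kind) {
      return (kind & kArrowFunction) != 0;
    }

    int main() {
      std::printf("%d\n", IsArrowFunction(kArrowFunction));  // prints 1
      return 0;
    }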
@@ -852,7 +852,7 @@ AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
 
 
 AlwaysAllocateScope::~AlwaysAllocateScope() {
-  heap_->always_allocate_scope_count_.Increment(-1);
+  heap_->always_allocate_scope_count_.Decrement(1);
 }
 
 
@@ -758,7 +758,7 @@ void Heap::GarbageCollectionEpilogue() {
 void Heap::PreprocessStackTraces() {
   WeakFixedArray::Iterator iterator(weak_stack_trace_list());
   FixedArray* elements;
-  while ((elements = iterator.Next<FixedArray>())) {
+  while ((elements = iterator.Next<FixedArray>()) != nullptr) {
     for (int j = 1; j < elements->length(); j += 4) {
       Object* maybe_code = elements->get(j + 2);
       // If GC happens while adding a stack trace to the weak fixed array,
@@ -1646,15 +1646,15 @@ class Heap {
   }
 
   inline bool ShouldReduceMemory() const {
-    return current_gc_flags_ & kReduceMemoryFootprintMask;
+    return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
   }
 
   inline bool ShouldAbortIncrementalMarking() const {
-    return current_gc_flags_ & kAbortIncrementalMarkingMask;
+    return (current_gc_flags_ & kAbortIncrementalMarkingMask) != 0;
   }
 
   inline bool ShouldFinalizeIncrementalMarking() const {
-    return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
+    return (current_gc_flags_ & kFinalizeIncrementalMarkingMask) != 0;
   }
 
   void PreprocessStackTraces();
@@ -212,7 +212,7 @@ Page* Page::ConvertNewToOld(Page* old_page) {
   DCHECK(old_page->InNewSpace());
   OldSpace* old_space = old_page->heap()->old_space();
   old_page->set_owner(old_space);
-  old_page->SetFlags(0, ~0);
+  old_page->SetFlags(0, static_cast<uintptr_t>(~0));
   old_space->AccountCommitted(old_page->size());
   Page* new_page = Page::Initialize<kDoNotFreeMemory>(
       old_page->heap(), old_page, NOT_EXECUTABLE, old_space);
@@ -508,7 +508,7 @@ void Page::InitializeAsAnchor(Space* space) {
   set_owner(space);
   set_next_chunk(this);
   set_prev_chunk(this);
-  SetFlags(0, ~0);
+  SetFlags(0, static_cast<uintptr_t>(~0));
   SetFlag(ANCHOR);
 }
 
@@ -1637,7 +1637,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
       current_page->InsertAfter(anchor());
       current_page->ClearLiveness();
       current_page->SetFlags(anchor()->prev_page()->GetFlags(),
-                             Page::kCopyAllFlags);
+                             static_cast<uintptr_t>(Page::kCopyAllFlags));
       heap()->CreateFillerObjectAt(current_page->area_start(),
                                    static_cast<int>(current_page->area_size()),
                                    ClearRecordedSlots::kNo);
@@ -2094,7 +2094,8 @@ void SemiSpace::RemovePage(Page* page) {
 }
 
 void SemiSpace::PrependPage(Page* page) {
-  page->SetFlags(current_page()->GetFlags(), Page::kCopyAllFlags);
+  page->SetFlags(current_page()->GetFlags(),
+                 static_cast<uintptr_t>(Page::kCopyAllFlags));
   page->set_owner(this);
   page->InsertAfter(anchor());
   pages_used_++;
@@ -534,7 +534,7 @@ class MemoryChunk {
 
   void SetFlag(Flag flag) { flags_ |= flag; }
   void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); }
-  bool IsFlagSet(Flag flag) { return flags_ & flag; }
+  bool IsFlagSet(Flag flag) { return (flags_ & flag) != 0; }
 
   // Set or clear multiple flags at a time. The flags in the mask are set to
   // the value in "flags", the rest retain the current value in |flags_|.
@@ -789,6 +789,7 @@ class V8_EXPORT_PRIVATE Bytecodes final {
       default:
         return 0;
     }
     UNREACHABLE();
+    return 0;
   }
 
@@ -53,12 +53,12 @@ class Sampler {
   void DecreaseProfilingDepth();
 
   // Whether the sampler is running (that is, consumes resources).
-  bool IsActive() const { return base::NoBarrier_Load(&active_); }
+  bool IsActive() const { return base::NoBarrier_Load(&active_) != 0; }
 
   // CpuProfiler collects samples by calling DoSample directly
   // without calling Start. To keep it working, we register the sampler
   // with the CpuProfiler.
-  bool IsRegistered() const { return base::NoBarrier_Load(&registered_); }
+  bool IsRegistered() const { return base::NoBarrier_Load(&registered_) != 0; }
 
   void DoSample();
 
@@ -108,7 +108,7 @@ template <bool is_element>
 void LookupIterator::RestartInternal(InterceptorState interceptor_state) {
   interceptor_state_ = interceptor_state;
   property_details_ = PropertyDetails::Empty();
-  number_ = DescriptorArray::kNotFound;
+  number_ = static_cast<uint32_t>(DescriptorArray::kNotFound);
   Start<is_element>();
 }
 
@@ -69,7 +69,7 @@ class V8_EXPORT_PRIVATE LookupIterator final BASE_EMBEDDED {
         initial_holder_(holder),
         // kMaxUInt32 isn't a valid index.
         index_(kMaxUInt32),
-        number_(DescriptorArray::kNotFound) {
+        number_(static_cast<uint32_t>(DescriptorArray::kNotFound)) {
 #ifdef DEBUG
     uint32_t index;  // Assert that the name is not an array index.
     DCHECK(!name->AsArrayIndex(&index));
@@ -92,7 +92,7 @@ class V8_EXPORT_PRIVATE LookupIterator final BASE_EMBEDDED {
         receiver_(receiver),
         initial_holder_(holder),
         index_(index),
-        number_(DescriptorArray::kNotFound) {
+        number_(static_cast<uint32_t>(DescriptorArray::kNotFound)) {
     // kMaxUInt32 isn't a valid index.
     DCHECK_NE(kMaxUInt32, index_);
     Start<true>();
@@ -122,7 +122,9 @@ const uint32_t kMipsSwlOffset = 0;
 #error Unknown endianness
 #endif
 
+#ifndef __STDC_FORMAT_MACROS
 #define __STDC_FORMAT_MACROS
+#endif
 #include <inttypes.h>
 
 // Defines constants and accessor classes to assemble, disassemble and
@@ -12351,7 +12351,7 @@ static void InvalidatePrototypeChainsInternal(Map* map) {
   WeakFixedArray::Iterator iterator(proto_info->prototype_users());
   // For now, only maps register themselves as users.
   Map* user;
-  while ((user = iterator.Next<Map>())) {
+  while ((user = iterator.Next<Map>()) != nullptr) {
     // Walk the prototype chain (backwards, towards leaf objects) if necessary.
     InvalidatePrototypeChainsInternal(user);
   }
@@ -13342,7 +13342,7 @@ void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
   if (FLAG_enable_slow_asserts) {
     WeakFixedArray::Iterator iterator(*list);
     SharedFunctionInfo* next;
-    while ((next = iterator.Next<SharedFunctionInfo>())) {
+    while ((next = iterator.Next<SharedFunctionInfo>()) != nullptr) {
       DCHECK_NE(next, *shared);
     }
   }
@@ -10144,7 +10144,7 @@ class Oddball: public HeapObject {
 
   static const byte kFalse = 0;
   static const byte kTrue = 1;
-  static const byte kNotBooleanMask = ~1;
+  static const byte kNotBooleanMask = static_cast<byte>(~1);
   static const byte kTheHole = 2;
   static const byte kNull = 3;
   static const byte kArgumentsMarker = 4;
@@ -1715,6 +1715,7 @@ class RootsReferencesExtractor : public ObjectVisitor {
   void FillReferences(V8HeapExplorer* explorer) {
     DCHECK(strong_references_.length() <= all_references_.length());
     Builtins* builtins = heap_->isolate()->builtins();
+    USE(builtins);
     int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
     while (all_index < all_references_.length()) {
       bool is_strong = strong_index < strong_references_.length()
@@ -1312,7 +1312,7 @@ RUNTIME_FUNCTION(Runtime_DebugReferencedBy) {
     // Get the constructor function for context extension and arguments array.
     Object* arguments_fun = isolate->sloppy_arguments_map()->GetConstructor();
     HeapObject* heap_obj;
-    while ((heap_obj = iterator.next())) {
+    while ((heap_obj = iterator.next()) != nullptr) {
      if (!heap_obj->IsJSObject()) continue;
       JSObject* obj = JSObject::cast(heap_obj);
       if (obj->IsJSContextExtensionObject()) continue;
@@ -1365,7 +1365,7 @@ RUNTIME_FUNCTION(Runtime_DebugConstructedBy) {
   {
     HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
     HeapObject* heap_obj;
-    while ((heap_obj = iterator.next())) {
+    while ((heap_obj = iterator.next()) != nullptr) {
       if (!heap_obj->IsJSObject()) continue;
       JSObject* obj = JSObject::cast(heap_obj);
       if (obj->map()->GetConstructor() != *constructor) continue;
@@ -32,7 +32,7 @@ RUNTIME_FUNCTION(Runtime_LiveEditFindSharedFunctionInfosForScript) {
   {
     HeapIterator iterator(heap);
     HeapObject* heap_obj;
-    while ((heap_obj = iterator.next())) {
+    while ((heap_obj = iterator.next()) != nullptr) {
       if (!heap_obj->IsSharedFunctionInfo()) continue;
       SharedFunctionInfo* shared = SharedFunctionInfo::cast(heap_obj);
       if (shared->script() != *script) continue;
@@ -523,7 +523,7 @@ Maybe<bool> ValueSerializer::WriteJSObject(Handle<JSObject> object) {
 Maybe<bool> ValueSerializer::WriteJSObjectSlow(Handle<JSObject> object) {
   WriteTag(SerializationTag::kBeginJSObject);
   Handle<FixedArray> keys;
-  uint32_t properties_written;
+  uint32_t properties_written = 0;
   if (!KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                ENUMERABLE_STRINGS)
            .ToHandle(&keys) ||
@@ -632,7 +632,7 @@ Maybe<bool> ValueSerializer::WriteJSArray(Handle<JSArray> array) {
     WriteTag(SerializationTag::kBeginSparseJSArray);
     WriteVarint<uint32_t>(length);
     Handle<FixedArray> keys;
-    uint32_t properties_written;
+    uint32_t properties_written = 0;
     if (!KeyAccumulator::GetKeys(array, KeyCollectionMode::kOwnOnly,
                                  ENUMERABLE_STRINGS)
              .ToHandle(&keys) ||
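Both properties_written fixes above target the "potentially uninitialized local variable" class of warnings (MSVC C4701, also disabled globally in the first hunk): when the early-out branch of the condition is taken, the variable could be read before anything wrote to it. A minimal sketch of the pattern, with a hypothetical ComputeCount helper rather than the real serializer code:

    #include <cstdio>

    // Hypothetical out-parameter helper: writes *out only on success.
    bool ComputeCount(bool ok, unsigned* out) {
      if (!ok) return false;
      *out = 42;
      return true;
    }

    int main(int argc, char**) {
      unsigned properties_written = 0;  // was: uninitialized (MSVC C4701)
      if (ComputeCount(argc > 1, &properties_written)) {
        std::printf("count: %u\n", properties_written);
      }
      // Initializing at declaration gives every control path a defined value.
      return 0;
    }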
@@ -367,7 +367,7 @@ void WasmModuleBuilder::WriteTo(ZoneBuffer& buffer) const {
     buffer.write_size(functions_.size());
     for (auto function : functions_) {
       function->WriteSignature(buffer);
-      exports += function->exported_names_.size();
+      exports += static_cast<uint32_t>(function->exported_names_.size());
       if (function->name_.size() > 0) has_names = true;
     }
     FixupSection(buffer, start);
@@ -294,7 +294,7 @@ class WasmCompiledModule : public FixedArray {
 #define DEBUG_ONLY_TABLE(MACRO) MACRO(SMALL_NUMBER, uint32_t, instance_id)
 #else
 #define DEBUG_ONLY_TABLE(IGNORE)
-  uint32_t instance_id() const { return -1; }
+  uint32_t instance_id() const { return static_cast<uint32_t>(-1); }
 #endif
 
 #define WCM_PROPERTY_TABLE(MACRO) \
@@ -5715,7 +5715,7 @@ TEST(NewSpaceAllocationCounter) {
   size_t counter3 = heap->NewSpaceAllocationCounter();
   CHECK_EQ(0U, counter3 - counter2);
   // Test counter overflow.
-  size_t max_counter = -1;
+  size_t max_counter = static_cast<size_t>(-1);
   heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
   size_t start = heap->NewSpaceAllocationCounter();
   for (int i = 0; i < 20; i++) {
@@ -5748,7 +5748,7 @@ TEST(OldSpaceAllocationCounter) {
   size_t counter4 = heap->OldGenerationAllocationCounter();
   CHECK_LE(kSize, counter4 - counter3);
   // Test counter overflow.
-  size_t max_counter = -1;
+  size_t max_counter = static_cast<size_t>(-1);
   heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
                                                          10 * kSize);
   size_t start = heap->OldGenerationAllocationCounter();
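The two max_counter fixes convert an idiom rather than change behavior: assigning -1 to a size_t has always wrapped to the maximum value, but the implicit signed-to-unsigned conversion is exactly what MSVC's C4245 (disabled above via /wd4245) complains about, so the tests spell it out. A short sketch:

    #include <cstddef>
    #include <cstdio>

    int main() {
      // Before: size_t max_counter = -1;  // implicit signed -> unsigned (C4245)
      // After: the cast states the intended wraparound explicitly.
      size_t max_counter = static_cast<size_t>(-1);  // equals SIZE_MAX
      std::printf("%zu\n", max_counter);
      return 0;
    }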
@@ -592,7 +592,8 @@ TEST(InterpreterBinaryOpTypeFeedback) {
      {Token::Value::SUB, ast_factory.NewSmi(2), ast_factory.NewSmi(3),
       Handle<Smi>(Smi::FromInt(-1), isolate),
       BinaryOperationFeedback::kSignedSmall},
-     {Token::Value::SUB, ast_factory.NewSmi(Smi::kMinValue),
+     {Token::Value::SUB,
+      ast_factory.NewSmi(static_cast<uint32_t>(Smi::kMinValue)),
       ast_factory.NewSmi(1),
       isolate->factory()->NewHeapNumber(Smi::kMinValue - 1.0),
       BinaryOperationFeedback::kNumber},
@@ -610,7 +611,8 @@ TEST(InterpreterBinaryOpTypeFeedback) {
      {Token::Value::MUL, ast_factory.NewSmi(2), ast_factory.NewSmi(3),
       Handle<Smi>(Smi::FromInt(6), isolate),
       BinaryOperationFeedback::kSignedSmall},
-     {Token::Value::MUL, ast_factory.NewSmi(Smi::kMinValue),
+     {Token::Value::MUL,
+      ast_factory.NewSmi(static_cast<uint32_t>(Smi::kMinValue)),
       ast_factory.NewSmi(2),
       isolate->factory()->NewHeapNumber(Smi::kMinValue * 2.0),
       BinaryOperationFeedback::kNumber},
@@ -645,8 +647,8 @@ TEST(InterpreterBinaryOpTypeFeedback) {
      {Token::Value::MOD, ast_factory.NewSmi(5), ast_factory.NewSmi(3),
       Handle<Smi>(Smi::FromInt(2), isolate),
       BinaryOperationFeedback::kSignedSmall},
-     {Token::Value::MOD, ast_factory.NewSmi(-4), ast_factory.NewSmi(2),
-      isolate->factory()->NewHeapNumber(-0.0),
+     {Token::Value::MOD, ast_factory.NewSmi(static_cast<uint32_t>(-4)),
+      ast_factory.NewSmi(2), isolate->factory()->NewHeapNumber(-0.0),
       BinaryOperationFeedback::kNumber},
      {Token::Value::MOD, ast_factory.NewNumber(3.1415), ast_factory.NewSmi(3),
       isolate->factory()->NewHeapNumber(fmod(3.1415, 3.0)),
@@ -723,7 +725,8 @@ TEST(InterpreterBinaryOpSmiTypeFeedback) {
      {Token::Value::SUB, ast_factory.NewSmi(2), 42,
       Handle<Smi>(Smi::FromInt(-40), isolate),
       BinaryOperationFeedback::kSignedSmall},
-     {Token::Value::SUB, ast_factory.NewSmi(Smi::kMinValue), 1,
+     {Token::Value::SUB,
+      ast_factory.NewSmi(static_cast<uint32_t>(Smi::kMinValue)), 1,
       isolate->factory()->NewHeapNumber(Smi::kMinValue - 1.0),
       BinaryOperationFeedback::kNumber},
      {Token::Value::SUB, ast_factory.NewNumber(3.1415), 2,
@@ -1114,16 +1114,9 @@ TEST(CachedHashOverflow) {
   v8::HandleScope handle_scope(CcTest::isolate());
   // Lines must be executed sequentially. Combining them into one script
   // makes the bug go away.
-  const char* lines[] = {
-      "var x = [];",
-      "x[4] = 42;",
-      "var s = \"1073741828\";",
-      "x[s];",
-      "x[s] = 37;",
-      "x[4];",
-      "x[s];",
-      NULL
-  };
+  const char* lines[] = {"var x = [];", "x[4] = 42;", "var s = \"1073741828\";",
+                         "x[s];",       "x[s] = 37;", "x[4];",
+                         "x[s];"};
 
   Handle<Smi> fortytwo(Smi::FromInt(42), isolate);
   Handle<Smi> thirtyseven(Smi::FromInt(37), isolate);
@@ -1136,9 +1129,9 @@ TEST(CachedHashOverflow) {
       thirtyseven  // Bug yielded 42 here.
   };
 
-  const char* line;
   v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext();
-  for (int i = 0; (line = lines[i]); i++) {
+  for (size_t i = 0; i < arraysize(lines); i++) {
+    const char* line = lines[i];
     printf("%s\n", line);
     v8::Local<v8::Value> result =
         v8::Script::Compile(context,
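The loop rewrite above drops the NULL sentinel entirely: the bound comes from the array type, and the index becomes a size_t, avoiding both the assignment-in-condition and the signed index. A sketch of the same shape, using a hypothetical ArraySize helper in place of V8's arraysize macro:

    #include <cstddef>
    #include <cstdio>

    // Hypothetical stand-in for V8's arraysize() macro: element count of a
    // static array, computed at compile time from the array type.
    template <typename T, size_t N>
    constexpr size_t ArraySize(T (&)[N]) {
      return N;
    }

    int main() {
      const char* lines[] = {"a;", "b;", "c;"};  // no terminating NULL needed
      for (size_t i = 0; i < ArraySize(lines); i++) {
        const char* line = lines[i];
        std::printf("%s\n", line);
      }
      return 0;
    }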
@@ -288,7 +288,6 @@ class UtilsExtension : public v8::Extension {
       fprintf(stderr, "Internal error: cancelPauseOnNextStatement().");
       Exit();
     }
-    v8::Local<v8::Context> context = args.GetIsolate()->GetCurrentContext();
     inspector_client_->session()->cancelPauseOnNextStatement();
   }
 
@@ -15,6 +15,7 @@ class WasmMacroGenTest : public TestWithZone {};
 #define EXPECT_SIZE(size, ...)                          \
   do {                                                  \
     byte code[] = {__VA_ARGS__};                        \
+    USE(code);                                          \
     EXPECT_EQ(static_cast<size_t>(size), sizeof(code)); \
   } while (false)
 