Fix invalid use of int in Zone.

Review URL: https://codereview.chromium.org/924453002

Cr-Commit-Position: refs/heads/master@{#26609}
Author: bmeurer, 2015-02-12 04:46:58 -08:00 (committed by Commit bot)
Parent: e87c0bac35
Commit: 4465836c8a
19 changed files with 70 additions and 89 deletions
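
The core of the change is visible in the zone.h and zone.cc hunks below: allocation sizes in Zone move from int/unsigned to size_t, and call sites drop their static_cast<int> narrowing. As a minimal standalone sketch (not code from this commit) of why narrowing an allocation size to int is unsafe on 64-bit targets:

  #include <cstddef>
  #include <cstdio>

  int main() {
    // A request larger than INT_MAX is representable in size_t...
    size_t request = (size_t{1} << 31) + 8;
    // ...but narrowing it to int is implementation-defined and typically wraps
    // to a negative value, which an int-based allocator would misinterpret.
    int narrowed = static_cast<int>(request);
    std::printf("request = %zu, narrowed = %d\n", request, narrowed);
  }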

View File

@@ -200,9 +200,7 @@ class AstNode: public ZoneObject {
};
#undef DECLARE_TYPE_ENUM
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
explicit AstNode(int position): position_(position) {}
virtual ~AstNode() {}

View File

@@ -1565,7 +1565,7 @@ CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
CompilationPhase::~CompilationPhase() {
if (FLAG_hydrogen_stats) {
- unsigned size = zone()->allocation_size();
+ size_t size = zone()->allocation_size();
size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
}

View File

@@ -773,7 +773,7 @@ class CompilationPhase BASE_EMBEDDED {
const char* name_;
CompilationInfo* info_;
Zone zone_;
- unsigned info_zone_start_allocation_size_;
+ size_t info_zone_start_allocation_size_;
base::ElapsedTimer timer_;
DISALLOW_COPY_AND_ASSIGN(CompilationPhase);

View File

@@ -1064,7 +1064,7 @@ void InstructionSelector::VisitSwitch(Node* node, BasicBlock** branches,
Node* const value = node->InputAt(0);
size_t const input_count = branch_count + 1;
InstructionOperand* const inputs =
- zone()->NewArray<InstructionOperand>(static_cast<int>(input_count));
+ zone()->NewArray<InstructionOperand>(input_count);
inputs[0] = g.UseRegister(value);
for (size_t index = 0; index < branch_count; ++index) {
inputs[index + 1] = g.Label(branches[index]);

View File

@@ -36,7 +36,7 @@ bool NodeCache<Key, Hash, Pred>::Resize(Zone* zone) {
size_t old_size = size_ + kLinearProbe;
size_ *= 4;
size_t num_entries = size_ + kLinearProbe;
- entries_ = zone->NewArray<Entry>(static_cast<int>(num_entries));
+ entries_ = zone->NewArray<Entry>(num_entries);
memset(entries_, 0, sizeof(Entry) * num_entries);
// Insert the old entries into the new block.
@@ -66,7 +66,7 @@ Node** NodeCache<Key, Hash, Pred>::Find(Zone* zone, Key key) {
if (!entries_) {
// Allocate the initial entries and insert the first entry.
size_t num_entries = kInitialSize + kLinearProbe;
- entries_ = zone->NewArray<Entry>(static_cast<int>(num_entries));
+ entries_ = zone->NewArray<Entry>(num_entries);
size_ = kInitialSize;
memset(entries_, 0, sizeof(Entry) * num_entries);
Entry* entry = &entries_[hash & (kInitialSize - 1)];

View File

@@ -80,8 +80,7 @@ void RawMachineAssembler::Switch(Node* index, Label** succ_labels,
size_t succ_count) {
DCHECK_NE(schedule()->end(), current_block_);
Node* sw = NewNode(common()->Switch(succ_count), index);
- BasicBlock** succ_blocks =
- zone()->NewArray<BasicBlock*>(static_cast<int>(succ_count));
+ BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
for (size_t index = 0; index < succ_count; ++index) {
succ_blocks[index] = Use(succ_labels[index]);
}

View File

@@ -61,8 +61,7 @@ RegisterAllocatorVerifier::RegisterAllocatorVerifier(
// kSameAsFirst along the way.
for (const auto* instr : sequence->instructions()) {
const size_t operand_count = OperandCount(instr);
- auto* op_constraints =
- zone->NewArray<OperandConstraint>(static_cast<int>(operand_count));
+ auto* op_constraints = zone->NewArray<OperandConstraint>(operand_count);
size_t count = 0;
for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
BuildConstraint(instr->InputAt(i), &op_constraints[count]);

View File

@@ -1514,7 +1514,7 @@ class LiveRangeBoundArray {
void Initialize(Zone* zone, const LiveRange* const range) {
size_t length = 0;
for (auto i = range; i != nullptr; i = i->next()) length++;
- start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length));
+ start_ = zone->NewArray<LiveRangeBound>(length);
length_ = length;
auto curr = start_;
for (auto i = range; i != nullptr; i = i->next(), ++curr) {

View File

@@ -365,8 +365,7 @@ class CFGBuilder : public ZoneObject {
void BuildBlocksForSuccessors(Node* node) {
size_t const successor_count = node->op()->ControlOutputCount();
- Node** successors =
- zone_->NewArray<Node*>(static_cast<int>(successor_count));
+ Node** successors = zone_->NewArray<Node*>(successor_count);
CollectSuccessorProjections(node, successors, successor_count);
for (size_t index = 0; index < successor_count; ++index) {
BuildBlockForNode(successors[index]);
@@ -457,7 +456,7 @@ class CFGBuilder : public ZoneObject {
void ConnectSwitch(Node* sw) {
size_t const successor_count = sw->op()->ControlOutputCount();
BasicBlock** successor_blocks =
- zone_->NewArray<BasicBlock*>(static_cast<int>(successor_count));
+ zone_->NewArray<BasicBlock*>(successor_count);
CollectSuccessorBlocks(sw, successor_blocks, successor_count);
if (sw == component_entry_) {

View File

@@ -135,7 +135,7 @@ void ValueNumberingReducer::Grow() {
Node** const old_entries = entries_;
size_t const old_capacity = capacity_;
capacity_ *= kCapacityToSizeRatio;
- entries_ = zone()->NewArray<Node*>(static_cast<int>(capacity_));
+ entries_ = zone()->NewArray<Node*>(capacity_);
memset(entries_, 0, sizeof(*entries_) * capacity_);
size_ = 0;
size_t const mask = capacity_ - 1;

View File

@@ -1073,7 +1073,7 @@ void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
void CodeGenerator::AssembleArchSwitch(Instruction* instr) {
X64OperandConverter i(this, instr);
size_t const label_count = instr->InputCount() - 1;
- Label** labels = zone()->NewArray<Label*>(static_cast<int>(label_count));
+ Label** labels = zone()->NewArray<Label*>(label_count);
for (size_t index = 0; index < label_count; ++index) {
labels[index] = GetLabel(i.InputRpo(static_cast<int>(index + 1)));
}

View File

@@ -13452,9 +13452,9 @@ void HStatistics::Print() {
double percent = times_[i].PercentOf(sum);
PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
- unsigned size = sizes_[i];
+ size_t size = sizes_[i];
double size_percent = static_cast<double>(size) * 100 / total_size_;
- PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
+ PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
}
PrintF(
@@ -13470,7 +13470,7 @@ void HStatistics::Print() {
PrintF(
"----------------------------------------"
"----------------------------------------\n");
- PrintF("%33s %8.3f ms %9u bytes\n", "Total",
+ PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
total.InMillisecondsF(), total_size_);
PrintF("%33s (%.1f times slower than full code gen)\n", "",
total.TimesOf(full_code_gen_));
@@ -13489,7 +13489,7 @@ void HStatistics::Print() {
void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
- unsigned size) {
+ size_t size) {
total_size_ += size;
for (int i = 0; i < names_.length(); ++i) {
if (strcmp(names_[i], name) == 0) {
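
The PrintF changes above follow from the type change: %u does not match a size_t argument (undefined behavior where size_t is wider than unsigned), while %zu does. A small standalone illustration of the corrected format, not taken from the commit:

  #include <cstddef>
  #include <cstdio>

  int main() {
    size_t total_size = 123456789;
    // %zu is the printf length modifier for size_t.
    std::printf("%33s %8.3f ms %9zu bytes\n", "Total", 1.234, total_size);
  }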

View File

@@ -2134,9 +2134,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
void VisitDeclarations(ZoneList<Declaration*>* declarations) OVERRIDE;
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
void operator delete(void* pointer, Zone* zone) { }
void operator delete(void* pointer) { }
@@ -2800,7 +2798,7 @@ class HStatistics FINAL: public Malloced {
void Initialize(CompilationInfo* info);
void Print();
- void SaveTiming(const char* name, base::TimeDelta time, unsigned size);
+ void SaveTiming(const char* name, base::TimeDelta time, size_t size);
void IncrementFullCodeGen(base::TimeDelta full_code_gen) {
full_code_gen_ += full_code_gen;
@@ -2825,11 +2823,11 @@ class HStatistics FINAL: public Malloced {
private:
List<base::TimeDelta> times_;
List<const char*> names_;
- List<unsigned> sizes_;
+ List<size_t> sizes_;
base::TimeDelta create_graph_;
base::TimeDelta optimize_graph_;
base::TimeDelta generate_code_;
- unsigned total_size_;
+ size_t total_size_;
base::TimeDelta full_code_gen_;
double source_size_;
};

View File

@@ -2174,7 +2174,7 @@ LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
LAllocatorPhase::~LAllocatorPhase() {
if (FLAG_hydrogen_stats) {
- unsigned size = allocator_->zone()->allocation_size() -
+ size_t size = allocator_->zone()->allocation_size() -
allocator_zone_start_allocation_size_;
isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
}

View File

@@ -564,7 +564,7 @@ class LAllocatorPhase : public CompilationPhase {
private:
LAllocator* allocator_;
- unsigned allocator_zone_start_allocation_size_;
+ size_t allocator_zone_start_allocation_size_;
DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
};

View File

@@ -23,9 +23,7 @@ class AstTyper: public AstVisitor {
public:
static void Run(CompilationInfo* info);
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
void operator delete(void* pointer, Zone* zone) { }
void operator delete(void* pointer) { }

View File

@@ -19,7 +19,7 @@ namespace {
#if V8_USE_ADDRESS_SANITIZER
- const int kASanRedzoneBytes = 24; // Must be a multiple of 8.
+ const size_t kASanRedzoneBytes = 24; // Must be a multiple of 8.
#else
@@ -35,7 +35,7 @@ const int kASanRedzoneBytes = 24; // Must be a multiple of 8.
USE(size); \
} while (false)
- const int kASanRedzoneBytes = 0;
+ const size_t kASanRedzoneBytes = 0;
#endif // V8_USE_ADDRESS_SANITIZER
@@ -50,7 +50,7 @@ const int kASanRedzoneBytes = 0;
class Segment {
public:
- void Initialize(Segment* next, int size) {
+ void Initialize(Segment* next, size_t size) {
next_ = next;
size_ = size;
}
@@ -58,20 +58,18 @@ class Segment {
Segment* next() const { return next_; }
void clear_next() { next_ = nullptr; }
- int size() const { return size_; }
- int capacity() const { return size_ - sizeof(Segment); }
+ size_t size() const { return size_; }
+ size_t capacity() const { return size_ - sizeof(Segment); }
Address start() const { return address(sizeof(Segment)); }
Address end() const { return address(size_); }
private:
// Computes the address of the nth byte in this segment.
- Address address(int n) const {
- return Address(this) + n;
- }
+ Address address(size_t n) const { return Address(this) + n; }
Segment* next_;
- int size_;
+ size_t size_;
};
@@ -91,7 +89,7 @@ Zone::~Zone() {
}
- void* Zone::New(int size) {
+ void* Zone::New(size_t size) {
// Round up the requested size to fit the alignment.
size = RoundUp(size, kAlignment);
@@ -106,8 +104,8 @@ void* Zone::New(int size) {
// Check if the requested size is available without expanding.
Address result = position_;
- const int size_with_redzone = size + kASanRedzoneBytes;
- if (size_with_redzone > limit_ - position_) {
+ const size_t size_with_redzone = size + kASanRedzoneBytes;
+ if (limit_ < position_ + size_with_redzone) {
result = NewExpand(size_with_redzone);
} else {
position_ += size_with_redzone;
@@ -141,7 +139,7 @@ void Zone::DeleteAll() {
keep = current;
keep->clear_next();
} else {
- int size = current->size();
+ size_t size = current->size();
#ifdef DEBUG
// Un-poison first so the zapping doesn't trigger ASan complaints.
ASAN_UNPOISON_MEMORY_REGION(current, size);
@@ -185,7 +183,7 @@ void Zone::DeleteKeptSegment() {
DCHECK(segment_head_ == nullptr || segment_head_->next() == nullptr);
if (segment_head_ != nullptr) {
- int size = segment_head_->size();
+ size_t size = segment_head_->size();
#ifdef DEBUG
// Un-poison first so the zapping doesn't trigger ASan complaints.
ASAN_UNPOISON_MEMORY_REGION(segment_head_, size);
@@ -202,7 +200,7 @@ void Zone::DeleteKeptSegment() {
// Creates a new segment, sets it size, and pushes it to the front
// of the segment chain. Returns the new segment.
- Segment* Zone::NewSegment(int size) {
+ Segment* Zone::NewSegment(size_t size) {
Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
segment_bytes_allocated_ += size;
if (result != nullptr) {
@@ -214,17 +212,17 @@ Segment* Zone::NewSegment(int size) {
// Deletes the given segment. Does not touch the segment chain.
- void Zone::DeleteSegment(Segment* segment, int size) {
+ void Zone::DeleteSegment(Segment* segment, size_t size) {
segment_bytes_allocated_ -= size;
Malloced::Delete(segment);
}
- Address Zone::NewExpand(int size) {
+ Address Zone::NewExpand(size_t size) {
// Make sure the requested size is already properly aligned and that
// there isn't enough room in the Zone to satisfy the request.
- DCHECK(size == RoundDown(size, kAlignment));
- DCHECK(size > limit_ - position_);
+ DCHECK_EQ(size, RoundDown(size, kAlignment));
+ DCHECK_LT(limit_, position_ + size);
// Compute the new segment size. We use a 'high water mark'
// strategy, where we increase the segment size every time we expand
@@ -235,27 +233,26 @@ Address Zone::NewExpand(int size) {
static const size_t kSegmentOverhead = sizeof(Segment) + kAlignment;
const size_t new_size_no_overhead = size + (old_size << 1);
size_t new_size = kSegmentOverhead + new_size_no_overhead;
- const size_t min_new_size = kSegmentOverhead + static_cast<size_t>(size);
+ const size_t min_new_size = kSegmentOverhead + size;
// Guard against integer overflow.
- if (new_size_no_overhead < static_cast<size_t>(size) ||
- new_size < static_cast<size_t>(kSegmentOverhead)) {
+ if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
V8::FatalProcessOutOfMemory("Zone");
return nullptr;
}
- if (new_size < static_cast<size_t>(kMinimumSegmentSize)) {
+ if (new_size < kMinimumSegmentSize) {
new_size = kMinimumSegmentSize;
- } else if (new_size > static_cast<size_t>(kMaximumSegmentSize)) {
+ } else if (new_size > kMaximumSegmentSize) {
// Limit the size of new segments to avoid growing the segment size
// exponentially, thus putting pressure on contiguous virtual address space.
// All the while making sure to allocate a segment large enough to hold the
// requested size.
- new_size = Max(min_new_size, static_cast<size_t>(kMaximumSegmentSize));
+ new_size = Max(min_new_size, kMaximumSegmentSize);
}
if (new_size > INT_MAX) {
V8::FatalProcessOutOfMemory("Zone");
return nullptr;
}
- Segment* segment = NewSegment(static_cast<int>(new_size));
+ Segment* segment = NewSegment(new_size);
if (segment == nullptr) {
V8::FatalProcessOutOfMemory("Zone");
return nullptr;
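
The overflow guard in Zone::NewExpand above relies on unsigned wraparound being well-defined: if size + (old_size << 1) wraps, the result is smaller than size. A minimal sketch of that pattern (illustrative, not the V8 code):

  #include <cstddef>

  // True if a + b would wrap around for size_t operands; the wrapped sum is
  // necessarily smaller than either operand.
  bool AddWouldOverflow(size_t a, size_t b) { return a + b < a; }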

View File

@@ -40,12 +40,11 @@ class Zone FINAL {
// Allocate 'size' bytes of memory in the Zone; expands the Zone by
// allocating new segments of memory on demand using malloc().
- void* New(int size);
+ void* New(size_t size);
template <typename T>
- T* NewArray(int length) {
- DCHECK(std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) >
- length);
+ T* NewArray(size_t length) {
+ DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
return static_cast<T*>(New(length * sizeof(T)));
}
@@ -63,51 +62,51 @@ class Zone FINAL {
return segment_bytes_allocated_ > kExcessLimit;
}
- unsigned allocation_size() const { return allocation_size_; }
+ size_t allocation_size() const { return allocation_size_; }
private:
// All pointers returned from New() have this alignment. In addition, if the
// object being allocated has a size that is divisible by 8 then its alignment
// will be 8. ASan requires 8-byte alignment.
#ifdef V8_USE_ADDRESS_SANITIZER
- static const int kAlignment = 8;
+ static const size_t kAlignment = 8;
STATIC_ASSERT(kPointerSize <= 8);
#else
- static const int kAlignment = kPointerSize;
+ static const size_t kAlignment = kPointerSize;
#endif
// Never allocate segments smaller than this size in bytes.
- static const int kMinimumSegmentSize = 8 * KB;
+ static const size_t kMinimumSegmentSize = 8 * KB;
// Never allocate segments larger than this size in bytes.
- static const int kMaximumSegmentSize = 1 * MB;
+ static const size_t kMaximumSegmentSize = 1 * MB;
// Never keep segments larger than this size in bytes around.
- static const int kMaximumKeptSegmentSize = 64 * KB;
+ static const size_t kMaximumKeptSegmentSize = 64 * KB;
// Report zone excess when allocation exceeds this limit.
- static const int kExcessLimit = 256 * MB;
+ static const size_t kExcessLimit = 256 * MB;
// The number of bytes allocated in this zone so far.
- unsigned allocation_size_;
+ size_t allocation_size_;
// The number of bytes allocated in segments. Note that this number
// includes memory allocated from the OS but not yet allocated from
// the zone.
- int segment_bytes_allocated_;
+ size_t segment_bytes_allocated_;
// Expand the Zone to hold at least 'size' more bytes and allocate
// the bytes. Returns the address of the newly allocated chunk of
// memory in the Zone. Should only be called if there isn't enough
// room in the Zone already.
- Address NewExpand(int size);
+ Address NewExpand(size_t size);
// Creates a new segment, sets it size, and pushes it to the front
// of the segment chain. Returns the new segment.
- inline Segment* NewSegment(int size);
+ inline Segment* NewSegment(size_t size);
// Deletes the given segment. Does not touch the segment chain.
- inline void DeleteSegment(Segment* segment, int size);
+ inline void DeleteSegment(Segment* segment, size_t size);
// The free region in the current (front) segment is represented as
// the half-open interval [position, limit). The 'position' variable
@@ -124,9 +123,7 @@ class Zone FINAL {
class ZoneObject {
public:
// Allocate a new ZoneObject of 'size' bytes in the Zone.
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
// Ideally, the delete operator should be private instead of
// public, but unfortunately the compiler sometimes synthesizes
@@ -160,7 +157,7 @@ class ZoneScope FINAL {
class ZoneAllocationPolicy FINAL {
public:
explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) { }
- void* New(size_t size) { return zone()->New(static_cast<int>(size)); }
+ void* New(size_t size) { return zone()->New(size); }
static void Delete(void* pointer) {}
Zone* zone() const { return zone_; }
@@ -181,9 +178,7 @@ class ZoneList FINAL : public List<T, ZoneAllocationPolicy> {
ZoneList(int capacity, Zone* zone)
: List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) { }
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
// Construct a new ZoneList by copying the elements of the given ZoneList.
ZoneList(const ZoneList<T>& other, Zone* zone)
@@ -239,9 +234,7 @@ class ZoneSplayTree FINAL : public SplayTree<Config, ZoneAllocationPolicy> {
SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
}
- void* operator new(size_t size, Zone* zone) {
- return zone->New(static_cast<int>(size));
- }
+ void* operator new(size_t size, Zone* zone) { return zone->New(size); }
void operator delete(void* pointer) { UNREACHABLE(); }
void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
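
The DCHECK_LT bound in Zone::NewArray above keeps length * sizeof(T) from wrapping around. A worked version of the same check (illustrative only, using the standard library rather than V8's DCHECK macros):

  #include <cstddef>
  #include <limits>

  // Mirrors the NewArray guard: the product length * sizeof(T) cannot wrap
  // as long as length < SIZE_MAX / sizeof(T).
  template <typename T>
  bool ArraySizeFits(size_t length) {
    return length < std::numeric_limits<size_t>::max() / sizeof(T);
  }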

View File

@@ -32,9 +32,9 @@ class ZonePoolTest : public TestWithIsolate {
size_t Allocate(Zone* zone) {
size_t bytes = rng.NextInt(25) + 7;
- int size_before = zone->allocation_size();
- zone->New(static_cast<int>(bytes));
- return static_cast<size_t>(zone->allocation_size() - size_before);
+ size_t size_before = zone->allocation_size();
+ zone->New(bytes);
+ return zone->allocation_size() - size_before;
}
private: