Remove high promotion mode.

BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/296413004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21493 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 82b3b2a367
commit 6ed0102b1d
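
The patch removes the heap's "high promotion mode", a heuristic that capped new-space size and redirected allocation into old pointer space whenever scavenges showed stable, high survival rates. With the mode gone, MacroAssembler::AllocateAsciiConsString in every port collapses from a runtime branch on Heap::NewSpaceHighPromotionModeActiveAddress() into a single unconditional new-space allocation. A minimal sketch of the resulting shape, pieced together from the ARM hunk below; the tail of the InitializeNewString call is truncated in the hunk, so its map-index and scratch arguments here are assumptions:

// After the patch: no external-reference load, no pretenure branch.
void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register length,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);  // always a tagged new-space allocation now

  InitializeNewString(result,
                      length,
                      Heap::kConsAsciiStringMapRootIndex,  // assumed per-port detail
                      scratch1,
                      scratch2);
}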
src/arm/macro-assembler-arm.cc

@@ -1980,34 +1980,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  mov(scratch1, Operand(high_promotion_mode));
-  ldr(scratch1, MemOperand(scratch1, 0));
-  cmp(scratch1, Operand::Zero());
-  b(eq, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
src/arm64/macro-assembler-arm64.cc

@@ -3539,33 +3539,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  Mov(scratch1, high_promotion_mode);
-  Ldr(scratch1, MemOperand(scratch1));
-  Cbz(scratch1, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  B(&install_map);
-
-  Bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  Bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
src/assembler.cc

@@ -1204,13 +1204,6 @@ ExternalReference ExternalReference::old_data_space_allocation_limit_address(
 }

-
-ExternalReference ExternalReference::
-    new_space_high_promotion_mode_active_address(Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
-}
-

 ExternalReference ExternalReference::handle_scope_level_address(
     Isolate* isolate) {
   return ExternalReference(HandleScope::current_level_address(isolate));
src/assembler.h

@@ -874,8 +874,6 @@ class ExternalReference BASE_EMBEDDED {
       Isolate* isolate);
   static ExternalReference old_data_space_allocation_limit_address(
       Isolate* isolate);
-  static ExternalReference new_space_high_promotion_mode_active_address(
-      Isolate* isolate);

   static ExternalReference mod_two_doubles_operation(Isolate* isolate);
   static ExternalReference power_double_double_function(Isolate* isolate);
src/execution.cc

@@ -729,10 +729,6 @@ Object* StackGuard::HandleInterrupts() {
     return isolate_->TerminateExecution();
   }

-  if (CheckAndClearInterrupt(FULL_DEOPT, access)) {
-    Deoptimizer::DeoptimizeAll(isolate_);
-  }
-
   if (CheckAndClearInterrupt(DEOPT_MARKED_ALLOCATION_SITES, access)) {
     isolate_->heap()->DeoptMarkedAllocationSites();
   }
src/execution.h

@@ -155,7 +155,6 @@ class StackGuard V8_FINAL {
   V(DEBUGCOMMAND, DebugCommand)                                 \
   V(TERMINATE_EXECUTION, TerminateExecution)                    \
   V(GC_REQUEST, GC)                                             \
-  V(FULL_DEOPT, FullDeopt)                                      \
   V(INSTALL_CODE, InstallCode)                                  \
   V(API_INTERRUPT, ApiInterrupt)                                \
   V(DEOPT_MARKED_ALLOCATION_SITES, DeoptMarkedAllocationSites)
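
FULL_DEOPT can go because the heap was its only requester; the RequestFullDeopt() calls disappear from Heap::PerformGarbageCollection below. The V(NAME, Name) list above is an X-macro that is expanded several times to stamp out the interrupt enum and its request/check helpers. A reduced illustration of the pattern, with simplified helpers rather than the exact V8 expansion:

// Hypothetical, trimmed-down version of the X-macro machinery.
#define INTERRUPT_LIST(V) \
  V(GC_REQUEST, GC)       \
  V(INSTALL_CODE, InstallCode)

class StackGuard {
 public:
  // Expansion 1: enumerate the interrupts (GC_REQUEST = 0, INSTALL_CODE = 1).
  enum InterruptFlag {
#define V(NAME, Name) NAME,
    INTERRUPT_LIST(V)
#undef V
    NUMBER_OF_INTERRUPTS
  };

  // Expansion 2: one Request/Check pair per interrupt, e.g. RequestGC().
#define V(NAME, Name)                                          \
  void Request##Name() { pending_ |= (1 << NAME); }            \
  bool Check##Name() { return (pending_ & (1 << NAME)) != 0; }
  INTERRUPT_LIST(V)
#undef V

 private:
  int pending_ = 0;  // bitset of pending interrupts
};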
src/heap.cc (75 lines changed)
@@ -84,7 +84,6 @@ Heap::Heap()
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif  // DEBUG
-      new_space_high_promotion_mode_active_(false),
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
       size_of_old_gen_at_last_old_space_gc_(0),
       external_allocation_limit_(0),
@@ -98,14 +97,10 @@ Heap::Heap()
       total_regexp_code_generated_(0),
       tracer_(NULL),
       high_survival_rate_period_length_(0),
-      low_survival_rate_period_length_(0),
-      survival_rate_(0),
       promoted_objects_size_(0),
       promotion_rate_(0),
       semi_space_copied_object_size_(0),
       semi_space_copied_rate_(0),
-      previous_survival_rate_trend_(Heap::STABLE),
-      survival_rate_trend_(Heap::STABLE),
       max_gc_pause_(0.0),
       total_gc_time_ms_(0.0),
       max_alive_after_gc_(0),
@@ -1013,7 +1008,7 @@ void Heap::ClearNormalizedMapCaches() {
 }


-void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   if (start_new_space_size == 0) return;

   promotion_rate_ =
@@ -1031,24 +1026,6 @@ void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
   } else {
     high_survival_rate_period_length_ = 0;
   }
-
-  if (survival_rate < kYoungSurvivalRateLowThreshold) {
-    low_survival_rate_period_length_++;
-  } else {
-    low_survival_rate_period_length_ = 0;
-  }
-
-  double survival_rate_diff = survival_rate_ - survival_rate;
-
-  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(DECREASING);
-  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(INCREASING);
-  } else {
-    set_survival_rate_trend(STABLE);
-  }
-
-  survival_rate_ = survival_rate;
 }

 bool Heap::PerformGarbageCollection(
@@ -1108,51 +1085,7 @@ bool Heap::PerformGarbageCollection(
     tracer_ = NULL;
   }

-  UpdateSurvivalRateTrend(start_new_space_size);
-
-  if (!new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() == new_space_.MaximumCapacity() &&
-      IsStableOrIncreasingSurvivalTrend() &&
-      IsHighSurvivalRate()) {
-    // Stable high survival rates even though young generation is at
-    // maximum capacity indicates that most objects will be promoted.
-    // To decrease scavenger pauses and final mark-sweep pauses, we
-    // have to limit maximal capacity of the young generation.
-    SetNewSpaceHighPromotionModeActive(true);
-    if (FLAG_trace_gc) {
-      PrintPID("Limited new space size due to high promotion rate: %d MB\n",
-               new_space_.InitialCapacity() / MB);
-    }
-    // The high promotion mode is our indicator to turn on pretenuring. We have
-    // to deoptimize all optimized code in global pretenuring mode and all
-    // code which should be tenured in local pretenuring mode.
-    if (FLAG_pretenuring) {
-      if (!FLAG_allocation_site_pretenuring) {
-        isolate_->stack_guard()->RequestFullDeopt();
-      }
-    }
-  } else if (new_space_high_promotion_mode_active_ &&
-             IsStableOrDecreasingSurvivalTrend() &&
-             IsLowSurvivalRate()) {
-    // Decreasing low survival rates might indicate that the above high
-    // promotion mode is over and we should allow the young generation
-    // to grow again.
-    SetNewSpaceHighPromotionModeActive(false);
-    if (FLAG_trace_gc) {
-      PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
-               new_space_.MaximumCapacity() / MB);
-    }
-    // Trigger deoptimization here to turn off global pretenuring as soon as
-    // possible.
-    if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
-      isolate_->stack_guard()->RequestFullDeopt();
-    }
-  }
-
-  if (new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() > new_space_.InitialCapacity()) {
-    new_space_.Shrink();
-  }
+  UpdateSurvivalStatistics(start_new_space_size);

   isolate_->counters()->objs_since_last_young()->Set(0);

@@ -1353,8 +1286,7 @@ static void VerifyNonPointerSpacePointers(Heap* heap) {

 void Heap::CheckNewSpaceExpansionCriteria() {
   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
-      survived_since_last_expansion_ > new_space_.Capacity() &&
-      !new_space_high_promotion_mode_active_) {
+      survived_since_last_expansion_ > new_space_.Capacity()) {
     // Grow the size of new space if there is room to grow, enough data
     // has survived scavenge since the last expansion and we are not in
     // high promotion mode.
@@ -6203,7 +6135,6 @@ GCTracer::~GCTracer() {
     PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
     PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
     PrintF("nodes_promoted=%d ", nodes_promoted_);
-    PrintF("survival_rate=%.1f%% ", heap_->survival_rate_);
     PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
     PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);

src/heap.h (74 lines changed)
@@ -1034,20 +1034,10 @@ class Heap {
   inline int64_t AdjustAmountOfExternalAllocatedMemory(
       int64_t change_in_bytes);

-  // This is only needed for testing high promotion mode.
-  void SetNewSpaceHighPromotionModeActive(bool mode) {
-    new_space_high_promotion_mode_active_ = mode;
-  }
-
   // Returns the allocation mode (pre-tenuring) based on observed promotion
   // rates of previous collections.
   inline PretenureFlag GetPretenureMode() {
-    return FLAG_pretenuring && new_space_high_promotion_mode_active_
-        ? TENURED : NOT_TENURED;
-  }
-
-  inline Address* NewSpaceHighPromotionModeActiveAddress() {
-    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+    return FLAG_pretenuring ? TENURED : NOT_TENURED;
   }

   inline intptr_t PromotedTotalSize() {
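
Note the one behavioral change that survives outside the deleted machinery: GetPretenureMode() now keys off the --pretenuring flag alone rather than the runtime mode bit. Side by side, from the hunk above:

  // Before: pretenure only while high promotion mode was active.
  return FLAG_pretenuring && new_space_high_promotion_mode_active_
      ? TENURED : NOT_TENURED;

  // After: pretenure whenever --pretenuring is on.
  return FLAG_pretenuring ? TENURED : NOT_TENURED;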
@@ -1581,11 +1571,6 @@ class Heap {
   int allocation_timeout_;
 #endif  // DEBUG

-  // Indicates that the new space should be kept small due to high promotion
-  // rates caused by the mutator allocating a lot of long-lived objects.
-  // TODO(hpayer): change to bool if no longer accessed from generated code
-  intptr_t new_space_high_promotion_mode_active_;
-
   // Limit that triggers a global GC on the next (normally caused) GC.  This
   // is checked when we have already decided to do a GC to help determine
   // which collector to invoke, before expanding a paged space in the old
@@ -2026,76 +2011,25 @@ class Heap {
   void AddAllocationSiteToScratchpad(AllocationSite* site,
                                      ScratchpadSlotMode mode);

-  void UpdateSurvivalRateTrend(int start_new_space_size);
-
-  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
+  void UpdateSurvivalStatistics(int start_new_space_size);

   static const int kYoungSurvivalRateHighThreshold = 90;
-  static const int kYoungSurvivalRateLowThreshold = 10;
-  static const int kYoungSurvivalRateAllowedDeviation = 15;
-
   static const int kOldSurvivalRateLowThreshold = 10;

   int high_survival_rate_period_length_;
-  int low_survival_rate_period_length_;
-  double survival_rate_;
   intptr_t promoted_objects_size_;
   double promotion_rate_;
   intptr_t semi_space_copied_object_size_;
   double semi_space_copied_rate_;
-  SurvivalRateTrend previous_survival_rate_trend_;
-  SurvivalRateTrend survival_rate_trend_;
-
-  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
-    ASSERT(survival_rate_trend != FLUCTUATING);
-    previous_survival_rate_trend_ = survival_rate_trend_;
-    survival_rate_trend_ = survival_rate_trend;
-  }
-
-  SurvivalRateTrend survival_rate_trend() {
-    if (survival_rate_trend_ == STABLE) {
-      return STABLE;
-    } else if (previous_survival_rate_trend_ == STABLE) {
-      return survival_rate_trend_;
-    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
-      return FLUCTUATING;
-    } else {
-      return survival_rate_trend_;
-    }
-  }
-
-  bool IsStableOrIncreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case INCREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsStableOrDecreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case DECREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsIncreasingSurvivalTrend() {
-    return survival_rate_trend() == INCREASING;
-  }
-
   // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
   // Re-visit incremental marking heuristics.
   bool IsHighSurvivalRate() {
     return high_survival_rate_period_length_ > 0;
   }

-  bool IsLowSurvivalRate() {
-    return low_survival_rate_period_length_ > 0;
-  }
-
   void SelectScavengingVisitorsTable();

   void StartIdleRound() {
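
What survives in heap.h is just enough bookkeeping for IsHighSurvivalRate(), which the incremental-marking heuristics still consult (see the TODO kept above), plus the promotion and semi-space-copy rates that GCTracer prints. A hedged reconstruction of the renamed UpdateSurvivalStatistics(), stitched from the context lines in the heap.cc hunks; the exact arithmetic is an assumption:

// Reconstruction, not verbatim source: only the high-rate tracker remains.
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
  if (start_new_space_size == 0) return;

  // Rates as percentages of the new-space size at scavenge start (assumed).
  promotion_rate_ = static_cast<double>(promoted_objects_size_) * 100 /
                    static_cast<double>(start_new_space_size);
  semi_space_copied_rate_ =
      static_cast<double>(semi_space_copied_object_size_) * 100 /
      static_cast<double>(start_new_space_size);
  double survival_rate = promotion_rate_ + semi_space_copied_rate_;

  // Feeds IsHighSurvivalRate(); the low-rate and trend tracking is gone.
  if (survival_rate > kYoungSurvivalRateHighThreshold) {
    high_survival_rate_period_length_++;
  } else {
    high_survival_rate_period_length_ = 0;
  }
}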
src/ia32/macro-assembler-ia32.cc

@@ -1796,32 +1796,13 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
+           TAG_OBJECT);

-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
src/mips/macro-assembler-mips.cc

@@ -3117,33 +3117,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  li(scratch1, Operand(high_promotion_mode));
-  lw(scratch1, MemOperand(scratch1, 0));
-  Branch(&allocate_new_space, eq, scratch1, Operand(zero_reg));
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
src/serialize.cc

@@ -487,54 +487,49 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
       UNCLASSIFIED,
       58,
       "Heap::OldDataSpaceAllocationLimitAddress");
-  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
-          address(),
-      UNCLASSIFIED,
-      59,
-      "Heap::NewSpaceAllocationLimitAddress");
   Add(ExternalReference::allocation_sites_list_address(isolate).address(),
       UNCLASSIFIED,
-      60,
+      59,
       "Heap::allocation_sites_list_address()");
   Add(ExternalReference::address_of_uint32_bias().address(),
       UNCLASSIFIED,
-      61,
+      60,
       "uint32_bias");
   Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
       UNCLASSIFIED,
-      62,
+      61,
       "Code::MarkCodeAsExecuted");

   Add(ExternalReference::is_profiling_address(isolate).address(),
       UNCLASSIFIED,
-      63,
+      62,
       "CpuProfiler::is_profiling");

   Add(ExternalReference::scheduled_exception_address(isolate).address(),
       UNCLASSIFIED,
-      64,
+      63,
       "Isolate::scheduled_exception");

   Add(ExternalReference::invoke_function_callback(isolate).address(),
       UNCLASSIFIED,
-      65,
+      64,
       "InvokeFunctionCallback");

   Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
       UNCLASSIFIED,
-      66,
+      65,
       "InvokeAccessorGetterCallback");

   // Debug addresses
   Add(ExternalReference::debug_after_break_target_address(isolate).address(),
       UNCLASSIFIED,
-      67,
+      66,
       "Debug::after_break_target_address()");

   Add(ExternalReference::debug_restarter_frame_function_pointer_address(
           isolate).address(),
       UNCLASSIFIED,
-      68,
+      67,
       "Debug::restarter_frame_function_pointer_address()");

   // Add a small set of deopt entry addresses to encoder without generating the
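
The serialize.cc hunk looks noisy but is mechanical: this table assigns positional integer codes to external references, so deleting the entry encoded as 59 shifts every later code down by one, which is why nine otherwise untouched Add() calls change. (Incidentally, the deleted entry's description string, "Heap::NewSpaceAllocationLimitAddress", never matched the reference it named.) One renumbered call, verbatim from the hunk apart from the comment:

  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
      UNCLASSIFIED,
      59,  // previously 60; codes after the deleted entry shift down by one
      "Heap::allocation_sites_list_address()");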
src/x64/macro-assembler-x64.cc

@@ -4560,33 +4560,12 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  Load(scratch1, high_promotion_mode);
-  testb(scratch1, Immediate(1));
-  j(zero, &allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   // Set the map. The other fields are left uninitialized.
   LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
src/x87/macro-assembler-x87.cc

@@ -1689,32 +1689,13 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
+           TAG_OBJECT);

-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
test/cctest/test-heap.cc

@@ -2200,7 +2200,6 @@ TEST(OptimizedPretenuringAllocationFolding) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   v8::Local<v8::Value> res = CompileRun(
       "function DataObject() {"
@@ -2243,7 +2242,6 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   v8::Local<v8::Value> res = CompileRun(
       "var number_elements = 30000;"
@@ -2590,7 +2588,6 @@ TEST(OptimizedPretenuringCallNew) {
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
@@ -3723,10 +3720,6 @@ TEST(DisableInlineAllocation) {
   CcTest::heap()->DisableInlineAllocation();
   CompileRun("run()");

-  // Run test with inline allocation disabled and pretenuring.
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
-  CompileRun("run()");
-
   // Run test with inline allocation re-enabled.
   CcTest::heap()->EnableInlineAllocation();
   CompileRun("run()");