Allocation space decisions are precisely made in hydrogen.
BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/21089006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15970 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit b6a6fc76dc
parent dff0e84b1b
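Summary, for orientation before the diff: HAllocate previously carried public "candidate" flags (CAN_ALLOCATE_IN_NEW_SPACE, CAN_ALLOCATE_IN_OLD_DATA_SPACE, CAN_ALLOCATE_IN_OLD_POINTER_SPACE) and left the actual space choice to consumers of the instruction. With this change the constructor takes a pretenure bool plus an ElementsKind, commits to exactly one allocation space up front, and exposes the decision through IsNewSpaceAllocation(), IsOldDataSpaceAllocation() and IsOldPointerSpaceAllocation(); the flag enum becomes private. The standalone C++ sketch below distills that decision logic using simplified stand-in types; the names mirror the diff, but it is not V8 source and omits all of the instruction plumbing.

// Minimal standalone sketch (assumed, simplified types; not V8 code) of how
// the new HAllocate constructor resolves a single allocation space from a
// pretenure flag and an elements kind.
#include <cstdio>

enum ElementsKind { FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };

class AllocateSketch {
 public:
  AllocateSketch(bool pretenure, ElementsKind kind) {
    if (pretenure) {
      // Pretenured double arrays go to old data space and need double
      // alignment; other pretenured objects go to old pointer space.
      flags_ = (kind == FAST_DOUBLE_ELEMENTS)
          ? static_cast<Flags>(ALLOCATE_IN_OLD_DATA_SPACE |
                               ALLOCATE_DOUBLE_ALIGNED)
          : ALLOCATE_IN_OLD_POINTER_SPACE;
    } else {
      flags_ = ALLOCATE_IN_NEW_SPACE;
      if (kind == FAST_DOUBLE_ELEMENTS) {
        flags_ = static_cast<Flags>(flags_ | ALLOCATE_DOUBLE_ALIGNED);
      }
    }
  }

  // The renamed predicates: each allocation is now in exactly one space.
  bool IsNewSpaceAllocation() const {
    return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
  }
  bool IsOldDataSpaceAllocation() const {
    return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
  }
  bool IsOldPointerSpaceAllocation() const {
    return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
  }
  bool MustAllocateDoubleAligned() const {
    return (flags_ & ALLOCATE_DOUBLE_ALIGNED) != 0;
  }

 private:
  enum Flags {
    ALLOCATE_IN_NEW_SPACE = 1 << 0,
    ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
    ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
    ALLOCATE_DOUBLE_ALIGNED = 1 << 3
  };
  Flags flags_;
};

int main() {
  AllocateSketch young(false, FAST_ELEMENTS);
  AllocateSketch old_doubles(true, FAST_DOUBLE_ELEMENTS);
  std::printf("new space: %d\n", young.IsNewSpaceAllocation());            // 1
  std::printf("old data: %d\n", old_doubles.IsOldDataSpaceAllocation());   // 1
  std::printf("aligned: %d\n", old_doubles.MustAllocateDoubleAligned());   // 1
  return 0;
}

Because the decision is now precise, the Lithium backends in the hunks below can assert that exactly one of the three space predicates holds and pick the matching pretenure flag or runtime allocation function.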
@@ -5355,10 +5355,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }

@@ -5417,10 +5419,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
     __ Push(Smi::FromInt(size));
   }

-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
   } else {
     CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
@@ -434,14 +434,10 @@ HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
   checker.Then();

   HValue* size_in_bytes = AddInstruction(new(zone) HConstant(size));
-  HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
-  if (isolate()->heap()->ShouldGloballyPretenure()) {
-    flags = static_cast<HAllocate::Flags>(
-        flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
-  }

   HInstruction* object = AddInstruction(new(zone)
-      HAllocate(context(), size_in_bytes, HType::JSObject(), flags));
+      HAllocate(context(), size_in_bytes, HType::JSObject(),
+                isolate()->heap()->ShouldGloballyPretenure()));

   for (int i = 0; i < size; i += kPointerSize) {
     HObjectAccess access = HObjectAccess::ForJSObjectOffset(i);
@@ -466,11 +462,8 @@ HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
   Zone* zone = this->zone();

   HValue* size = AddInstruction(new(zone) HConstant(AllocationSite::kSize));
-  HAllocate::Flags flags = HAllocate::DefaultFlags();
-  flags = static_cast<HAllocate::Flags>(
-      flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
   HInstruction* object = AddInstruction(new(zone)
-      HAllocate(context(), size, HType::JSObject(), flags));
+      HAllocate(context(), size, HType::JSObject(), true));

   // Store the map
   Handle<Map> allocation_site_map(isolate()->heap()->allocation_site_map(),
@@ -3766,8 +3766,8 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
   HValue* current_size = size();
   // We can just fold allocations that are guaranteed in new space.
   // TODO(hpayer): Add support for non-constant allocation in dominator.
-  if (!GuaranteedInNewSpace() || !current_size->IsInteger32Constant() ||
-      !dominator_allocate_instr->GuaranteedInNewSpace() ||
+  if (!IsNewSpaceAllocation() || !current_size->IsInteger32Constant() ||
+      !dominator_allocate_instr->IsNewSpaceAllocation() ||
       !dominator_size->IsInteger32Constant()) {
     if (FLAG_trace_allocation_folding) {
       PrintF("#%d (%s) cannot fold into #%d (%s)\n",
@@ -3785,7 +3785,7 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,

   if (MustAllocateDoubleAligned()) {
     if (!dominator_allocate_instr->MustAllocateDoubleAligned()) {
-      dominator_allocate_instr->SetFlags(HAllocate::ALLOCATE_DOUBLE_ALIGNED);
+      dominator_allocate_instr->MakeDoubleAligned();
     }
     if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
       dominator_size_constant += kDoubleSize / 2;
@@ -3810,7 +3810,7 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    dominator_allocate_instr->SetFlags(HAllocate::PREFILL_WITH_FILLER);
+    dominator_allocate_instr->MakePrefillWithFiller();
   }
 #endif

@@ -3830,7 +3830,7 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,

 void HAllocate::PrintDataTo(StringStream* stream) {
   size()->PrintNameTo(stream);
-  if (!GuaranteedInNewSpace()) stream->Add(" (pretenure)");
+  if (!IsNewSpaceAllocation()) stream->Add(" (pretenure)");
 }


@@ -5240,16 +5240,11 @@ class HLoadGlobalGeneric: public HTemplateInstruction<2> {

 class HAllocate: public HTemplateInstruction<2> {
  public:
-  enum Flags {
-    CAN_ALLOCATE_IN_NEW_SPACE = 1 << 0,
-    CAN_ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
-    CAN_ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
-    ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
-    PREFILL_WITH_FILLER = 1 << 4
-  };
-
-  HAllocate(HValue* context, HValue* size, HType type, Flags flags)
-      : flags_(flags) {
+  HAllocate(HValue* context,
+            HValue* size,
+            HType type,
+            bool pretenure,
+            ElementsKind kind = FAST_ELEMENTS) {
     SetOperandAt(0, context);
     SetOperandAt(1, size);
     set_type(type);
@@ -5257,24 +5252,25 @@ class HAllocate: public HTemplateInstruction<2> {
     SetFlag(kTrackSideEffectDominators);
     SetGVNFlag(kChangesNewSpacePromotion);
     SetGVNFlag(kDependsOnNewSpacePromotion);
+    if (pretenure) {
+      if (IsFastDoubleElementsKind(kind)) {
+        flags_ = static_cast<HAllocate::Flags>(ALLOCATE_IN_OLD_DATA_SPACE |
+            ALLOCATE_DOUBLE_ALIGNED);
+      } else {
+        flags_ = ALLOCATE_IN_OLD_POINTER_SPACE;
+      }
+    } else {
+      flags_ = ALLOCATE_IN_NEW_SPACE;
+      if (IsFastDoubleElementsKind(kind)) {
+        flags_ = static_cast<HAllocate::Flags>(flags_ |
+            ALLOCATE_DOUBLE_ALIGNED);
+      }
+    }
   }

   // Maximum instance size for which allocations will be inlined.
   static const int kMaxInlineSize = 64 * kPointerSize;

-  static Flags DefaultFlags() {
-    return CAN_ALLOCATE_IN_NEW_SPACE;
-  }
-
-  static Flags DefaultFlags(ElementsKind kind) {
-    Flags flags = CAN_ALLOCATE_IN_NEW_SPACE;
-    if (IsFastDoubleElementsKind(kind)) {
-      flags = static_cast<HAllocate::Flags>(
-          flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
-    }
-    return flags;
-  }
-
   HValue* context() { return OperandAt(0); }
   HValue* size() { return OperandAt(1); }

@@ -5294,25 +5290,16 @@ class HAllocate: public HTemplateInstruction<2> {
     known_initial_map_ = known_initial_map;
   }

-  bool CanAllocateInNewSpace() const {
-    return (flags_ & CAN_ALLOCATE_IN_NEW_SPACE) != 0;
+  bool IsNewSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
   }

-  bool CanAllocateInOldDataSpace() const {
-    return (flags_ & CAN_ALLOCATE_IN_OLD_DATA_SPACE) != 0;
+  bool IsOldDataSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
   }

-  bool CanAllocateInOldPointerSpace() const {
-    return (flags_ & CAN_ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
-  }
-
-  bool CanAllocateInOldSpace() const {
-    return CanAllocateInOldDataSpace() ||
-           CanAllocateInOldPointerSpace();
-  }
-
-  bool GuaranteedInNewSpace() const {
-    return CanAllocateInNewSpace() && !CanAllocateInOldSpace();
+  bool IsOldPointerSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
   }

   bool MustAllocateDoubleAligned() const {
@@ -5323,8 +5310,12 @@ class HAllocate: public HTemplateInstruction<2> {
     return (flags_ & PREFILL_WITH_FILLER) != 0;
   }

-  void SetFlags(Flags flags) {
-    flags_ = static_cast<HAllocate::Flags>(flags_ | flags);
+  void MakePrefillWithFiller() {
+    flags_ = static_cast<HAllocate::Flags>(flags_ | PREFILL_WITH_FILLER);
   }

+  void MakeDoubleAligned() {
+    flags_ = static_cast<HAllocate::Flags>(flags_ | ALLOCATE_DOUBLE_ALIGNED);
+  }
+
   void UpdateSize(HValue* size) {
@@ -5339,6 +5330,14 @@ class HAllocate: public HTemplateInstruction<2> {
   DECLARE_CONCRETE_INSTRUCTION(Allocate)

  private:
+  enum Flags {
+    ALLOCATE_IN_NEW_SPACE = 1 << 0,
+    ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
+    ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
+    ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
+    PREFILL_WITH_FILLER = 1 << 4
+  };
+
   Flags flags_;
   Handle<Map> known_initial_map_;
 };
@@ -5389,7 +5388,7 @@ inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
   }
   if (object != new_space_dominator) return true;
   if (object->IsAllocate()) {
-    return !HAllocate::cast(object)->GuaranteedInNewSpace();
+    return !HAllocate::cast(object)->IsNewSpaceAllocation();
   }
   return true;
 }
@@ -1325,20 +1325,8 @@ HValue* HGraphBuilder::BuildAllocateElements(HValue* context,
       HAdd::New(zone, context, mul, header_size));
   total_size->ClearFlag(HValue::kCanOverflow);

-  HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
-  if (isolate()->heap()->ShouldGloballyPretenure()) {
-    // TODO(hpayer): When pretenuring can be internalized, flags can become
-    // private to HAllocate.
-    if (IsFastDoubleElementsKind(kind)) {
-      flags = static_cast<HAllocate::Flags>(
-          flags | HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
-    } else {
-      flags = static_cast<HAllocate::Flags>(
-          flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
-    }
-  }
-
-  return Add<HAllocate>(context, total_size, HType::JSArray(), flags);
+  return Add<HAllocate>(context, total_size, HType::JSArray(),
+                        isolate()->heap()->ShouldGloballyPretenure(), kind);
 }


@@ -1669,14 +1657,14 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
         : FixedArray::SizeFor(length);
   }

-  HAllocate::Flags allocate_flags = HAllocate::DefaultFlags(kind);
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   HValue* size_in_bytes = Add<HConstant>(size);
   HInstruction* object = Add<HAllocate>(context,
                                         size_in_bytes,
                                         HType::JSObject(),
-                                        allocate_flags);
+                                        false,
+                                        kind);

   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
@@ -1958,9 +1946,8 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                  Representation::Smi());

   // Allocate (dealing with failure appropriately)
-  HAllocate::Flags flags = HAllocate::DefaultFlags(kind_);
   HAllocate* new_object = builder()->Add<HAllocate>(context, size_in_bytes,
-      HType::JSArray(), flags);
+      HType::JSArray(), false, kind_);

   // Fill in the fields: map, properties, length
   HValue* map;
@@ -4573,7 +4560,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
     HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
     HInstruction* heap_number = Add<HAllocate>(
         environment()->LookupContext(), heap_number_size,
-        HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE);
+        HType::HeapNumber(), false);
     AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
     AddStore(heap_number, HObjectAccess::ForHeapNumberValue(), value);
     instr = new(zone()) HStoreNamedField(
@@ -7209,14 +7196,10 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {

   // Allocate an instance of the implicit receiver object.
   HValue* size_in_bytes = Add<HConstant>(instance_size);
-  HAllocate::Flags flags = HAllocate::DefaultFlags();
-  if (FLAG_pretenuring_call_new &&
-      isolate()->heap()->ShouldGloballyPretenure()) {
-    flags = static_cast<HAllocate::Flags>(
-        flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
-  }
+  bool pretenure = FLAG_pretenuring_call_new &&
+      isolate()->heap()->ShouldGloballyPretenure();
   HAllocate* receiver =
-      Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
+      Add<HAllocate>(context, size_in_bytes, HType::JSObject(), pretenure);
   receiver->set_known_initial_map(initial_map);

   // Load the initial map from the constructor.
@@ -8298,12 +8281,9 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(

   if (isolate()->heap()->ShouldGloballyPretenure()) {
     if (data_size != 0) {
-      HAllocate::Flags data_flags =
-          static_cast<HAllocate::Flags>(HAllocate::DefaultFlags(kind) |
-              HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
       HValue* size_in_bytes = Add<HConstant>(data_size);
       data_target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(),
-                                   data_flags);
+                                   true, FAST_DOUBLE_ELEMENTS);
       Handle<Map> free_space_map = isolate()->factory()->free_space_map();
       AddStoreMapConstant(data_target, free_space_map);
       HObjectAccess access =
@@ -8311,17 +8291,14 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
       AddStore(data_target, access, size_in_bytes);
     }
     if (pointer_size != 0) {
-      HAllocate::Flags pointer_flags =
-          static_cast<HAllocate::Flags>(HAllocate::DefaultFlags() |
-              HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
       HValue* size_in_bytes = Add<HConstant>(pointer_size);
       target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(),
-                              pointer_flags);
+                              true);
     }
   } else {
-    HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
     HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
-    target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
+    target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(), false,
+                            kind);
   }

   int offset = 0;
@@ -6050,10 +6050,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }

@@ -6105,11 +6107,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
     __ push(Immediate(Smi::FromInt(size)));
   }

-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context());
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(
         Runtime::kAllocateInOldDataSpace, 1, instr, instr->context());
   } else {
@@ -5332,10 +5332,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
   if (instr->size()->IsConstantOperand()) {
@@ -5393,10 +5395,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
     __ Push(Smi::FromInt(size));
   }

-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
   } else {
     CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
@@ -5091,10 +5091,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }

@@ -5146,10 +5148,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
     __ Push(Smi::FromInt(size));
   }

-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
   } else {
     CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);