[heap] Enable inline_new for single generation heap
Currently the --single-generation flag disables inline allocations and forces all allocations to go through the runtime, where they are redirected to the old generation. This patch implements the young-to-old redirection directly in CSA (CodeStubAssembler) and TF (TurboFan), so inline allocation no longer has to be disabled.

Bug: v8:11644
Change-Id: Ie945ba684fb0f41d5414a05be2f25245d4869d6c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2839010
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74121}
parent 1277bb5c55
commit 0f683da350
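For orientation, here is a minimal, self-contained sketch of the redirection this patch implements. This is not V8 code: apart from FLAG_single_generation, AllocationType::kYoung/kOld, and kPretenured, which appear in the diff below, all names (RedirectForSingleGeneration, AllocatesInNewSpace, the stand-in flag and enum definitions) are illustrative. On a single-generation heap, a requested young-space allocation is resolved to old space either by rewriting the allocation type (the TurboFan/MemoryLowering path) or by forcing the pretenured flag (the CSA path).

#include <cstdint>
#include <iostream>

// Stand-ins for the real V8 flag and enums (illustrative only).
static bool FLAG_single_generation = true;

enum class AllocationType { kYoung, kOld };

using AllocationFlags = uint32_t;
constexpr AllocationFlags kNone = 0;
constexpr AllocationFlags kPretenured = 1 << 0;

// TurboFan-style redirection: rewrite the allocation type itself,
// mirroring MemoryLowering's CheckAllocationType in the diff below.
AllocationType RedirectForSingleGeneration(AllocationType allocation) {
  if (FLAG_single_generation && allocation == AllocationType::kYoung) {
    return AllocationType::kOld;
  }
  return allocation;
}

// CSA-style redirection: force the pretenured flag so that the
// "allocate in new space?" decision comes out false.
bool AllocatesInNewSpace(AllocationFlags flags) {
  if (FLAG_single_generation) flags |= kPretenured;
  return !(flags & kPretenured);
}

int main() {
  std::cout << std::boolalpha;
  std::cout << "young request resolves to old: "
            << (RedirectForSingleGeneration(AllocationType::kYoung) ==
                AllocationType::kOld)
            << "\n";  // true
  std::cout << "inline allocation goes to new space: "
            << AllocatesInNewSpace(kNone) << "\n";  // false
  return 0;
}

Both paths enforce the same policy: the TF path changes the allocation metadata that later lowering consults, while the CSA path piggybacks on the existing pretenure check.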
@@ -1368,6 +1368,7 @@ TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
 TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
                                               AllocationFlags flags) {
   Comment("Allocate");
+  if (FLAG_single_generation) flags |= kPretenured;
   bool const new_space = !(flags & kPretenured);
   bool const allow_large_objects = flags & kAllowLargeObjectAllocation;
   // For optimized allocations, we don't allow the allocation to happen in a
@@ -42,6 +42,15 @@ class MemoryLowering::AllocationGroup final : public ZoneObject {
   AllocationType const allocation_;
   Node* const size_;
 
+  static inline AllocationType CheckAllocationType(AllocationType allocation) {
+    // For non-generational heap, all young allocations are redirected to old
+    // space.
+    if (FLAG_single_generation && allocation == AllocationType::kYoung) {
+      return AllocationType::kOld;
+    }
+    return allocation;
+  }
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
 };
 
@@ -99,6 +108,9 @@ Reduction MemoryLowering::ReduceAllocateRaw(
   DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
   DCHECK_IMPLIES(allocation_folding_ == AllocationFolding::kDoAllocationFolding,
                  state_ptr != nullptr);
+  if (FLAG_single_generation && allocation_type == AllocationType::kYoung) {
+    allocation_type = AllocationType::kOld;
+  }
   // Code objects may have a maximum size smaller than kMaxHeapObjectSize due to
   // guard pages. If we need to support allocating code here we would need to
   // call MemoryChunkLayout::MaxRegularCodeObjectSize() at runtime.
@@ -563,6 +575,9 @@ WriteBarrierKind MemoryLowering::ComputeWriteBarrierKind(
   if (!ValueNeedsWriteBarrier(value, isolate())) {
     write_barrier_kind = kNoWriteBarrier;
   }
+  if (FLAG_disable_write_barriers) {
+    write_barrier_kind = kNoWriteBarrier;
+  }
   if (write_barrier_kind == WriteBarrierKind::kAssertNoWriteBarrier) {
     write_barrier_assert_failed_(node, object, function_debug_name_, zone());
   }
@@ -587,14 +602,18 @@ bool MemoryLowering::NeedsPoisoning(LoadSensitivity load_sensitivity) const {
 MemoryLowering::AllocationGroup::AllocationGroup(Node* node,
                                                  AllocationType allocation,
                                                  Zone* zone)
-    : node_ids_(zone), allocation_(allocation), size_(nullptr) {
+    : node_ids_(zone),
+      allocation_(CheckAllocationType(allocation)),
+      size_(nullptr) {
   node_ids_.insert(node->id());
 }
 
 MemoryLowering::AllocationGroup::AllocationGroup(Node* node,
                                                  AllocationType allocation,
                                                  Node* size, Zone* zone)
-    : node_ids_(zone), allocation_(allocation), size_(size) {
+    : node_ids_(zone),
+      allocation_(CheckAllocationType(allocation)),
+      size_(size) {
   node_ids_.insert(node->id());
 }
 
@@ -420,10 +420,6 @@ DEFINE_BOOL_READONLY(
     single_generation, V8_GENERATION_BOOL,
     "allocate all objects from young generation to old generation")
 
-// Prevent inline allocation into new space
-DEFINE_NEG_IMPLICATION(single_generation, inline_new)
-DEFINE_NEG_IMPLICATION(single_generation, turbo_allocation_folding)
-
 #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
 #define V8_ENABLE_CONSERVATIVE_STACK_SCANNING_BOOL true
 #else
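The deleted lines above are what previously turned inline allocation off: the negative implications forced inline_new and turbo_allocation_folding off whenever single_generation was set. A rough model of that behavior is sketched below; it is not V8's actual flag machinery, and Flags, ApplyNegImplication, and ApplyRemovedImplications are hypothetical names used only for illustration. After this patch the two implications are simply gone, so both features stay enabled and the young-to-old redirection in CSA/TF takes over.

#include <iostream>

// Hypothetical model of a negative flag implication (not V8's flag system).
struct Flags {
  bool single_generation = true;  // assume a single-generation build
  bool inline_new = true;
  bool turbo_allocation_folding = true;
};

// A negative implication roughly means: if the premise flag is set,
// force the conclusion flag off.
void ApplyNegImplication(bool premise, bool* conclusion) {
  if (premise) *conclusion = false;
}

// What the two deleted DEFINE_NEG_IMPLICATION lines used to do.
void ApplyRemovedImplications(Flags* f) {
  ApplyNegImplication(f->single_generation, &f->inline_new);
  ApplyNegImplication(f->single_generation, &f->turbo_allocation_folding);
}

int main() {
  Flags f;
  // With the implications removed, these stay true even when
  // single_generation is set.
  std::cout << std::boolalpha << f.inline_new << " "
            << f.turbo_allocation_folding << "\n";  // true true
  return 0;
}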