Revert of Reland [heap] Avoid the use of cells to point from code to new-space objects. (patchset #3 id:40001 of https://codereview.chromium.org/2091733002/ )

Reason for revert:
This breaks the gc-stress bot: https://chromegw.corp.google.com/i/client.v8/builders/V8%20Linux64%20GC%20Stress%20-%20custom%20snapshot

#
# Fatal error in ../../src/heap/mark-compact.cc, line 3715
# Check failed: Page::FromAddress(reinterpret_cast<HeapObject*>(*slot)->address()) ->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION).
#

I can reproduce this locally, and a local revert also fixes it -> revert.

Reproduce with:
 out/Debug/d8 --test --random-seed=2140216864 --nohard-abort --nodead-code-elimination --nofold-constants --enable-slow-asserts --debug-code --verify-heap --allow-natives-syntax --harmony-tailcalls test/mjsunit/mjsunit.js  test/mjsunit/es6/tail-call-megatest-shard2.js --gc-interval=500 --stress-compaction --concurrent-recompilation-queue-length=64 --concurrent-recompilation-delay=500 --concurrent-recompilation

(You may need to run it in a loop; the failure is flaky when broken, but it passes reliably with the revert.)

Original issue's description:
> Reland [heap] Avoid the use of cells to point from code to new-space objects.
>
> The reason for reverting was: [Sheriff] Breaks arm debug:
> https://build.chromium.org/p/client.v8.ports/builders/V8%20Linux%20-%20arm%20-%20sim%20-%20debug/builds/1038.
>
> The problem was the dereferencing of handles for smi checks. It turned out
> that these smi checks can be removed anyway, both on arm and on mips.
>
> Additionally some rebasing was necessary.
>
> Original issue's description:
>
> Cells were needed originally because there was no typed remembered set to
> record direct pointers from code space to new space. A previous
> CL (https://codereview.chromium.org/2003553002/) already introduced
> the remembered set, this CL uses it.
>
> This CL
> * stores direct pointers in code objects, even if the target is in new space,
> * records the slot of the pointer in typed-old-to-new remembered set,
> * adds a list which stores weak code-to-new-space references,
> * adds a test to test-heap.cc for weak code-to-new-space references,
> * removes prints in tail-call-megatest.js
>
> R=mlippautz@chromium.org
>
> Committed: https://crrev.com/5508e16592522658587da71ba6743c8e832fe4d1
> Cr-Commit-Position: refs/heads/master@{#37217}

TBR=mlippautz@chromium.org,ahaas@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review-Url: https://codereview.chromium.org/2090983002
Cr-Commit-Position: refs/heads/master@{#37221}
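
For readers skimming the diff: the original CL dropped the cell indirection in favour of direct pointers from code objects into new space plus entries in the typed old-to-new remembered set, and this revert puts the cells back. The sketch below is a standalone toy model in plain C++ (not V8 internals; every name in it is illustrative) that contrasts the two schemes described in the quoted CL description.

// Toy model of the two schemes -- standalone C++, not V8 code. Names only
// loosely mirror the concepts from the CL description (Cell, Code object,
// typed OLD_TO_NEW remembered set).
#include <vector>

struct HeapObject { bool in_new_space = false; };
struct Cell { HeapObject* value = nullptr; };  // cells always live in old space

struct Code {
  HeapObject* direct_target = nullptr;  // direct-pointer scheme (reverted CL)
  Cell* cell_target = nullptr;          // cell scheme (restored by this revert)
};

// Typed old-to-new remembered set: remembers which slot inside which code
// object points into new space, so the scavenger can update that slot after
// moving the target.
struct RememberedSlot { Code* host; HeapObject** slot; };
using RememberedSetOldToNew = std::vector<RememberedSlot>;

// Scheme restored by this revert: a new-space target is reached through an
// old-space cell, so the code object itself never points into new space and
// no slot needs to be remembered.
void EmbedViaCell(Code* code, HeapObject* target, Cell* old_space_cell) {
  old_space_cell->value = target;
  code->cell_target = old_space_cell;
}

// Scheme introduced by the reverted CL: embed the pointer directly and, if
// the target is in new space, record the slot in the remembered set.
void EmbedDirectly(Code* code, HeapObject* target,
                   RememberedSetOldToNew* old_to_new) {
  code->direct_target = target;
  if (target->in_new_space) {
    old_to_new->push_back({code, &code->direct_target});
  }
}
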
Author: vogelheim, 2016-06-23 09:02:33 -07:00 (committed by Commit bot)
Parent: ee657f0bed
Commit: 25d59e9d48
30 changed files with 167 additions and 251 deletions

View File

@ -138,7 +138,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -275,6 +275,7 @@ Operand::Operand(Handle<Object> handle) {
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
if (obj->IsHeapObject()) {
DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
imm32_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {

View File

@ -240,7 +240,19 @@ void MacroAssembler::Push(Handle<Object> handle) {
void MacroAssembler::Move(Register dst, Handle<Object> value) {
mov(dst, Operand(value));
AllowDeferredHandleDereference smi_check;
if (value->IsSmi()) {
mov(dst, Operand(value));
} else {
DCHECK(value->IsHeapObject());
if (isolate()->heap()->InNewSpace(*value)) {
Handle<Cell> cell = isolate()->factory()->NewCell(value);
mov(dst, Operand(cell));
ldr(dst, FieldMemOperand(dst, Cell::kValueOffset));
} else {
mov(dst, Operand(value));
}
}
}

View File

@ -731,7 +731,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -280,6 +280,7 @@ void Immediate::InitializeHandle(Handle<Object> handle) {
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
if (obj->IsHeapObject()) {
DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
value_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {

View File

@ -1425,7 +1425,14 @@ void MacroAssembler::LoadTrueFalseRoots(Register true_root,
void MacroAssembler::LoadHeapObject(Register result,
Handle<HeapObject> object) {
Mov(result, Operand(object));
AllowDeferredHandleDereference using_raw_address;
if (isolate()->heap()->InNewSpace(*object)) {
Handle<Cell> cell = isolate()->factory()->NewCell(object);
Mov(result, Operand(cell));
Ldr(result, FieldMemOperand(result, Cell::kValueOffset));
} else {
Mov(result, Operand(object));
}
}

View File

@ -284,14 +284,9 @@ void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
Handle<Code> code) {
Handle<WeakCell> cell = Code::WeakCellFor(code);
Heap* heap = isolate->heap();
if (heap->InNewSpace(*object)) {
heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
} else {
Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
dep =
DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
heap->AddWeakObjectToCodeDependency(object, dep);
}
Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
heap->AddWeakObjectToCodeDependency(object, dep);
}
} // namespace

View File

@ -426,12 +426,6 @@ void Heap::RecordWrite(Object* object, int offset, Object* o) {
HeapObject::cast(object)->address() + offset);
}
void Heap::RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* value) {
if (InNewSpace(value)) {
RecordWriteIntoCodeSlow(host, rinfo, value);
}
}
void Heap::RecordFixedArrayElements(FixedArray* array, int offset, int length) {
if (InNewSpace(array)) return;
Page* page = Page::FromAddress(reinterpret_cast<Address>(array));

View File

@ -1469,6 +1469,38 @@ void Heap::MarkCompactPrologue() {
}
#ifdef VERIFY_HEAP
// Visitor class to verify pointers in code or data space do not point into
// new space.
class VerifyNonPointerSpacePointersVisitor : public ObjectVisitor {
public:
explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
void VisitPointers(Object** start, Object** end) override {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
CHECK(!heap_->InNewSpace(HeapObject::cast(*current)));
}
}
}
private:
Heap* heap_;
};
static void VerifyNonPointerSpacePointers(Heap* heap) {
// Verify that there are no pointers to new space in spaces where we
// do not expect them.
VerifyNonPointerSpacePointersVisitor v(heap);
HeapObjectIterator code_it(heap->code_space());
for (HeapObject* object = code_it.Next(); object != NULL;
object = code_it.Next())
object->Iterate(&v);
}
#endif // VERIFY_HEAP
void Heap::CheckNewSpaceExpansionCriteria() {
if (FLAG_experimental_new_space_growth_heuristic) {
if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() &&
@ -1581,6 +1613,10 @@ void Heap::Scavenge() {
mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
#endif
gc_state_ = SCAVENGE;
// Implements Cheney's copying algorithm
@ -2823,10 +2859,6 @@ void Heap::CreateInitialObjects() {
*WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
TENURED));
set_weak_new_space_object_to_code_list(
ArrayList::cast(*(factory->NewFixedArray(16, TENURED))));
weak_new_space_object_to_code_list()->SetLength(0);
set_script_list(Smi::FromInt(0));
Handle<SeededNumberDictionary> slow_element_dictionary =
@ -2886,6 +2918,7 @@ void Heap::CreateInitialObjects() {
CreateFixedStubs();
}
bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
switch (root_index) {
case kNumberStringCacheRootIndex:
@ -2900,7 +2933,6 @@ bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
case kMicrotaskQueueRootIndex:
case kDetachedContextsRootIndex:
case kWeakObjectToCodeTableRootIndex:
case kWeakNewSpaceObjectToCodeListRootIndex:
case kRetainedMapsRootIndex:
case kNoScriptSharedFunctionInfosRootIndex:
case kWeakStackTraceListRootIndex:
@ -5532,18 +5564,6 @@ void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback) {
UNREACHABLE();
}
// TODO(ishell): Find a better place for this.
void Heap::AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
Handle<WeakCell> code) {
DCHECK(InNewSpace(*obj));
DCHECK(!InNewSpace(*code));
Handle<ArrayList> list(weak_new_space_object_to_code_list(), isolate());
list = ArrayList::Add(list, isolate()->factory()->NewWeakCell(obj), code);
if (*list != weak_new_space_object_to_code_list()) {
set_weak_new_space_object_to_code_list(*list);
}
}
// TODO(ishell): Find a better place for this.
void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
Handle<DependentCode> dep) {
@ -5699,26 +5719,6 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) {
}
}
void Heap::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
Object* value) {
DCHECK(InNewSpace(value));
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
if (rinfo->IsInConstantPool()) {
addr = rinfo->constant_pool_entry_address();
if (RelocInfo::IsCodeTarget(rmode)) {
slot_type = CODE_ENTRY_SLOT;
} else {
DCHECK(RelocInfo::IsEmbeddedObject(rmode));
slot_type = OBJECT_SLOT;
}
}
RememberedSet<OLD_TO_NEW>::InsertTyped(
source_page, reinterpret_cast<Address>(host), slot_type, addr);
}
Space* AllSpaces::next() {
switch (counter_++) {
case NEW_SPACE:

View File

@ -194,11 +194,6 @@ using v8::MemoryPressureLevel;
V(FixedArray, detached_contexts, DetachedContexts) \
V(ArrayList, retained_maps, RetainedMaps) \
V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable) \
/* weak_new_space_object_to_code_list is an array of weak cells, where */ \
/* slots with even indices refer to the weak object, and the subsequent */ \
/* slots refer to the code with the reference to the weak object. */ \
V(ArrayList, weak_new_space_object_to_code_list, \
WeakNewSpaceObjectToCodeList) \
V(Object, weak_stack_trace_list, WeakStackTraceList) \
V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
V(FixedArray, serialized_templates, SerializedTemplates) \
@ -865,9 +860,6 @@ class Heap {
return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
Handle<WeakCell> code);
void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
Handle<DependentCode> dep);
@ -1128,8 +1120,6 @@ class Heap {
// Write barrier support for object[offset] = o;
inline void RecordWrite(Object* object, int offset, Object* o);
inline void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* target);
void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* target);
inline void RecordFixedArrayElements(FixedArray* array, int offset,
int length);

View File

@ -1569,9 +1569,6 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
Code* host = rinfo->host();
// The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set.
DCHECK(!collector_->heap()->InNewSpace(target));
collector_->RecordRelocSlot(host, rinfo, target);
}
@ -1580,9 +1577,6 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
rinfo->IsPatchedDebugBreakSlotSequence());
Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
Code* host = rinfo->host();
// The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set.
DCHECK(!collector_->heap()->InNewSpace(target));
collector_->RecordRelocSlot(host, rinfo, target);
}
@ -1590,7 +1584,6 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* object = HeapObject::cast(rinfo->target_object());
Code* host = rinfo->host();
collector_->heap()->RecordWriteIntoCode(host, rinfo, object);
collector_->RecordRelocSlot(host, rinfo, object);
}
@ -1598,9 +1591,6 @@ class RecordMigratedSlotVisitor final : public ObjectVisitor {
DCHECK(rinfo->rmode() == RelocInfo::CELL);
Cell* cell = rinfo->target_cell();
Code* host = rinfo->host();
// The cell is always in old space, we don't have to record the slot in
// the old-to-new remembered set.
DCHECK(!collector_->heap()->InNewSpace(cell));
collector_->RecordRelocSlot(host, rinfo, cell);
}
@ -2466,35 +2456,6 @@ void MarkCompactCollector::MarkDependentCodeForDeoptimization(
current = current->next_link();
}
{
ArrayList* list = heap_->weak_new_space_object_to_code_list();
int counter = 0;
for (int i = 0; i < list->Length(); i += 2) {
WeakCell* obj = WeakCell::cast(list->Get(i));
WeakCell* dep = WeakCell::cast(list->Get(i + 1));
if (obj->cleared() || dep->cleared()) {
if (!dep->cleared()) {
Code* code = Code::cast(dep->value());
if (!code->marked_for_deoptimization()) {
DependentCode::SetMarkedForDeoptimization(
code, DependentCode::DependencyGroup::kWeakCodeGroup);
code->InvalidateEmbeddedObjects();
have_code_to_deoptimize_ = true;
}
}
} else {
// We record the slot manually because marking is finished at this
// point and the write barrier would bailout.
list->Set(counter, obj, SKIP_WRITE_BARRIER);
RecordSlot(list, list->Slot(counter), obj);
counter++;
list->Set(counter, dep, SKIP_WRITE_BARRIER);
RecordSlot(list, list->Slot(counter), dep);
counter++;
}
}
}
WeakHashTable* table = heap_->weak_object_to_code_table();
uint32_t capacity = table->Capacity();
for (uint32_t i = 0; i < capacity; i++) {
@ -2839,16 +2800,30 @@ void MarkCompactCollector::AbortTransitionArrays() {
heap()->set_encountered_transition_arrays(Smi::FromInt(0));
}
static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return CODE_TARGET_SLOT;
} else if (RelocInfo::IsCell(rmode)) {
return CELL_TARGET_SLOT;
} else if (RelocInfo::IsEmbeddedObject(rmode)) {
return EMBEDDED_OBJECT_SLOT;
} else if (RelocInfo::IsDebugBreakSlot(rmode)) {
return DEBUG_TARGET_SLOT;
}
UNREACHABLE();
return NUMBER_OF_SLOT_TYPES;
}
void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
Object* target) {
Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
RelocInfo::Mode rmode = rinfo->rmode();
if (target_page->IsEvacuationCandidate() &&
(rinfo->host() == NULL ||
!ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
SlotType slot_type = SlotTypeForRMode(rmode);
if (rinfo->IsInConstantPool()) {
addr = rinfo->constant_pool_entry_address();
if (RelocInfo::IsCodeTarget(rmode)) {
@ -3472,12 +3447,6 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
}
void MarkCompactCollector::InvalidateCode(Code* code) {
Page* page = Page::FromAddress(code->address());
Address start = code->instruction_start();
Address end = code->address() + code->Size();
RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
if (heap_->incremental_marking()->IsCompacting() &&
!ShouldSkipEvacuationSlotRecording(code)) {
DCHECK(compacting_);
@ -3489,7 +3458,11 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
// Ignore all slots that might have been recorded in the body of the
// deoptimized code object. Assumption: no slots will be recorded for
// this object after invalidating it.
Page* page = Page::FromAddress(code->address());
Address start = code->instruction_start();
Address end = code->address() + code->Size();
RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
}
}
@ -4090,9 +4063,6 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
MarkBit mark_bit = Marking::MarkBitFrom(host);
if (Marking::IsBlack(mark_bit)) {
RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
// The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set.
DCHECK(!heap()->InNewSpace(target));
RecordRelocSlot(host, &rinfo, target);
}
}

View File

@ -9,7 +9,6 @@
#include "src/heap/slot-set.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/macro-assembler.h"
namespace v8 {
namespace internal {
@ -26,19 +25,6 @@ void RememberedSet<direction>::ClearInvalidSlots(Heap* heap) {
});
}
}
for (MemoryChunk* chunk : *heap->code_space()) {
TypedSlotSet* slots = GetTypedSlotSet(chunk);
if (slots != nullptr) {
slots->Iterate(
[heap, chunk](SlotType type, Address host_addr, Address addr) {
if (Marking::IsBlack(Marking::MarkBitFrom(host_addr))) {
return KEEP_SLOT;
} else {
return REMOVE_SLOT;
}
});
}
}
}
template <PointerDirection direction>

View File

@ -345,20 +345,6 @@ class UpdateTypedSlotHelper {
}
};
inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return CODE_TARGET_SLOT;
} else if (RelocInfo::IsCell(rmode)) {
return CELL_TARGET_SLOT;
} else if (RelocInfo::IsEmbeddedObject(rmode)) {
return EMBEDDED_OBJECT_SLOT;
} else if (RelocInfo::IsDebugBreakSlot(rmode)) {
return DEBUG_TARGET_SLOT;
}
UNREACHABLE();
return NUMBER_OF_SLOT_TYPES;
}
} // namespace internal
} // namespace v8

View File

@ -137,7 +137,8 @@ void RelocInfo::set_target_object(Object* target,
if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
host() != NULL &&
target->IsHeapObject()) {
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
}
}
@ -340,6 +341,7 @@ Immediate::Immediate(Handle<Object> handle) {
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
if (obj->IsHeapObject()) {
DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
x_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {
@ -378,6 +380,7 @@ void Assembler::emit(Handle<Object> handle) {
AllowDeferredHandleDereference heap_object_check;
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
DCHECK(!isolate()->heap()->InNewSpace(obj));
if (obj->IsHeapObject()) {
emit(reinterpret_cast<intptr_t>(handle.location()),
RelocInfo::EMBEDDED_OBJECT);

View File

@ -2595,15 +2595,37 @@ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
void MacroAssembler::LoadHeapObject(Register result,
Handle<HeapObject> object) {
mov(result, object);
AllowDeferredHandleDereference embedding_raw_address;
if (isolate()->heap()->InNewSpace(*object)) {
Handle<Cell> cell = isolate()->factory()->NewCell(object);
mov(result, Operand::ForCell(cell));
} else {
mov(result, object);
}
}
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
cmp(reg, object);
AllowDeferredHandleDereference using_raw_address;
if (isolate()->heap()->InNewSpace(*object)) {
Handle<Cell> cell = isolate()->factory()->NewCell(object);
cmp(reg, Operand::ForCell(cell));
} else {
cmp(reg, object);
}
}
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
AllowDeferredHandleDereference using_raw_address;
if (isolate()->heap()->InNewSpace(*object)) {
Handle<Cell> cell = isolate()->factory()->NewCell(object);
push(Operand::ForCell(cell));
} else {
Push(object);
}
}
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) { Push(object); }
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
Register scratch) {

View File

@ -225,7 +225,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -225,6 +225,7 @@ Operand::Operand(Handle<Object> handle) {
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
if (obj->IsHeapObject()) {
DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
imm32_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {

View File

@ -1402,7 +1402,19 @@ void MacroAssembler::Usdc1(FPURegister fd, const MemOperand& rs,
void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
li(dst, Operand(value), mode);
AllowDeferredHandleDereference smi_check;
if (value->IsSmi()) {
li(dst, Operand(value), mode);
} else {
DCHECK(value->IsHeapObject());
if (isolate()->heap()->InNewSpace(*value)) {
Handle<Cell> cell = isolate()->factory()->NewCell(value);
li(dst, Operand(cell));
lw(dst, FieldMemOperand(dst, Cell::kValueOffset));
} else {
li(dst, Operand(value));
}
}
}

View File

@ -213,7 +213,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -204,6 +204,7 @@ Operand::Operand(Handle<Object> handle) {
// Verify all Objects referred by code are NOT in new space.
Object* obj = *handle;
if (obj->IsHeapObject()) {
DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
imm64_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {

View File

@ -1595,7 +1595,19 @@ void MacroAssembler::Usdc1(FPURegister fd, const MemOperand& rs,
}
void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
li(dst, Operand(value), mode);
AllowDeferredHandleDereference smi_check;
if (value->IsSmi()) {
li(dst, Operand(value), mode);
} else {
DCHECK(value->IsHeapObject());
if (isolate()->heap()->InNewSpace(*value)) {
Handle<Cell> cell = isolate()->factory()->NewCell(value);
li(dst, Operand(cell));
ld(dst, FieldMemOperand(dst, Cell::kValueOffset));
} else {
li(dst, Operand(value));
}
}
}
static inline int64_t ShiftAndFixSignExtension(int64_t imm, int bitnum) {

View File

@ -703,25 +703,11 @@ void Code::VerifyEmbeddedObjectsDependency() {
CHECK(map->dependent_code()->Contains(DependentCode::kWeakCodeGroup,
cell));
} else if (obj->IsJSObject()) {
if (isolate->heap()->InNewSpace(obj)) {
ArrayList* list =
GetIsolate()->heap()->weak_new_space_object_to_code_list();
bool found = false;
for (int i = 0; i < list->Length(); i += 2) {
WeakCell* obj_cell = WeakCell::cast(list->Get(i));
if (!obj_cell->cleared() && obj_cell->value() == obj &&
WeakCell::cast(list->Get(i + 1)) == cell) {
found = true;
break;
}
}
CHECK(found);
} else {
Handle<HeapObject> key_obj(HeapObject::cast(obj), isolate);
DependentCode* dep =
GetIsolate()->heap()->LookupWeakObjectToCodeDependency(key_obj);
dep->Contains(DependentCode::kWeakCodeGroup, cell);
}
WeakHashTable* table =
GetIsolate()->heap()->weak_object_to_code_table();
Handle<HeapObject> key_obj(HeapObject::cast(obj), isolate);
CHECK(DependentCode::cast(table->Lookup(key_obj))
->Contains(DependentCode::kWeakCodeGroup, cell));
}
}
}

View File

@ -2467,8 +2467,9 @@ Object** ArrayList::Slot(int index) {
return data_start() + kFirstIndex + index;
}
void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
FixedArray::cast(this)->set(kFirstIndex + index, obj, mode);
void ArrayList::Set(int index, Object* obj) {
FixedArray::cast(this)->set(kFirstIndex + index, obj);
}

View File

@ -2857,8 +2857,7 @@ class ArrayList : public FixedArray {
inline void SetLength(int length);
inline Object* Get(int index);
inline Object** Slot(int index);
inline void Set(int index, Object* obj,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void Set(int index, Object* obj);
inline void Clear(int index, Object* undefined);
bool IsFull();
DECLARE_CAST(ArrayList)

View File

@ -410,7 +410,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -2966,8 +2966,15 @@ void MacroAssembler::Push(Handle<Object> source) {
void MacroAssembler::MoveHeapObject(Register result,
Handle<Object> object) {
AllowDeferredHandleDereference using_raw_address;
DCHECK(object->IsHeapObject());
Move(result, object, RelocInfo::EMBEDDED_OBJECT);
if (isolate()->heap()->InNewSpace(*object)) {
Handle<Cell> cell = isolate()->factory()->NewCell(object);
Move(result, cell, RelocInfo::CELL);
movp(result, Operand(result, 0));
} else {
Move(result, object, RelocInfo::EMBEDDED_OBJECT);
}
}

View File

@ -927,6 +927,7 @@ class MacroAssembler: public Assembler {
AllowDeferredHandleDereference using_raw_address;
DCHECK(!RelocInfo::IsNone(rmode));
DCHECK(value->IsHeapObject());
DCHECK(!isolate()->heap()->InNewSpace(*value));
movp(dst, reinterpret_cast<void*>(value.location()), rmode);
}

View File

@ -140,7 +140,6 @@ void RelocInfo::set_target_object(Object* target,
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
}
}

View File

@ -4817,67 +4817,6 @@ TEST(ObjectsInOptimizedCodeAreWeak) {
CHECK(code->marked_for_deoptimization());
}
TEST(NewSpaceObjectsInOptimizedCode) {
if (i::FLAG_always_opt || !i::FLAG_crankshaft || i::FLAG_turbo) return;
i::FLAG_weak_embedded_objects_in_optimized_code = true;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
Handle<Code> code;
{
LocalContext context;
HandleScope scope(heap->isolate());
CompileRun(
"var foo;"
"var bar;"
"(function() {"
" function foo_func(x) { with (x) { return 1 + x; } };"
" %NeverOptimizeFunction(foo_func);"
" function bar_func() {"
" return foo(1);"
" };"
" bar = bar_func;"
" foo = foo_func;"
" bar_func();"
" bar_func();"
" bar_func();"
" %OptimizeFunctionOnNextCall(bar_func);"
" bar_func();"
"})();");
Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CcTest::global()
->Get(context.local(), v8_str("bar"))
.ToLocalChecked())));
Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CcTest::global()
->Get(context.local(), v8_str("foo"))
.ToLocalChecked())));
CHECK(heap->InNewSpace(*foo));
heap->CollectGarbage(NEW_SPACE);
heap->CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*foo));
#ifdef VERIFY_HEAP
heap->Verify();
#endif
CHECK(!bar->code()->marked_for_deoptimization());
code = scope.CloseAndEscape(Handle<Code>(bar->code()));
}
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage();
}
CHECK(code->marked_for_deoptimization());
}
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;

View File

@ -1852,19 +1852,15 @@ TEST(CodeSerializerCell) {
assembler.enable_serializer();
Handle<HeapNumber> number = isolate->factory()->NewHeapNumber(0.3);
CHECK(isolate->heap()->InNewSpace(*number));
Handle<Code> code;
{
MacroAssembler* masm = &assembler;
Handle<Cell> cell = isolate->factory()->NewCell(number);
masm->Move(rax, cell, RelocInfo::CELL);
masm->movp(rax, Operand(rax, 0));
masm->ret(0);
CodeDesc desc;
masm->GetCode(&desc);
code = isolate->factory()->NewCode(desc, Code::ComputeFlags(Code::FUNCTION),
masm->CodeObject());
code->set_has_reloc_info_for_serialization(true);
}
MacroAssembler* masm = &assembler;
masm->MoveHeapObject(rax, number);
masm->ret(0);
CodeDesc desc;
masm->GetCode(&desc);
Handle<Code> code = isolate->factory()->NewCode(
desc, Code::ComputeFlags(Code::FUNCTION), masm->CodeObject());
code->set_has_reloc_info_for_serialization(true);
RelocIterator rit1(*code, 1 << RelocInfo::CELL);
CHECK_EQ(*number, rit1.rinfo()->target_cell()->value());