Simplify current inline allocation tracking mechanism.

R=yurys@chromium.org

Review URL: https://codereview.chromium.org/65043006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17626 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mstarzinger@chromium.org 2013-11-11 18:00:52 +00:00
parent a6795ea92e
commit 8f611593a4
8 changed files with 11 additions and 82 deletions
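
In short, the hunks below delete the masm-side hook (HeapProfiler::RecordObjectAllocationFromMasm, its ExternalReference entry, and the MacroAssembler::RecordObjectAllocation helpers) and instead have MacroAssembler::Allocate skip inline bump-pointer allocation whenever the heap profiler is tracking allocations, so allocations are effectively funnelled through the runtime path while tracking is on. A self-contained toy model of that routing (illustrative C++ only, not V8 code; the ToyHeap names are made up):

// Toy model: when tracking is on, the inline fast path is bypassed and every
// allocation funnels through the slow path, the single place that records.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct ToyHeap {
  uint8_t buffer[1 << 16];
  size_t top = 0;
  bool tracking = false;      // stands in for is_tracking_allocations()
  size_t recorded = 0;

  void* SlowAllocate(size_t size) {
    if (tracking) recorded++;  // the profiler hook lives only on the slow path
    void* result = buffer + top;
    top += size;
    return result;
  }

  void* Allocate(size_t size) {
    // Mirrors the new check in MacroAssembler::Allocate: while tracking,
    // refuse to allocate inline and fall back to the slow path.
    if (tracking) return SlowAllocate(size);
    void* result = buffer + top;
    top += size;
    return result;
  }
};

int main() {
  static ToyHeap heap;
  heap.Allocate(16);           // fast path, not recorded
  heap.tracking = true;
  heap.Allocate(16);           // routed through SlowAllocate and recorded
  std::printf("recorded allocations: %zu\n", heap.recorded);
  return 0;
}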


@@ -1335,14 +1335,6 @@ ExternalReference ExternalReference::address_of_the_hole_nan() {
 }
-ExternalReference ExternalReference::record_object_allocation_function(
-    Isolate* isolate) {
-  return ExternalReference(
-      Redirect(isolate,
-               FUNCTION_ADDR(HeapProfiler::RecordObjectAllocationFromMasm)));
-}
 ExternalReference ExternalReference::address_of_uint32_bias() {
   return ExternalReference(
       reinterpret_cast<void*>(&double_constants.uint32_bias));


@@ -725,9 +725,6 @@ class ExternalReference BASE_EMBEDDED {
   static ExternalReference get_make_code_young_function(Isolate* isolate);
   static ExternalReference get_mark_code_as_executed_function(Isolate* isolate);
-  // New heap objects tracking support.
-  static ExternalReference record_object_allocation_function(Isolate* isolate);
   // Deoptimization support.
   static ExternalReference new_deoptimizer_function(Isolate* isolate);
   static ExternalReference compute_output_frames_function(Isolate* isolate);


@@ -171,13 +171,6 @@ void HeapProfiler::StopHeapAllocationsRecording() {
 }
-void HeapProfiler::RecordObjectAllocationFromMasm(Isolate* isolate,
-                                                  Address obj,
-                                                  int size) {
-  isolate->heap_profiler()->NewObjectEvent(obj, size);
-}
 void HeapProfiler::DropCompiledCode() {
   Isolate* isolate = heap()->isolate();
   HandleScope scope(isolate);


@@ -56,10 +56,6 @@ class HeapProfiler {
   void StartHeapObjectsTracking();
   void StopHeapObjectsTracking();
-  static void RecordObjectAllocationFromMasm(Isolate* isolate,
-                                             Address obj,
-                                             int size);
   SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
   int GetSnapshotsCount();
   HeapSnapshot* GetSnapshot(int index);


@@ -840,9 +840,7 @@ static bool AbortIncrementalMarkingAndCollectGarbage(
 }
-void Heap::ReserveSpace(
-    int *sizes,
-    Address *locations_out) {
+void Heap::ReserveSpace(int *sizes, Address *locations_out) {
   bool gc_performed = true;
   int counter = 0;
   static const int kThreshold = 20;


@@ -568,17 +568,13 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
       UNCLASSIFIED,
       61,
       "Heap::allocation_sites_list_address()");
-  Add(ExternalReference::record_object_allocation_function(isolate).address(),
-      UNCLASSIFIED,
-      62,
-      "HeapProfiler::RecordObjectAllocationFromMasm");
   Add(ExternalReference::address_of_uint32_bias().address(),
       UNCLASSIFIED,
-      63,
+      62,
       "uint32_bias");
   Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
       UNCLASSIFIED,
-      64,
+      63,
       "Code::MarkCodeAsExecuted");
   // Add a small set of deopt entry addresses to encoder without generating the


@@ -4081,7 +4081,10 @@ void MacroAssembler::Allocate(int object_size,
                               AllocationFlags flags) {
   ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
   ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
-  if (!FLAG_inline_new) {
+  if (!FLAG_inline_new ||
+      // TODO(mstarzinger): Implement more efficiently by keeping the
+      // bump-pointer allocation area empty instead of recompiling code.
+      isolate()->heap_profiler()->is_tracking_allocations()) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
@@ -4100,10 +4103,6 @@ void MacroAssembler::Allocate(int object_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
-  if (isolate()->heap_profiler()->is_tracking_allocations()) {
-    RecordObjectAllocation(isolate(), result, object_size);
-  }
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
@@ -4165,7 +4164,10 @@ void MacroAssembler::Allocate(Register object_size,
                               Label* gc_required,
                               AllocationFlags flags) {
   ASSERT((flags & SIZE_IN_WORDS) == 0);
-  if (!FLAG_inline_new) {
+  if (!FLAG_inline_new ||
+      // TODO(mstarzinger): Implement more efficiently by keeping the
+      // bump-pointer allocation area empty instead of recompiling code.
+      isolate()->heap_profiler()->is_tracking_allocations()) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
@@ -4183,10 +4185,6 @@ void MacroAssembler::Allocate(Register object_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
-  if (isolate()->heap_profiler()->is_tracking_allocations()) {
-    RecordObjectAllocation(isolate(), result, object_size);
-  }
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
@@ -4947,38 +4945,6 @@ void MacroAssembler::TestJSArrayForAllocationMemento(
 }
-void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
-                                            Register object,
-                                            Register object_size) {
-  FrameScope frame(this, StackFrame::EXIT);
-  PushSafepointRegisters();
-  PrepareCallCFunction(3);
-  // In case object is rdx
-  movq(kScratchRegister, object);
-  movq(arg_reg_3, object_size);
-  movq(arg_reg_2, kScratchRegister);
-  movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
-  CallCFunction(
-      ExternalReference::record_object_allocation_function(isolate), 3);
-  PopSafepointRegisters();
-}
-void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
-                                            Register object,
-                                            int object_size) {
-  FrameScope frame(this, StackFrame::EXIT);
-  PushSafepointRegisters();
-  PrepareCallCFunction(3);
-  movq(arg_reg_2, object);
-  movq(arg_reg_3, Immediate(object_size));
-  movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
-  CallCFunction(
-      ExternalReference::record_object_allocation_function(isolate), 3);
-  PopSafepointRegisters();
-}
 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
     Register object,
     Register scratch0,

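The TODO in the hunks above points at a further simplification: rather than checking is_tracking_allocations() at code-generation time (which means recompiling code whenever tracking toggles), the heap could keep the bump-pointer allocation area empty while tracking, so already-generated inline allocation code always overflows into the slow path. A toy sketch of that idea (a guess at the TODO's intent, not code from this commit; all names are illustrative), in contrast to the flag check modelled earlier:

// Toy sketch: with limit == top the unchanged fast path always overflows,
// so no tracking flag is consulted and nothing has to be recompiled.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct ToySpace {
  uint8_t buffer[1 << 16];
  size_t top = 0;
  size_t limit = sizeof(buffer);
  size_t recorded = 0;

  void StartTracking() { limit = top; }            // empty the inline area
  void StopTracking() { limit = sizeof(buffer); }  // restore it

  void* SlowAllocate(size_t size) {
    recorded++;              // observation happens only on the slow path
    void* result = buffer + top;
    top += size;
    return result;
  }

  void* Allocate(size_t size) {
    // The fast path never looks at a tracking flag; an emptied area
    // (top >= limit) simply forces it onto the slow path.
    if (top + size > limit) return SlowAllocate(size);
    void* result = buffer + top;
    top += size;
    return result;
  }
};

int main() {
  static ToySpace space;
  space.Allocate(16);        // fast path
  space.StartTracking();
  space.Allocate(16);        // overflows the emptied area, gets recorded
  std::printf("recorded allocations: %zu\n", space.recorded);
  return 0;
}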

@@ -1116,15 +1116,6 @@ class MacroAssembler: public Assembler {
                 Label* gc_required,
                 AllocationFlags flags);
-  // Record a JS object allocation if allocations tracking mode is on.
-  void RecordObjectAllocation(Isolate* isolate,
-                              Register object,
-                              Register object_size);
-  void RecordObjectAllocation(Isolate* isolate,
-                              Register object,
-                              int object_size);
   // Undo allocation in new space. The object passed and objects allocated after
   // it will no longer be allocated. Make sure that no pointers are left to the
   // object(s) no longer allocated as they would be invalid when allocation is