Simplify feedback vector creation and store in SharedFunctionInfo.

LOG=N
BUG=v8:3212
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/254623002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21085 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: mvstanton@chromium.org
Date: 2014-04-30 10:51:01 +00:00
Parent: d60dbdaf27
Commit: 15dc39a86f
28 changed files with 214 additions and 225 deletions
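
For orientation before the per-file diffs: this change moves the type feedback vector off TypeFeedbackInfo (which hangs off the unoptimized code object) onto SharedFunctionInfo, with CompilationInfo carrying the vector through a compile and reusing the one already stored on the shared info when recompiling. Below is a rough, non-V8 sketch of that ownership flow using simplified stand-in types (none of these declarations are the real V8 classes); the actual changes are in the compiler.cc, factory.cc and objects.h hunks further down.

```cpp
// Simplified stand-ins only; the real types are V8's CompilationInfo,
// SharedFunctionInfo and Factory::NewTypeFeedbackVector() in this diff.
#include <memory>
#include <vector>

struct SharedFunctionInfo {
  bool is_compiled = false;
  // After this commit the feedback vector is rooted here, not in
  // TypeFeedbackInfo on the unoptimized code object.
  std::shared_ptr<std::vector<int>> feedback_vector;
};

struct CompilationInfo {
  SharedFunctionInfo* shared = nullptr;
  std::shared_ptr<std::vector<int>> feedback_vector;

  // Mirrors CompilationInfo::Initialize(): reuse the vector already stored
  // on the shared info instead of creating a new one for a recompile.
  void Initialize() {
    if (shared != nullptr && shared->is_compiled) {
      feedback_vector = shared->feedback_vector;
    }
  }

  // Mirrors PrepareForCompilation(): once the parser has counted the slots,
  // allocate the vector only if we did not inherit one above.
  void PrepareForCompilation(int slot_count) {
    if (!feedback_vector) {
      feedback_vector = std::make_shared<std::vector<int>>(slot_count);
    }
  }
};
```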


@ -133,8 +133,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1170,12 +1168,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
__ Move(r1, FeedbackVector());
__ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
__ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
__ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
@ -2719,9 +2713,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2908,12 +2899,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
if (FLAG_pretenuring_call_new) {
StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
isolate()->factory()->NewAllocationSite());
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
ASSERT(expr->AllocationSiteFeedbackSlot() ==
expr->CallNewFeedbackSlot() + 1);
}


@ -129,8 +129,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1181,12 +1179,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register x0. Iterate through that.
__ Bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
__ LoadObject(x1, FeedbackVector());
__ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
__ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
__ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
@ -2429,9 +2423,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
@ -2628,12 +2619,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
if (FLAG_pretenuring_call_new) {
StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
isolate()->factory()->NewAllocationSite());
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
ASSERT(expr->AllocationSiteFeedbackSlot() ==
expr->CallNewFeedbackSlot() + 1);
}


@ -571,14 +571,9 @@ void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
}
int Call::ComputeFeedbackSlotCount(Isolate* isolate) {
bool Call::IsUsingCallFeedbackSlot(Isolate* isolate) const {
CallType call_type = GetCallType(isolate);
if (call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL) {
// Call only uses a slot in some cases.
return 1;
}
return 0;
return call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL;
}


@ -159,15 +159,21 @@ class AstProperties V8_FINAL BASE_EMBEDDED {
public:
class Flags : public EnumSet<AstPropertiesFlag, int> {};
AstProperties() : node_count_(0) {}
AstProperties() : node_count_(0), feedback_slots_(0) {}
Flags* flags() { return &flags_; }
int node_count() { return node_count_; }
void add_node_count(int count) { node_count_ += count; }
int feedback_slots() const { return feedback_slots_; }
void increase_feedback_slots(int count) {
feedback_slots_ += count;
}
private:
Flags flags_;
int node_count_;
int feedback_slots_;
};
@ -906,8 +912,7 @@ class ForInStatement V8_FINAL : public ForEachStatement,
}
// Type feedback information.
virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; }
virtual int ComputeFeedbackSlotCount(Isolate* isolate) { return 1; }
virtual int ComputeFeedbackSlotCount() { return 1; }
virtual void SetFirstFeedbackSlot(int slot) { for_in_feedback_slot_ = slot; }
int ForInFeedbackSlot() {
@ -1733,8 +1738,7 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface {
ZoneList<Expression*>* arguments() const { return arguments_; }
// Type feedback information.
virtual ComputablePhase GetComputablePhase() { return AFTER_SCOPING; }
virtual int ComputeFeedbackSlotCount(Isolate* isolate);
virtual int ComputeFeedbackSlotCount() { return 1; }
virtual void SetFirstFeedbackSlot(int slot) {
call_feedback_slot_ = slot;
}
@ -1777,6 +1781,7 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface {
// Helpers to determine how to handle the call.
CallType GetCallType(Isolate* isolate) const;
bool IsUsingCallFeedbackSlot(Isolate* isolate) const;
#ifdef DEBUG
// Used to assert that the FullCodeGenerator records the return site.
@ -1818,8 +1823,7 @@ class CallNew V8_FINAL : public Expression, public FeedbackSlotInterface {
ZoneList<Expression*>* arguments() const { return arguments_; }
// Type feedback information.
virtual ComputablePhase GetComputablePhase() { return DURING_PARSE; }
virtual int ComputeFeedbackSlotCount(Isolate* isolate) {
virtual int ComputeFeedbackSlotCount() {
return FLAG_pretenuring_call_new ? 2 : 1;
}
virtual void SetFirstFeedbackSlot(int slot) {
@ -2355,14 +2359,8 @@ class FunctionLiteral V8_FINAL : public Expression {
void set_ast_properties(AstProperties* ast_properties) {
ast_properties_ = *ast_properties;
}
void set_slot_processor(DeferredFeedbackSlotProcessor* slot_processor) {
slot_processor_ = *slot_processor;
}
void ProcessFeedbackSlots(Isolate* isolate) {
slot_processor_.ProcessFeedbackSlots(isolate);
}
int slot_count() {
return slot_processor_.slot_count();
return ast_properties_.feedback_slots();
}
bool dont_optimize() { return dont_optimize_reason_ != kNoReason; }
BailoutReason dont_optimize_reason() { return dont_optimize_reason_; }
@ -2413,7 +2411,6 @@ class FunctionLiteral V8_FINAL : public Expression {
ZoneList<Statement*>* body_;
Handle<String> inferred_name_;
AstProperties ast_properties_;
DeferredFeedbackSlotProcessor slot_processor_;
BailoutReason dont_optimize_reason_;
int materialized_literal_count_;
@ -2894,7 +2891,6 @@ class AstConstructionVisitor BASE_EMBEDDED {
AstProperties* ast_properties() { return &properties_; }
BailoutReason dont_optimize_reason() { return dont_optimize_reason_; }
DeferredFeedbackSlotProcessor* slot_processor() { return &slot_processor_; }
private:
template<class> friend class AstNodeFactory;
@ -2912,11 +2908,14 @@ class AstConstructionVisitor BASE_EMBEDDED {
}
void add_slot_node(FeedbackSlotInterface* slot_node) {
slot_processor_.add_slot_node(zone_, slot_node);
int count = slot_node->ComputeFeedbackSlotCount();
if (count > 0) {
slot_node->SetFirstFeedbackSlot(properties_.feedback_slots());
properties_.increase_feedback_slots(count);
}
}
AstProperties properties_;
DeferredFeedbackSlotProcessor slot_processor_;
BailoutReason dont_optimize_reason_;
Zone* zone_;
};


@ -118,6 +118,13 @@ void CompilationInfo::Initialize(Isolate* isolate,
SetStrictMode(shared_info_->strict_mode());
}
set_bailout_reason(kUnknown);
if (!shared_info().is_null() && shared_info()->is_compiled()) {
// We should initialize the CompilationInfo feedback vector from the
// passed in shared info, rather than creating a new one.
feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(),
isolate);
}
}
@ -226,7 +233,13 @@ bool CompilationInfo::ShouldSelfOptimize() {
void CompilationInfo::PrepareForCompilation(Scope* scope) {
ASSERT(scope_ == NULL);
scope_ = scope;
function()->ProcessFeedbackSlots(isolate_);
int length = function()->slot_count();
if (feedback_vector_.is_null()) {
// Allocate the feedback vector too.
feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
}
ASSERT(feedback_vector_->length() == length);
}
@ -548,6 +561,8 @@ static void UpdateSharedFunctionInfo(CompilationInfo* info) {
shared->ReplaceCode(*code);
if (shared->optimization_disabled()) code->set_optimizable(false);
shared->set_feedback_vector(*info->feedback_vector());
// Set the expected number of properties for instances.
FunctionLiteral* lit = info->function();
int expected = lit->expected_property_count();
@ -806,7 +821,8 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
lit->materialized_literal_count(),
lit->is_generator(),
info->code(),
ScopeInfo::Create(info->scope(), info->zone()));
ScopeInfo::Create(info->scope(), info->zone()),
info->feedback_vector());
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
SetFunctionInfo(result, lit, true, script);
@ -1004,7 +1020,8 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
literal->materialized_literal_count(),
literal->is_generator(),
info.code(),
scope_info);
scope_info,
info.feedback_vector());
SetFunctionInfo(result, literal, false, script);
RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
result->set_allows_lazy_compilation(allow_lazy);


@ -155,12 +155,14 @@ class CompilationInfo {
ASSERT(function_ == NULL);
function_ = literal;
}
// When the scope is applied, we may have deferred work to do on the function.
void PrepareForCompilation(Scope* scope);
void SetGlobalScope(Scope* global_scope) {
ASSERT(global_scope_ == NULL);
global_scope_ = global_scope;
}
Handle<FixedArray> feedback_vector() const {
return feedback_vector_;
}
void SetCode(Handle<Code> code) { code_ = code; }
void SetExtension(v8::Extension* extension) {
ASSERT(!is_lazy());
@ -396,6 +398,9 @@ class CompilationInfo {
// global script. Will be a null handle otherwise.
Handle<Context> context_;
// Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
Handle<FixedArray> feedback_vector_;
// Compilation mode flag and whether deoptimization is allowed.
Mode mode_;
BailoutId osr_ast_id_;


@ -155,7 +155,6 @@ Handle<TypeFeedbackInfo> Factory::NewTypeFeedbackInfo() {
Handle<TypeFeedbackInfo> info =
Handle<TypeFeedbackInfo>::cast(NewStruct(TYPE_FEEDBACK_INFO_TYPE));
info->initialize_storage();
info->set_feedback_vector(*empty_fixed_array(), SKIP_WRITE_BARRIER);
return info;
}
@ -1809,15 +1808,32 @@ void Factory::BecomeJSFunction(Handle<JSReceiver> object) {
}
Handle<FixedArray> Factory::NewTypeFeedbackVector(int slot_count) {
// Ensure we can skip the write barrier
ASSERT_EQ(isolate()->heap()->uninitialized_symbol(),
*TypeFeedbackInfo::UninitializedSentinel(isolate()));
CALL_HEAP_FUNCTION(
isolate(),
isolate()->heap()->AllocateFixedArrayWithFiller(
slot_count,
TENURED,
*TypeFeedbackInfo::UninitializedSentinel(isolate())),
FixedArray);
}
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
Handle<String> name,
int number_of_literals,
bool is_generator,
Handle<Code> code,
Handle<ScopeInfo> scope_info) {
Handle<ScopeInfo> scope_info,
Handle<FixedArray> feedback_vector) {
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name);
shared->set_code(*code);
shared->set_scope_info(*scope_info);
shared->set_feedback_vector(*feedback_vector);
int literals_array_size = number_of_literals;
// If the function contains object, regexp or array literals,
// allocate extra space for a literals array prefix containing the
@ -1875,6 +1891,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(Handle<String> name) {
share->set_script(*undefined_value(), SKIP_WRITE_BARRIER);
share->set_debug_info(*undefined_value(), SKIP_WRITE_BARRIER);
share->set_inferred_name(*empty_string(), SKIP_WRITE_BARRIER);
share->set_feedback_vector(*empty_fixed_array(), SKIP_WRITE_BARRIER);
share->set_initial_map(*undefined_value(), SKIP_WRITE_BARRIER);
share->set_profiler_ticks(0);
share->set_ast_node_count(0);


@ -600,13 +600,16 @@ class Factory V8_FINAL {
return Handle<String>(&isolate()->heap()->hidden_string_);
}
Handle<FixedArray> NewTypeFeedbackVector(int slot_count);
// Allocates a new SharedFunctionInfo object.
Handle<SharedFunctionInfo> NewSharedFunctionInfo(
Handle<String> name,
int number_of_literals,
bool is_generator,
Handle<Code> code,
Handle<ScopeInfo> scope_info);
Handle<ScopeInfo> scope_info,
Handle<FixedArray> feedback_vector);
Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
// Allocates a new JSMessageObject object.


@ -12,76 +12,16 @@
namespace v8 {
namespace internal {
enum ComputablePhase {
DURING_PARSE,
AFTER_SCOPING
};
class FeedbackSlotInterface {
public:
static const int kInvalidFeedbackSlot = -1;
virtual ~FeedbackSlotInterface() {}
// When can we ask how many feedback slots are necessary?
virtual ComputablePhase GetComputablePhase() = 0;
virtual int ComputeFeedbackSlotCount(Isolate* isolate) = 0;
virtual int ComputeFeedbackSlotCount() = 0;
virtual void SetFirstFeedbackSlot(int slot) = 0;
};
class DeferredFeedbackSlotProcessor {
public:
DeferredFeedbackSlotProcessor()
: slot_nodes_(NULL),
slot_count_(0) { }
void add_slot_node(Zone* zone, FeedbackSlotInterface* slot) {
if (slot->GetComputablePhase() == DURING_PARSE) {
// No need to add to the list
int count = slot->ComputeFeedbackSlotCount(zone->isolate());
slot->SetFirstFeedbackSlot(slot_count_);
slot_count_ += count;
} else {
if (slot_nodes_ == NULL) {
slot_nodes_ = new(zone) ZoneList<FeedbackSlotInterface*>(10, zone);
}
slot_nodes_->Add(slot, zone);
}
}
void ProcessFeedbackSlots(Isolate* isolate) {
// Scope analysis must have been done.
if (slot_nodes_ == NULL) {
return;
}
int current_slot = slot_count_;
for (int i = 0; i < slot_nodes_->length(); i++) {
FeedbackSlotInterface* slot_interface = slot_nodes_->at(i);
int count = slot_interface->ComputeFeedbackSlotCount(isolate);
if (count > 0) {
slot_interface->SetFirstFeedbackSlot(current_slot);
current_slot += count;
}
}
slot_count_ = current_slot;
slot_nodes_->Clear();
}
int slot_count() {
ASSERT(slot_count_ >= 0);
return slot_count_;
}
private:
ZoneList<FeedbackSlotInterface*>* slot_nodes_;
int slot_count_;
};
} } // namespace v8::internal
#endif // V8_FEEDBACK_SLOTS_H_
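
The deleted DeferredFeedbackSlotProcessor above implemented a two-phase (parse-time plus post-scoping) slot assignment; after this change slots are assigned eagerly while the AST is built, as the add_slot_node hunk in the ast.h diff shows. A minimal standalone sketch of that eager scheme, again with simplified stand-in types rather than the real AST classes:

```cpp
// Simplified sketch of eager feedback slot numbering; AstProperties and
// FeedbackSlotInterface here are stand-ins for the classes in ast.h.
struct AstProperties {
  int feedback_slots = 0;
};

struct FeedbackSlotInterface {
  virtual ~FeedbackSlotInterface() {}
  virtual int ComputeFeedbackSlotCount() = 0;  // no Isolate* needed anymore
  virtual void SetFirstFeedbackSlot(int slot) = 0;
};

// Counterpart of AstConstructionVisitor::add_slot_node() in this diff:
// hand out the next run of slot indices as each node is constructed.
inline void AddSlotNode(AstProperties* properties,
                        FeedbackSlotInterface* node) {
  int count = node->ComputeFeedbackSlotCount();
  if (count > 0) {
    node->SetFirstFeedbackSlot(properties->feedback_slots);
    properties->feedback_slots += count;
  }
}
```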


@ -361,14 +361,10 @@ unsigned FullCodeGenerator::EmitBackEdgeTable() {
}
void FullCodeGenerator::InitializeFeedbackVector() {
int length = info_->function()->slot_count();
feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED);
Handle<Object> sentinel = TypeFeedbackInfo::UninitializedSentinel(isolate());
// Ensure that it's safe to set without using a write barrier.
ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), *sentinel);
for (int i = 0; i < length; i++) {
feedback_vector_->set(i, *sentinel, SKIP_WRITE_BARRIER);
void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) {
Handle<FixedArray> vector = FeedbackVector();
if (!vector->get(slot)->IsAllocationSite()) {
vector->set(slot, *isolate()->factory()->NewAllocationSite());
}
}
@ -391,13 +387,13 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
info->set_ic_total_count(ic_total_count_);
info->set_feedback_vector(*FeedbackVector());
ASSERT(!isolate()->heap()->InNewSpace(*info));
code->set_type_feedback_info(*info);
}
void FullCodeGenerator::Initialize() {
InitializeAstVisitor(info_->zone());
// The generation of debug code must match between the snapshot code and the
// code that is generated later. This is assumed by the debugger when it is
// calculating PC offsets after generating a debug version of code. Therefore
@ -408,7 +404,6 @@ void FullCodeGenerator::Initialize() {
!Snapshot::HaveASnapshotToStartFrom();
masm_->set_emit_debug_code(generate_debug_code_);
masm_->set_predictable_code_size(true);
InitializeAstVisitor(info_->zone());
}
@ -1554,8 +1549,10 @@ void FullCodeGenerator::VisitNativeFunctionLiteral(
Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
bool is_generator = false;
Handle<SharedFunctionInfo> shared =
isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
code, Handle<ScopeInfo>(fun->shared()->scope_info()));
isolate()->factory()->NewSharedFunctionInfo(
name, literals, is_generator,
code, Handle<ScopeInfo>(fun->shared()->scope_info()),
Handle<FixedArray>(fun->shared()->feedback_vector()));
shared->set_construct_stub(*construct_stub);
// Copy the function data to the shared function info.


@ -419,12 +419,9 @@ class FullCodeGenerator: public AstVisitor {
// Feedback slot support. The feedback vector will be cleared during gc and
// collected by the type-feedback oracle.
Handle<FixedArray> FeedbackVector() {
return feedback_vector_;
return info_->feedback_vector();
}
void StoreFeedbackVectorSlot(int slot, Handle<Object> object) {
feedback_vector_->set(slot, *object);
}
void InitializeFeedbackVector();
void EnsureSlotContainsAllocationSite(int slot);
// Record a call's return site offset, used to rebuild the frame if the
// called function was inlined at the site.
@ -826,7 +823,6 @@ class FullCodeGenerator: public AstVisitor {
ZoneList<BackEdgeEntry> back_edges_;
int ic_total_count_;
Handle<FixedArray> handler_table_;
Handle<FixedArray> feedback_vector_;
Handle<Cell> profiling_counter_;
bool generate_debug_code_;


@ -1399,6 +1399,9 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
SetInternalReference(obj, entry,
"optimized_code_map", shared->optimized_code_map(),
SharedFunctionInfo::kOptimizedCodeMapOffset);
SetInternalReference(obj, entry,
"feedback_vector", shared->feedback_vector(),
SharedFunctionInfo::kFeedbackVectorOffset);
SetWeakReference(obj, entry,
"initial_map", shared->initial_map(),
SharedFunctionInfo::kInitialMapOffset);


@ -7327,6 +7327,7 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
target_shared->set_scope_info(*target_scope_info);
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
target_shared->set_feedback_vector(*target_info.feedback_vector());
Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
&target_info,
target_shared);


@ -117,8 +117,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1112,15 +1110,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
// No need for a write barrier, we are storing a Smi in the feedback vector.
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
__ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
@ -2671,9 +2664,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2851,12 +2841,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
if (FLAG_pretenuring_call_new) {
StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
isolate()->factory()->NewAllocationSite());
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
ASSERT(expr->AllocationSiteFeedbackSlot() ==
expr->CallNewFeedbackSlot() + 1);
}


@ -362,7 +362,6 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
VerifyObjectField(kStorage1Offset);
VerifyObjectField(kStorage2Offset);
VerifyHeapPointer(feedback_vector());
}
@ -543,6 +542,7 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() {
VerifyObjectField(kNameOffset);
VerifyObjectField(kCodeOffset);
VerifyObjectField(kOptimizedCodeMapOffset);
VerifyObjectField(kFeedbackVectorOffset);
VerifyObjectField(kScopeInfoOffset);
VerifyObjectField(kInstanceClassNameOffset);
VerifyObjectField(kFunctionDataOffset);


@ -5099,6 +5099,8 @@ ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
kInstanceClassNameOffset)
@ -5357,6 +5359,7 @@ void SharedFunctionInfo::ReplaceCode(Code* value) {
}
ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
set_code(value);
}
@ -6789,10 +6792,6 @@ bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
}
ACCESSORS(TypeFeedbackInfo, feedback_vector, FixedArray,
kFeedbackVectorOffset)
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


@ -547,8 +547,6 @@ void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "TypeFeedbackInfo");
PrintF(out, " - ic_total_count: %d, ic_with_type_info_count: %d\n",
ic_total_count(), ic_with_type_info_count());
PrintF(out, " - feedback_vector: ");
feedback_vector()->FixedArrayPrint(out);
}
@ -877,6 +875,7 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) {
PrintF(out, " - name: ");
name()->ShortPrint(out);
PrintF(out, "\n - expected_nof_properties: %d", expected_nof_properties());
PrintF(out, "\n - ast_node_count: %d", ast_node_count());
PrintF(out, "\n - instance class name = ");
instance_class_name()->Print(out);
PrintF(out, "\n - code = ");
@ -904,6 +903,8 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) {
PrintF(out, "\n - length = %d", length());
PrintF(out, "\n - optimized_code_map = ");
optimized_code_map()->ShortPrint(out);
PrintF(out, "\n - feedback_vector = ");
feedback_vector()->FixedArrayPrint(out);
PrintF(out, "\n");
}


@ -405,9 +405,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCode(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
Code* code = Code::cast(object);
if (FLAG_cleanup_code_caches_at_gc) {
code->ClearTypeFeedbackInfo(heap);
}
if (FLAG_age_code && !Serializer::enabled(heap->isolate())) {
code->MakeOlder(heap->mark_compact_collector()->marking_parity());
}
@ -423,6 +420,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
if (shared->ic_age() != heap->global_ic_age()) {
shared->ResetForNewContext(heap->global_ic_age());
}
if (FLAG_cleanup_code_caches_at_gc) {
shared->ClearTypeFeedbackInfo();
}
if (FLAG_cache_optimized_code &&
FLAG_flush_optimized_code_cache &&
!shared->optimized_code_map()->IsSmi()) {


@ -11178,19 +11178,16 @@ void Code::ClearInlineCaches(Code::Kind* kind) {
}
void Code::ClearTypeFeedbackInfo(Heap* heap) {
if (kind() != FUNCTION) return;
Object* raw_info = type_feedback_info();
if (raw_info->IsTypeFeedbackInfo()) {
FixedArray* feedback_vector =
TypeFeedbackInfo::cast(raw_info)->feedback_vector();
for (int i = 0; i < feedback_vector->length(); i++) {
Object* obj = feedback_vector->get(i);
if (!obj->IsAllocationSite()) {
// TODO(mvstanton): Can't I avoid a write barrier for this sentinel?
feedback_vector->set(i,
TypeFeedbackInfo::RawUninitializedSentinel(heap));
}
void SharedFunctionInfo::ClearTypeFeedbackInfo() {
FixedArray* vector = feedback_vector();
Heap* heap = GetHeap();
for (int i = 0; i < vector->length(); i++) {
Object* obj = vector->get(i);
if (!obj->IsAllocationSite()) {
vector->set(
i,
TypeFeedbackInfo::RawUninitializedSentinel(heap),
SKIP_WRITE_BARRIER);
}
}
}


@ -5669,8 +5669,6 @@ class Code: public HeapObject {
void ClearInlineCaches();
void ClearInlineCaches(Kind kind);
void ClearTypeFeedbackInfo(Heap* heap);
BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset);
uint32_t TranslateAstIdToPcOffset(BailoutId ast_id);
@ -6933,6 +6931,8 @@ class SharedFunctionInfo: public HeapObject {
// Removed a specific optimized code object from the optimized code map.
void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason);
void ClearTypeFeedbackInfo();
// Trims the optimized code map after entries have been removed.
void TrimOptimizedCodeMap(int shrink_by);
@ -7037,6 +7037,12 @@ class SharedFunctionInfo: public HeapObject {
inline int construction_count();
inline void set_construction_count(int value);
// [feedback_vector] - accumulates ast node feedback from full-codegen and
// (increasingly) from crankshafted code where sufficient feedback isn't
// available. Currently the field is duplicated in
// TypeFeedbackInfo::feedback_vector, but the allocation is done here.
DECL_ACCESSORS(feedback_vector, FixedArray)
// [initial_map]: initial map of the first function called as a constructor.
// Saved for the duration of the tracking phase.
// This is a weak link (GC resets it to undefined_value if no other live
@ -7318,8 +7324,10 @@ class SharedFunctionInfo: public HeapObject {
static const int kScriptOffset = kFunctionDataOffset + kPointerSize;
static const int kDebugInfoOffset = kScriptOffset + kPointerSize;
static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
static const int kInitialMapOffset =
static const int kFeedbackVectorOffset =
kInferredNameOffset + kPointerSize;
static const int kInitialMapOffset =
kFeedbackVectorOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
@ -8399,7 +8407,6 @@ class TypeFeedbackInfo: public Struct {
inline void set_inlined_type_change_checksum(int checksum);
inline bool matches_inlined_type_change_checksum(int checksum);
DECL_ACCESSORS(feedback_vector, FixedArray)
static inline TypeFeedbackInfo* cast(Object* obj);
@ -8409,10 +8416,9 @@ class TypeFeedbackInfo: public Struct {
static const int kStorage1Offset = HeapObject::kHeaderSize;
static const int kStorage2Offset = kStorage1Offset + kPointerSize;
static const int kFeedbackVectorOffset =
kStorage2Offset + kPointerSize;
static const int kSize = kFeedbackVectorOffset + kPointerSize;
static const int kSize = kStorage2Offset + kPointerSize;
// TODO(mvstanton): move these sentinel declarations to shared function info.
// The object that indicates an uninitialized cache.
static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
@ -8428,9 +8434,6 @@ class TypeFeedbackInfo: public Struct {
// garbage collection (e.g., for patching the cache).
static inline Object* RawUninitializedSentinel(Heap* heap);
static const int kForInFastCaseMarker = 0;
static const int kForInSlowCaseMarker = 1;
private:
static const int kTypeChangeChecksumBits = 7;


@ -973,7 +973,6 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
FunctionLiteral::kNotGenerator,
0);
result->set_ast_properties(factory()->visitor()->ast_properties());
result->set_slot_processor(factory()->visitor()->slot_processor());
result->set_dont_optimize_reason(
factory()->visitor()->dont_optimize_reason());
} else if (stack_overflow()) {
@ -3235,7 +3234,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
FunctionLiteral::IsParenthesizedFlag parenthesized = parenthesized_function_
? FunctionLiteral::kIsParenthesized
: FunctionLiteral::kNotParenthesized;
DeferredFeedbackSlotProcessor* slot_processor;
AstProperties ast_properties;
BailoutReason dont_optimize_reason = kNoReason;
// Parse function body.
@ -3406,7 +3404,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
CHECK_OK);
}
ast_properties = *factory()->visitor()->ast_properties();
slot_processor = factory()->visitor()->slot_processor();
dont_optimize_reason = factory()->visitor()->dont_optimize_reason();
}
@ -3433,7 +3430,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
pos);
function_literal->set_function_token_position(function_token_pos);
function_literal->set_ast_properties(&ast_properties);
function_literal->set_slot_processor(slot_processor);
function_literal->set_dont_optimize_reason(dont_optimize_reason);
if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);


@ -3115,6 +3115,7 @@ RUNTIME_FUNCTION(Runtime_SetCode) {
target_shared->ReplaceCode(source_shared->code());
target_shared->set_scope_info(source_shared->scope_info());
target_shared->set_length(source_shared->length());
target_shared->set_feedback_vector(source_shared->feedback_vector());
target_shared->set_formal_parameter_count(
source_shared->formal_parameter_count());
target_shared->set_script(source_shared->script());
@ -8606,10 +8607,10 @@ RUNTIME_FUNCTION(Runtime_ClearFunctionTypeFeedback) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
function->shared()->ClearTypeFeedbackInfo();
Code* unoptimized = function->shared()->code();
if (unoptimized->kind() == Code::FUNCTION) {
unoptimized->ClearInlineCaches();
unoptimized->ClearTypeFeedbackInfo(isolate->heap());
}
return isolate->heap()->undefined_value();
}


@ -20,18 +20,18 @@ namespace internal {
TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
Handle<FixedArray> feedback_vector,
Handle<Context> native_context,
Zone* zone)
: native_context_(native_context),
zone_(zone) {
Object* raw_info = code->type_feedback_info();
if (raw_info->IsTypeFeedbackInfo()) {
feedback_vector_ = Handle<FixedArray>(TypeFeedbackInfo::cast(raw_info)->
feedback_vector());
}
BuildDictionary(code);
ASSERT(dictionary_->IsDictionary());
// We make a copy of the feedback vector because a GC could clear
// the type feedback info contained therein.
// TODO(mvstanton): revisit the decision to copy when we weakly
// traverse the feedback vector at GC time.
feedback_vector_ = isolate()->factory()->CopyFixedArray(feedback_vector);
}
@ -113,9 +113,9 @@ bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) {
byte TypeFeedbackOracle::ForInType(int feedback_vector_slot) {
Handle<Object> value = GetInfo(feedback_vector_slot);
return value->IsSmi() &&
Smi::cast(*value)->value() == TypeFeedbackInfo::kForInFastCaseMarker
? ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN;
return value.is_identical_to(
TypeFeedbackInfo::UninitializedSentinel(isolate()))
? ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN;
}


@ -21,6 +21,7 @@ class SmallMapList;
class TypeFeedbackOracle: public ZoneObject {
public:
TypeFeedbackOracle(Handle<Code> code,
Handle<FixedArray> feedback_vector,
Handle<Context> native_context,
Zone* zone);


@ -16,8 +16,9 @@ namespace internal {
AstTyper::AstTyper(CompilationInfo* info)
: info_(info),
oracle_(
Handle<Code>(info->closure()->shared()->code()),
Handle<Context>(info->closure()->context()->native_context()),
handle(info->closure()->shared()->code()),
handle(info->closure()->shared()->feedback_vector()),
handle(info->closure()->context()->native_context()),
info->zone()),
store_(info->zone()) {
InitializeAstVisitor(info->zone());
@ -507,7 +508,7 @@ void AstTyper::VisitCall(Call* expr) {
// Collect type feedback.
RECURSE(Visit(expr->expression()));
if (!expr->expression()->IsProperty() &&
expr->HasCallFeedbackSlot() &&
expr->IsUsingCallFeedbackSlot(isolate()) &&
oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) {
expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot()));
}


@ -117,8 +117,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1148,15 +1146,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
// No need for a write barrier, we are storing a Smi in the feedback vector.
__ Move(rbx, FeedbackVector());
__ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
TypeFeedbackInfo::MegamorphicSentinel(isolate()));
__ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
__ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@ -2664,9 +2657,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ Move(rbx, FeedbackVector());
__ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
@ -2844,12 +2834,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code, but not in the snapshot.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
if (FLAG_pretenuring_call_new) {
StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
isolate()->factory()->NewAllocationSite());
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
ASSERT(expr->AllocationSiteFeedbackSlot() ==
expr->CallNewFeedbackSlot() + 1);
}


@ -290,6 +290,78 @@ TEST(GetScriptLineNumber) {
}
TEST(FeedbackVectorPreservedAcrossRecompiles) {
if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft()) return;
v8::HandleScope scope(CcTest::isolate());
// Make sure function f has a call that uses a type feedback slot.
CompileRun("function fun() {};"
"fun1 = fun;"
"function f(a) { a(); } f(fun1);");
Handle<JSFunction> f =
v8::Utils::OpenHandle(
*v8::Handle<v8::Function>::Cast(
CcTest::global()->Get(v8_str("f"))));
// We shouldn't have deoptimization support. We want to recompile and
// verify that our feedback vector preserves information.
CHECK(!f->shared()->has_deoptimization_support());
Handle<FixedArray> feedback_vector(f->shared()->feedback_vector());
// Verify that we gathered feedback.
CHECK_EQ(1, feedback_vector->length());
CHECK(feedback_vector->get(0)->IsJSFunction());
CompileRun("%OptimizeFunctionOnNextCall(f); f(fun1);");
// Verify that the feedback is still "gathered" despite a recompilation
// of the full code.
CHECK(f->IsOptimized());
CHECK(f->shared()->has_deoptimization_support());
CHECK(f->shared()->feedback_vector()->get(0)->IsJSFunction());
}
TEST(FeedbackVectorUnaffectedByScopeChanges) {
if (i::FLAG_always_opt || !i::FLAG_lazy) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CompileRun("function builder() {"
" call_target = function() { return 3; };"
" return (function() {"
" eval('');"
" return function() {"
" 'use strict';"
" call_target();"
" }"
" })();"
"}"
"morphing_call = builder();");
Handle<JSFunction> f =
v8::Utils::OpenHandle(
*v8::Handle<v8::Function>::Cast(
CcTest::global()->Get(v8_str("morphing_call"))));
// morphing_call should have one feedback vector slot for the call to
// call_target().
CHECK_EQ(1, f->shared()->feedback_vector()->length());
// And yet it's not compiled.
CHECK(!f->shared()->is_compiled());
CompileRun("morphing_call();");
// The vector should have the same size despite the new scoping.
CHECK_EQ(1, f->shared()->feedback_vector()->length());
CHECK(f->shared()->is_compiled());
}
// Test that optimized code for different closures is actually shared
// immediately by the FastNewClosureStub when run in the same context.
TEST(OptimizedCodeSharing) {


@ -3064,8 +3064,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
*v8::Handle<v8::Function>::Cast(
CcTest::global()->Get(v8_str("f"))));
Handle<FixedArray> feedback_vector(TypeFeedbackInfo::cast(
f->shared()->code()->type_feedback_info())->feedback_vector());
Handle<FixedArray> feedback_vector(f->shared()->feedback_vector());
CHECK_EQ(2, feedback_vector->length());
CHECK(feedback_vector->get(0)->IsJSFunction());