Continued fix for 351257. Reusing the feedback vector is too complex.

Attempting to re-use the type feedback vector stored in the
SharedFunctionInfo turns out to be difficult to handle across the
various compilation cases. It will be much easier to do once deferred
type feedback processing is removed, which is already in the works.

Created bug v8:3212 to track re-introducing the optimization of reusing
the type feedback vector on recompilation before optimization.

The CL also brings the type feedback vector back onto the SharedFunctionInfo.

BUG=351257
LOG=Y
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/199973004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19919 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mvstanton@chromium.org 2014-03-14 09:28:37 +00:00
parent 0a61b7c996
commit dd28969c1c
27 changed files with 138 additions and 140 deletions
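
For orientation, the shape of the change is: the feedback vector is now
allocated once in CompilationInfo::PrepareForCompilation(), pre-filled with
the uninitialized sentinel, threaded through Factory::NewSharedFunctionInfo(),
and kept rooted on the SharedFunctionInfo instead of on the code object's
TypeFeedbackInfo. The following is a minimal standalone C++ sketch of that
ownership model; the type and function names are illustrative stand-ins, not
the real V8 classes.

// Simplified model only: std::vector stands in for the TENURED FixedArray,
// and an enum stands in for the sentinel symbols.
#include <cassert>
#include <vector>

enum class Feedback { kUninitialized, kMonomorphic };

// Stand-in for SharedFunctionInfo: it now owns the feedback vector.
struct SharedInfo {
  std::vector<Feedback> feedback_vector;
};

// Stand-in for CompilationInfo::PrepareForCompilation(): size the vector
// from the function's slot count and pre-fill it with the sentinel.
std::vector<Feedback> PrepareFeedbackVector(int slot_count) {
  return std::vector<Feedback>(slot_count, Feedback::kUninitialized);
}

// Stand-in for Factory::NewSharedFunctionInfo(), which now takes the
// vector as an extra argument and installs it on the new shared info.
SharedInfo NewSharedInfo(std::vector<Feedback> feedback_vector) {
  SharedInfo shared;
  shared.feedback_vector = std::move(feedback_vector);
  return shared;
}

int main() {
  SharedInfo shared = NewSharedInfo(PrepareFeedbackVector(2));
  assert(shared.feedback_vector[0] == Feedback::kUninitialized);
  shared.feedback_vector[0] = Feedback::kMonomorphic;  // ICs record feedback here
  return 0;
}

Full-codegen no longer allocates or initializes its own copy; its
FeedbackVector() accessor simply reads the vector held by the CompilationInfo.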

View File

@ -129,8 +129,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1162,12 +1160,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register x0. Iterate through that.
__ Bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
__ LoadObject(x1, FeedbackVector());
__ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
__ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
__ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check.
@ -2414,9 +2408,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2613,9 +2604,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

View File

@ -131,8 +131,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1166,12 +1164,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
__ Move(r1, FeedbackVector());
__ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
__ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
__ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
@ -2711,9 +2705,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2900,9 +2891,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

View File

@ -249,6 +249,17 @@ void CompilationInfo::PrepareForCompilation(Scope* scope) {
ASSERT(scope_ == NULL);
scope_ = scope;
function()->ProcessFeedbackSlots(isolate_);
int length = function()->slot_count();
// Allocate the feedback vector too.
feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED);
// Ensure we can skip the write barrier
ASSERT_EQ(isolate()->heap()->uninitialized_symbol(),
*TypeFeedbackInfo::UninitializedSentinel(isolate()));
for (int i = 0; i < length; i++) {
feedback_vector_->set(i,
*TypeFeedbackInfo::UninitializedSentinel(isolate()),
SKIP_WRITE_BARRIER);
}
}
@ -570,6 +581,8 @@ static void UpdateSharedFunctionInfo(CompilationInfo* info) {
shared->ReplaceCode(*code);
if (shared->optimization_disabled()) code->set_optimizable(false);
shared->set_feedback_vector(*info->feedback_vector());
// Set the expected number of properties for instances.
FunctionLiteral* lit = info->function();
int expected = lit->expected_property_count();
@ -823,7 +836,8 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
lit->materialized_literal_count(),
lit->is_generator(),
info->code(),
ScopeInfo::Create(info->scope(), info->zone()));
ScopeInfo::Create(info->scope(), info->zone()),
info->feedback_vector());
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
SetFunctionInfo(result, lit, true, script);
@ -1022,7 +1036,8 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
literal->materialized_literal_count(),
literal->is_generator(),
info.code(),
scope_info);
scope_info,
info.feedback_vector());
SetFunctionInfo(result, literal, false, script);
RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
result->set_allows_lazy_compilation(allow_lazy);

View File

@ -175,6 +175,9 @@ class CompilationInfo {
ASSERT(global_scope_ == NULL);
global_scope_ = global_scope;
}
Handle<FixedArray> feedback_vector() const {
return feedback_vector_;
}
void SetCode(Handle<Code> code) { code_ = code; }
void SetExtension(v8::Extension* extension) {
ASSERT(!is_lazy());
@ -403,6 +406,9 @@ class CompilationInfo {
// global script. Will be a null handle otherwise.
Handle<Context> context_;
// Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
Handle<FixedArray> feedback_vector_;
// Compilation mode flag and whether deoptimization is allowed.
Mode mode_;
BailoutId osr_ast_id_;

View File

@ -1542,10 +1542,12 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
int number_of_literals,
bool is_generator,
Handle<Code> code,
Handle<ScopeInfo> scope_info) {
Handle<ScopeInfo> scope_info,
Handle<FixedArray> feedback_vector) {
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name);
shared->set_code(*code);
shared->set_scope_info(*scope_info);
shared->set_feedback_vector(*feedback_vector);
int literals_array_size = number_of_literals;
// If the function contains object, regexp or array literals,
// allocate extra space for a literals array prefix containing the

View File

@ -517,7 +517,8 @@ class Factory {
int number_of_literals,
bool is_generator,
Handle<Code> code,
Handle<ScopeInfo> scope_info);
Handle<ScopeInfo> scope_info,
Handle<FixedArray> feedback_vector);
Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
Handle<JSMessageObject> NewJSMessageObject(

View File

@ -386,18 +386,6 @@ unsigned FullCodeGenerator::EmitBackEdgeTable() {
}
void FullCodeGenerator::InitializeFeedbackVector() {
int length = info_->function()->slot_count();
feedback_vector_ = isolate()->factory()->NewFixedArray(length, TENURED);
Handle<Object> sentinel = TypeFeedbackInfo::UninitializedSentinel(isolate());
// Ensure that it's safe to set without using a write barrier.
ASSERT_EQ(isolate()->heap()->uninitialized_symbol(), *sentinel);
for (int i = 0; i < length; i++) {
feedback_vector_->set(i, *sentinel, SKIP_WRITE_BARRIER);
}
}
void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
// Fill in the deoptimization information.
ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
@ -416,7 +404,6 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
info->set_ic_total_count(ic_total_count_);
info->set_feedback_vector(*FeedbackVector());
ASSERT(!isolate()->heap()->InNewSpace(*info));
code->set_type_feedback_info(*info);
}
@ -1618,7 +1605,8 @@ void FullCodeGenerator::VisitNativeFunctionLiteral(
bool is_generator = false;
Handle<SharedFunctionInfo> shared =
isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
code, Handle<ScopeInfo>(fun->shared()->scope_info()));
code, Handle<ScopeInfo>(fun->shared()->scope_info()),
Handle<FixedArray>(fun->shared()->feedback_vector()));
shared->set_construct_stub(*construct_stub);
// Copy the function data to the shared function info.

View File

@ -437,12 +437,8 @@ class FullCodeGenerator: public AstVisitor {
// Feedback slot support. The feedback vector will be cleared during gc and
// collected by the type-feedback oracle.
Handle<FixedArray> FeedbackVector() {
return feedback_vector_;
return info_->feedback_vector();
}
void StoreFeedbackVectorSlot(int slot, Handle<Object> object) {
feedback_vector_->set(slot, *object);
}
void InitializeFeedbackVector();
// Record a call's return site offset, used to rebuild the frame if the
// called function was inlined at the site.
@ -844,7 +840,6 @@ class FullCodeGenerator: public AstVisitor {
ZoneList<BackEdgeEntry> back_edges_;
int ic_total_count_;
Handle<FixedArray> handler_table_;
Handle<FixedArray> feedback_vector_;
Handle<Cell> profiling_counter_;
bool generate_debug_code_;

View File

@ -1398,6 +1398,9 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
SetInternalReference(obj, entry,
"optimized_code_map", shared->optimized_code_map(),
SharedFunctionInfo::kOptimizedCodeMapOffset);
SetInternalReference(obj, entry,
"feedback_vector", shared->feedback_vector(),
SharedFunctionInfo::kFeedbackVectorOffset);
SetWeakReference(obj, entry,
"initial_map", shared->initial_map(),
SharedFunctionInfo::kInitialMapOffset);

View File

@ -2687,7 +2687,6 @@ MaybeObject* Heap::AllocateTypeFeedbackInfo() {
if (!maybe_info->To(&info)) return maybe_info;
}
info->initialize_storage();
info->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER);
return info;
}
@ -3809,6 +3808,7 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_script(undefined_value(), SKIP_WRITE_BARRIER);
share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER);
share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
share->set_feedback_vector(empty_fixed_array(), SKIP_WRITE_BARRIER);
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_ast_node_count(0);
share->set_counters(0);

View File

@ -7206,6 +7206,7 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
target_shared->set_scope_info(*target_scope_info);
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
target_shared->set_feedback_vector(*target_info.feedback_vector());
Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
&target_info,
target_shared);

View File

@ -119,8 +119,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1104,15 +1102,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
// No need for a write barrier, we are storing a Smi in the feedback vector.
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
__ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
@ -2665,9 +2658,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2845,9 +2835,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));

View File

@ -139,8 +139,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1176,12 +1174,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
__ li(a1, FeedbackVector());
__ li(a2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
__ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
__ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
__ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
@ -2735,9 +2729,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ li(a2, FeedbackVector());
__ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
@ -2922,9 +2913,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
__ li(a2, FeedbackVector());
__ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

View File

@ -368,7 +368,6 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
VerifyObjectField(kStorage1Offset);
VerifyObjectField(kStorage2Offset);
VerifyHeapPointer(feedback_vector());
}
@ -552,6 +551,7 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() {
VerifyObjectField(kNameOffset);
VerifyObjectField(kCodeOffset);
VerifyObjectField(kOptimizedCodeMapOffset);
VerifyObjectField(kFeedbackVectorOffset);
VerifyObjectField(kScopeInfoOffset);
VerifyObjectField(kInstanceClassNameOffset);
VerifyObjectField(kFunctionDataOffset);

View File

@ -1481,7 +1481,7 @@ AllocationSiteMode AllocationSite::GetMode(
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
ElementsKind to) {
if (IsFastSmiElementsKind(from) &&
IsMoreGeneralElementsKindTransition(from, to)) {
IsMoreGeneralElementsKindTransition(from, to)) {
return TRACK_ALLOCATION_SITE;
}
@ -4980,6 +4980,8 @@ ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, FixedArray,
kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
kInstanceClassNameOffset)
@ -6652,10 +6654,6 @@ bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
}
ACCESSORS(TypeFeedbackInfo, feedback_vector, FixedArray,
kFeedbackVectorOffset)
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)

View File

@ -566,8 +566,6 @@ void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "TypeFeedbackInfo");
PrintF(out, " - ic_total_count: %d, ic_with_type_info_count: %d\n",
ic_total_count(), ic_with_type_info_count());
PrintF(out, " - feedback_vector: ");
feedback_vector()->FixedArrayPrint(out);
}
@ -881,6 +879,8 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(FILE* out) {
PrintF(out, "\n - length = %d", length());
PrintF(out, "\n - optimized_code_map = ");
optimized_code_map()->ShortPrint(out);
PrintF(out, "\n - feedback_vector = ");
feedback_vector()->FixedArrayPrint(out);
PrintF(out, "\n");
}

View File

@ -427,9 +427,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCode(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
Code* code = Code::cast(object);
if (FLAG_cleanup_code_caches_at_gc) {
code->ClearTypeFeedbackInfo(heap);
}
if (FLAG_age_code && !Serializer::enabled()) {
code->MakeOlder(heap->mark_compact_collector()->marking_parity());
}
@ -445,6 +442,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
if (shared->ic_age() != heap->global_ic_age()) {
shared->ResetForNewContext(heap->global_ic_age());
}
if (FLAG_cleanup_code_caches_at_gc) {
shared->ClearTypeFeedbackInfo(heap);
}
if (FLAG_cache_optimized_code &&
FLAG_flush_optimized_code_cache &&
!shared->optimized_code_map()->IsSmi()) {

View File

@ -10640,19 +10640,16 @@ void Code::ClearInlineCaches(Code::Kind* kind) {
}
void Code::ClearTypeFeedbackInfo(Heap* heap) {
if (kind() != FUNCTION) return;
Object* raw_info = type_feedback_info();
if (raw_info->IsTypeFeedbackInfo()) {
FixedArray* feedback_vector =
TypeFeedbackInfo::cast(raw_info)->feedback_vector();
for (int i = 0; i < feedback_vector->length(); i++) {
Object* obj = feedback_vector->get(i);
if (!obj->IsAllocationSite()) {
// TODO(mvstanton): Can't I avoid a write barrier for this sentinel?
feedback_vector->set(i,
TypeFeedbackInfo::RawUninitializedSentinel(heap));
}
void SharedFunctionInfo::ClearTypeFeedbackInfo(Heap* heap) {
FixedArray* vector = feedback_vector();
for (int i = 0; i < vector->length(); i++) {
Object* obj = vector->get(i);
if (!obj->IsAllocationSite()) {
// The assert verifies we can skip the write barrier.
ASSERT(heap->uninitialized_symbol() ==
TypeFeedbackInfo::RawUninitializedSentinel(heap));
vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
SKIP_WRITE_BARRIER);
}
}
}
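
Since the vector now lives on the SharedFunctionInfo, GC-time clearing moves
from Code::ClearTypeFeedbackInfo to SharedFunctionInfo::ClearTypeFeedbackInfo:
every slot that is not an AllocationSite is reset in place to the
uninitialized sentinel, and the write barrier can be skipped because the
sentinel is the heap's uninitialized symbol. A minimal standalone sketch of
that loop, with illustrative names rather than the real V8 types:

#include <cassert>
#include <vector>

enum class Slot { kUninitialized, kCallTarget, kAllocationSite };

// Mirrors the loop above: non-AllocationSite slots revert to the sentinel,
// while AllocationSite slots are preserved so allocation tracking survives GC.
void ClearTypeFeedback(std::vector<Slot>* vector) {
  for (Slot& slot : *vector) {
    if (slot != Slot::kAllocationSite) slot = Slot::kUninitialized;
  }
}

int main() {
  std::vector<Slot> vector = {Slot::kCallTarget, Slot::kAllocationSite};
  ClearTypeFeedback(&vector);
  assert(vector[0] == Slot::kUninitialized);   // call feedback dropped
  assert(vector[1] == Slot::kAllocationSite);  // allocation site kept
  return 0;
}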

View File

@ -5497,8 +5497,6 @@ class Code: public HeapObject {
void ClearInlineCaches();
void ClearInlineCaches(Kind kind);
void ClearTypeFeedbackInfo(Heap* heap);
BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset);
uint32_t TranslateAstIdToPcOffset(BailoutId ast_id);
@ -6694,6 +6692,8 @@ class SharedFunctionInfo: public HeapObject {
// Removed a specific optimized code object from the optimized code map.
void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason);
void ClearTypeFeedbackInfo(Heap* heap);
// Trims the optimized code map after entries have been removed.
void TrimOptimizedCodeMap(int shrink_by);
@ -6802,6 +6802,12 @@ class SharedFunctionInfo: public HeapObject {
inline int construction_count();
inline void set_construction_count(int value);
// [feedback_vector] - accumulates ast node feedback from full-codegen and
// (increasingly) from crankshafted code where sufficient feedback isn't
// available. Currently the field is duplicated in
// TypeFeedbackInfo::feedback_vector, but the allocation is done here.
DECL_ACCESSORS(feedback_vector, FixedArray)
// [initial_map]: initial map of the first function called as a constructor.
// Saved for the duration of the tracking phase.
// This is a weak link (GC resets it to undefined_value if no other live
@ -7082,8 +7088,10 @@ class SharedFunctionInfo: public HeapObject {
static const int kScriptOffset = kFunctionDataOffset + kPointerSize;
static const int kDebugInfoOffset = kScriptOffset + kPointerSize;
static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
static const int kInitialMapOffset =
static const int kFeedbackVectorOffset =
kInferredNameOffset + kPointerSize;
static const int kInitialMapOffset =
kFeedbackVectorOffset + kPointerSize;
// ast_node_count is a Smi field. It could be grouped with another Smi field
// into a PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
static const int kAstNodeCountOffset =
@ -8169,8 +8177,6 @@ class TypeFeedbackInfo: public Struct {
inline void set_inlined_type_change_checksum(int checksum);
inline bool matches_inlined_type_change_checksum(int checksum);
DECL_ACCESSORS(feedback_vector, FixedArray)
static inline TypeFeedbackInfo* cast(Object* obj);
// Dispatched behavior.
@ -8179,10 +8185,9 @@ class TypeFeedbackInfo: public Struct {
static const int kStorage1Offset = HeapObject::kHeaderSize;
static const int kStorage2Offset = kStorage1Offset + kPointerSize;
static const int kFeedbackVectorOffset =
kStorage2Offset + kPointerSize;
static const int kSize = kFeedbackVectorOffset + kPointerSize;
static const int kSize = kStorage2Offset + kPointerSize;
// TODO(mvstanton): move these sentinel declarations to shared function info.
// The object that indicates an uninitialized cache.
static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
@ -8198,9 +8203,6 @@ class TypeFeedbackInfo: public Struct {
// garbage collection (e.g., for patching the cache).
static inline Object* RawUninitializedSentinel(Heap* heap);
static const int kForInFastCaseMarker = 0;
static const int kForInSlowCaseMarker = 1;
private:
static const int kTypeChangeChecksumBits = 7;

View File

@ -2929,6 +2929,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
// Set the code, scope info, formal parameter count, and the length
// of the target shared function info.
target_shared->ReplaceCode(source_shared->code());
target_shared->set_feedback_vector(source_shared->feedback_vector());
target_shared->set_scope_info(source_shared->scope_info());
target_shared->set_length(source_shared->length());
target_shared->set_formal_parameter_count(
@ -8477,10 +8478,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearFunctionTypeFeedback) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
function->shared()->ClearTypeFeedbackInfo(isolate->heap());
Code* unoptimized = function->shared()->code();
if (unoptimized->kind() == Code::FUNCTION) {
unoptimized->ClearInlineCaches();
unoptimized->ClearTypeFeedbackInfo(isolate->heap());
}
return isolate->heap()->undefined_value();
}

View File

@ -43,16 +43,12 @@ namespace internal {
TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
Handle<FixedArray> feedback_vector,
Handle<Context> native_context,
Zone* zone)
: native_context_(native_context),
zone_(zone) {
Object* raw_info = code->type_feedback_info();
if (raw_info->IsTypeFeedbackInfo()) {
feedback_vector_ = Handle<FixedArray>(TypeFeedbackInfo::cast(raw_info)->
feedback_vector());
}
zone_(zone),
feedback_vector_(feedback_vector) {
BuildDictionary(code);
ASSERT(dictionary_->IsDictionary());
}
@ -132,9 +128,9 @@ bool TypeFeedbackOracle::CallNewIsMonomorphic(int slot) {
byte TypeFeedbackOracle::ForInType(int feedback_vector_slot) {
Handle<Object> value = GetInfo(feedback_vector_slot);
return value->IsSmi() &&
Smi::cast(*value)->value() == TypeFeedbackInfo::kForInFastCaseMarker
? ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN;
return value.is_identical_to(
TypeFeedbackInfo::UninitializedSentinel(isolate()))
? ForInStatement::FAST_FOR_IN : ForInStatement::SLOW_FOR_IN;
}
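
With the for-in fast/slow case markers gone, ForInType() above keys off the
sentinels instead: a slot that is still the uninitialized sentinel means the
fast path was never demoted, while the megamorphic sentinel written by
full-codegen's slow path means slow for-in. A tiny illustrative sketch of
that decision (names are stand-ins, not V8 API):

enum class Slot { kUninitializedSentinel, kMegamorphicSentinel };
enum class ForInMode { kFastForIn, kSlowForIn };

// Mirrors the new TypeFeedbackOracle::ForInType() decision.
ForInMode ForInType(Slot slot) {
  return slot == Slot::kUninitializedSentinel ? ForInMode::kFastForIn
                                              : ForInMode::kSlowForIn;
}

int main() {
  return ForInType(Slot::kMegamorphicSentinel) == ForInMode::kSlowForIn ? 0 : 1;
}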

View File

@ -44,6 +44,7 @@ class SmallMapList;
class TypeFeedbackOracle: public ZoneObject {
public:
TypeFeedbackOracle(Handle<Code> code,
Handle<FixedArray> feedback_vector,
Handle<Context> native_context,
Zone* zone);

View File

@ -40,6 +40,7 @@ AstTyper::AstTyper(CompilationInfo* info)
: info_(info),
oracle_(
Handle<Code>(info->closure()->shared()->code()),
Handle<FixedArray>(info->closure()->shared()->feedback_vector()),
Handle<Context>(info->closure()->context()->native_context()),
info->zone()),
store_(info->zone()) {

View File

@ -119,8 +119,6 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
InitializeFeedbackVector();
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@ -1127,15 +1125,10 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
Handle<Object> feedback = Handle<Object>(
Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
isolate());
StoreFeedbackVectorSlot(slot, feedback);
// No need for a write barrier, we are storing a Smi in the feedback vector.
__ Move(rbx, FeedbackVector());
__ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
TypeFeedbackInfo::MegamorphicSentinel(isolate()));
__ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
__ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@ -2648,9 +2641,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
__ Move(rbx, FeedbackVector());
__ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
@ -2828,9 +2818,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code, but not in the snapshot.
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
__ Move(rbx, FeedbackVector());
__ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));

View File

@ -312,6 +312,43 @@ TEST(GetScriptLineNumber) {
}
TEST(FeedbackVectorRecreatedOnScopeChanges) {
if (i::FLAG_always_opt || !i::FLAG_lazy) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CompileRun("function builder() {"
" call_target = function() { return 3; };"
" return (function() {"
" eval('');"
" return function() {"
" 'use strict';"
" call_target();"
" }"
" })();"
"}"
"morphing_call = builder();");
Handle<JSFunction> f =
v8::Utils::OpenHandle(
*v8::Handle<v8::Function>::Cast(
CcTest::global()->Get(v8_str("morphing_call"))));
// morphing_call should have one feedback vector slot for the call to
// call_target(), scoping analysis having been performed.
CHECK_EQ(1, f->shared()->feedback_vector()->length());
// And yet it's not compiled.
CHECK(!f->shared()->is_compiled());
CompileRun("morphing_call();");
// On scoping analysis after lazy compile, the call is now a global
// call which needs no feedback vector slot.
CHECK_EQ(0, f->shared()->feedback_vector()->length());
CHECK(f->shared()->is_compiled());
}
// Test that optimized code for different closures is actually shared
// immediately by the FastNewClosureStub when run in the same context.
TEST(OptimizedCodeSharing) {

View File

@ -2857,8 +2857,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
*v8::Handle<v8::Function>::Cast(
CcTest::global()->Get(v8_str("f"))));
Handle<FixedArray> feedback_vector(TypeFeedbackInfo::cast(
f->shared()->code()->type_feedback_info())->feedback_vector());
Handle<FixedArray> feedback_vector(f->shared()->feedback_vector());
CHECK_EQ(2, feedback_vector->length());
CHECK(feedback_vector->get(0)->IsJSFunction());

View File

@ -0,0 +1,17 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --stress-opt --always-opt
function foo(x) { return x; }
function container() {
x = 0;
eval('"use strict"; var x = 9;');
(function() {
"use strict";
foo(x);
})();
}
container();