diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc index fd8e8b5d54..fa97a3b321 100644 --- a/src/arm/assembler-arm.cc +++ b/src/arm/assembler-arm.cc @@ -315,6 +315,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size) no_const_pool_before_ = 0; last_const_pool_end_ = 0; last_bound_pos_ = 0; + ast_id_for_reloc_info_ = kNoASTId; } @@ -2722,7 +2723,14 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { } } ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here - reloc_info_writer.Write(&rinfo); + if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { + ASSERT(ast_id_for_reloc_info_ != kNoASTId); + RelocInfo reloc_info_with_ast_id(pc_, rmode, ast_id_for_reloc_info_); + ast_id_for_reloc_info_ = kNoASTId; + reloc_info_writer.Write(&reloc_info_with_ast_id); + } else { + reloc_info_writer.Write(&rinfo); + } } } diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h index 9050c2c5d9..10fc749396 100644 --- a/src/arm/assembler-arm.h +++ b/src/arm/assembler-arm.h @@ -1166,6 +1166,10 @@ class Assembler : public AssemblerBase { // Mark address of a debug break slot. void RecordDebugBreakSlot(); + // Record the AST id of the CallIC being compiled, so that it can be placed + // in the relocation information. + void RecordAstId(unsigned ast_id) { ast_id_for_reloc_info_ = ast_id; } + // Record a comment relocation entry that can be used by a disassembler. // Use --code-comments to enable. void RecordComment(const char* msg); @@ -1223,6 +1227,11 @@ class Assembler : public AssemblerBase { void CheckConstPool(bool force_emit, bool require_jump); protected: + // Relocation for a type-recording IC has the AST id added to it. This + // member variable is a way to pass the information from the call site to + // the relocation info. + unsigned ast_id_for_reloc_info_; + bool emit_debug_code() const { return emit_debug_code_; } int buffer_space() const { return reloc_info_writer.pos() - pc_; } diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index 85e42627f8..971be5d5c3 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -857,7 +857,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { // Record position before stub call for type feedback. SetSourcePosition(clause->position()); Handle ic = CompareIC::GetUninitialized(Token::EQ_STRICT); - EmitCallIC(ic, &patch_site); + EmitCallIC(ic, &patch_site, clause->label()->id()); __ cmp(r0, Operand(0)); __ b(ne, &next_test); __ Drop(1); // Switch value is no longer needed. @@ -1109,6 +1109,67 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { } +void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( + Slot* slot, + TypeofState typeof_state, + Label* slow) { + Register current = cp; + Register next = r1; + Register temp = r2; + + Scope* s = scope(); + while (s != NULL) { + if (s->num_heap_slots() > 0) { + if (s->calls_eval()) { + // Check that extension is NULL. + __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); + __ tst(temp, temp); + __ b(ne, slow); + } + // Load next context in chain. + __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX)); + __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); + // Walk the rest of the chain without clobbering cp. + current = next; + } + // If no outer scope calls eval, we do not need to check more + // context extensions. 
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; + s = s->outer_scope(); + } + + if (s->is_eval_scope()) { + Label loop, fast; + if (!current.is(next)) { + __ Move(next, current); + } + __ bind(&loop); + // Terminate at global context. + __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); + __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); + __ cmp(temp, ip); + __ b(eq, &fast); + // Check that extension is NULL. + __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); + __ tst(temp, temp); + __ b(ne, slow); + // Load next context in chain. + __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX)); + __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); + __ b(&loop); + __ bind(&fast); + } + + __ ldr(r0, GlobalObjectOperand()); + __ mov(r2, Operand(slot->var()->name())); + RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) + ? RelocInfo::CODE_TARGET + : RelocInfo::CODE_TARGET_CONTEXT; + Handle ic = isolate()->builtins()->LoadIC_Initialize(); + EmitCallIC(ic, mode); +} + + MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( Slot* slot, Label* slow) { @@ -1196,67 +1257,6 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( } -void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( - Slot* slot, - TypeofState typeof_state, - Label* slow) { - Register current = cp; - Register next = r1; - Register temp = r2; - - Scope* s = scope(); - while (s != NULL) { - if (s->num_heap_slots() > 0) { - if (s->calls_eval()) { - // Check that extension is NULL. - __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); - __ tst(temp, temp); - __ b(ne, slow); - } - // Load next context in chain. - __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX)); - __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); - // Walk the rest of the chain without clobbering cp. - current = next; - } - // If no outer scope calls eval, we do not need to check more - // context extensions. - if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; - s = s->outer_scope(); - } - - if (s->is_eval_scope()) { - Label loop, fast; - if (!current.is(next)) { - __ Move(next, current); - } - __ bind(&loop); - // Terminate at global context. - __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); - __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); - __ cmp(temp, ip); - __ b(eq, &fast); - // Check that extension is NULL. - __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); - __ tst(temp, temp); - __ b(ne, slow); - // Load next context in chain. - __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX)); - __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); - __ b(&loop); - __ bind(&fast); - } - - __ ldr(r0, GlobalObjectOperand()); - __ mov(r2, Operand(slot->var()->name())); - RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) - ? RelocInfo::CODE_TARGET - : RelocInfo::CODE_TARGET_CONTEXT; - Handle ic = isolate()->builtins()->LoadIC_Initialize(); - EmitCallIC(ic, mode); -} - - void FullCodeGenerator::EmitVariableLoad(Variable* var) { // Four cases: non-this global variables, lookup slots, all other // types of slots, and parameters that rewrite to explicit property @@ -1438,8 +1438,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { VisitForAccumulatorValue(value); __ mov(r2, Operand(key->handle())); __ ldr(r1, MemOperand(sp)); - Handle ic = isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + Handle ic = is_strict_mode() + ? 
isolate()->builtins()->StoreIC_Initialize_Strict() + : isolate()->builtins()->StoreIC_Initialize(); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, key->id()); PrepareForBailoutForId(key->id(), NO_REGISTERS); } else { VisitForEffect(value); @@ -1651,13 +1653,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { SetSourcePosition(expr->position() + 1); AccumulatorValueContext context(this); if (ShouldInlineSmiCase(op)) { - EmitInlineSmiBinaryOp(expr, + EmitInlineSmiBinaryOp(expr->binary_operation(), op, mode, expr->target(), expr->value()); } else { - EmitBinaryOp(op, mode); + EmitBinaryOp(expr->binary_operation(), op, mode); } // Deoptimization point in case the binary operation may have side effects. @@ -1693,7 +1695,11 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { __ mov(r2, Operand(key->handle())); // Call load IC. It has arguments receiver and property name r0 and r2. Handle ic = isolate()->builtins()->LoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + if (prop->is_synthetic()) { + EmitCallIC(ic, RelocInfo::CODE_TARGET); + } else { + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id()); + } } @@ -1701,11 +1707,15 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { SetSourcePosition(prop->position()); // Call keyed load IC. It has arguments key and receiver in r0 and r1. Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + if (prop->is_synthetic()) { + EmitCallIC(ic, RelocInfo::CODE_TARGET); + } else { + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id()); + } } -void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, +void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode, Expression* left_expr, @@ -1728,7 +1738,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, __ bind(&stub_call); TypeRecordingBinaryOpStub stub(op, mode); - EmitCallIC(stub.GetCode(), &patch_site); + EmitCallIC(stub.GetCode(), &patch_site, expr->id()); __ jmp(&done); __ bind(&smi_case); @@ -1804,11 +1814,12 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, } -void FullCodeGenerator::EmitBinaryOp(Token::Value op, +void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, + Token::Value op, OverwriteMode mode) { __ pop(r1); TypeRecordingBinaryOpStub stub(op, mode); - EmitCallIC(stub.GetCode(), NULL); + EmitCallIC(stub.GetCode(), NULL, expr->id()); context()->Plug(r0); } @@ -2006,7 +2017,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); // If the assignment ends an initialization block, revert to fast case. if (expr->ends_initialization_block()) { @@ -2052,7 +2063,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); // If the assignment ends an initialization block, revert to fast case. if (expr->ends_initialization_block()) { @@ -2104,7 +2115,9 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr, InLoopFlag in_loop = (loop_depth() > 0) ? 
IN_LOOP : NOT_IN_LOOP; Handle ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop); - EmitCallIC(ic, mode); + unsigned ast_id = + (mode == RelocInfo::CODE_TARGET_WITH_ID) ? expr->id() : kNoASTId; + EmitCallIC(ic, mode, ast_id); RecordJSReturnSite(expr); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2139,7 +2152,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, Handle ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop); __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. - EmitCallIC(ic, mode); + EmitCallIC(ic, mode, expr->id()); RecordJSReturnSite(expr); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); @@ -2315,11 +2328,11 @@ void FullCodeGenerator::VisitCall(Call* expr) { { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(prop->obj()); } - EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); + EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET_WITH_ID); } else { // Call to a keyed property. // For a synthetic property use keyed load IC followed by function call, - // for a regular property use keyed CallIC. + // for a regular property use keyed EmitCallIC. if (prop->is_synthetic()) { // Do not visit the object and key subexpressions (they are shared // by all occurrences of the same rewritten parameter). @@ -2346,7 +2359,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { { PreservePositionScope scope(masm()->positions_recorder()); VisitForStackValue(prop->obj()); } - EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET); + EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET_WITH_ID); } } } else { @@ -3657,7 +3670,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { __ mov(r2, Operand(expr->name())); Handle ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); } else { @@ -3936,7 +3949,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { SetSourcePosition(expr->position()); TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE); - EmitCallIC(stub.GetCode(), &patch_site); + EmitCallIC(stub.GetCode(), &patch_site, expr->CountId()); __ bind(&done); // Store the value returned in r0. @@ -3967,7 +3980,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); if (expr->is_postfix()) { if (!context()->IsEffect()) { @@ -3984,7 +3997,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); if (expr->is_postfix()) { if (!context()->IsEffect()) { @@ -4213,7 +4226,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { // Record position and call the compare IC. 
SetSourcePosition(expr->position()); Handle ic = CompareIC::GetUninitialized(op); - EmitCallIC(ic, &patch_site); + EmitCallIC(ic, &patch_site, expr->id()); PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); __ cmp(r0, Operand(0)); Split(cond, if_true, if_false, fall_through); @@ -4275,9 +4288,12 @@ Register FullCodeGenerator::context_register() { } -void FullCodeGenerator::EmitCallIC(Handle ic, RelocInfo::Mode mode) { +void FullCodeGenerator::EmitCallIC(Handle ic, + RelocInfo::Mode mode, + unsigned ast_id) { ASSERT(mode == RelocInfo::CODE_TARGET || - mode == RelocInfo::CODE_TARGET_CONTEXT); + mode == RelocInfo::CODE_TARGET_CONTEXT || + mode == RelocInfo::CODE_TARGET_WITH_ID); Counters* counters = isolate()->counters(); switch (ic->kind()) { case Code::LOAD_IC: @@ -4294,11 +4310,19 @@ void FullCodeGenerator::EmitCallIC(Handle ic, RelocInfo::Mode mode) { default: break; } - __ Call(ic, mode); + if (mode == RelocInfo::CODE_TARGET_WITH_ID) { + ASSERT(ast_id != kNoASTId); + __ CallWithAstId(ic, mode, ast_id); + } else { + ASSERT(ast_id == kNoASTId); + __ Call(ic, mode); + } } -void FullCodeGenerator::EmitCallIC(Handle ic, JumpPatchSite* patch_site) { +void FullCodeGenerator::EmitCallIC(Handle ic, + JumpPatchSite* patch_site, + unsigned ast_id) { Counters* counters = isolate()->counters(); switch (ic->kind()) { case Code::LOAD_IC: @@ -4315,7 +4339,12 @@ void FullCodeGenerator::EmitCallIC(Handle ic, JumpPatchSite* patch_site) { default: break; } - __ Call(ic, RelocInfo::CODE_TARGET); + + if (ast_id != kNoASTId) { + __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id); + } else { + __ Call(ic, RelocInfo::CODE_TARGET); + } if (patch_site != NULL && patch_site->is_bound()) { patch_site->EmitPatchInfo(); } else { diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 6a095d3ccd..c800ffc2fa 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -148,8 +148,9 @@ int MacroAssembler::CallSize( } -void MacroAssembler::Call( - intptr_t target, RelocInfo::Mode rmode, Condition cond) { +void MacroAssembler::Call(intptr_t target, + RelocInfo::Mode rmode, + Condition cond) { // Block constant pool for the call instruction sequence. BlockConstPoolScope block_const_pool(this); #ifdef DEBUG @@ -214,8 +215,31 @@ int MacroAssembler::CallSize( } -void MacroAssembler::Call( - Handle code, RelocInfo::Mode rmode, Condition cond) { +void MacroAssembler::CallWithAstId(Handle code, + RelocInfo::Mode rmode, + unsigned ast_id, + Condition cond) { +#ifdef DEBUG + int pre_position = pc_offset(); +#endif + + ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID); + ASSERT(ast_id != kNoASTId); + ASSERT(ast_id_for_reloc_info_ == kNoASTId); + ast_id_for_reloc_info_ = ast_id; + // 'code' is always generated ARM code, never THUMB code + Call(reinterpret_cast(code.location()), rmode, cond); + +#ifdef DEBUG + int post_position = pc_offset(); + CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position); +#endif +} + + +void MacroAssembler::Call(Handle code, + RelocInfo::Mode rmode, + Condition cond) { #ifdef DEBUG int pre_position = pc_offset(); #endif diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index ab5efb0b31..1ca16aefe6 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -1,4 +1,4 @@ -// Copyright 2010 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -105,7 +105,13 @@ class MacroAssembler: public Assembler { int CallSize(byte* target, RelocInfo::Mode rmode, Condition cond = al); void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al); int CallSize(Handle code, RelocInfo::Mode rmode, Condition cond = al); - void Call(Handle code, RelocInfo::Mode rmode, Condition cond = al); + void Call(Handle code, + RelocInfo::Mode rmode, + Condition cond = al); + void CallWithAstId(Handle code, + RelocInfo::Mode rmode, + unsigned ast_id, + Condition cond = al); void Ret(Condition cond = al); // Emit code to discard a non-negative number of pointer-sized elements @@ -958,7 +964,9 @@ class MacroAssembler: public Assembler { void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al); int CallSize(intptr_t target, RelocInfo::Mode rmode, Condition cond = al); - void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al); + void Call(intptr_t target, + RelocInfo::Mode rmode, + Condition cond = al); // Helper functions for generating invokes. void InvokePrologue(const ParameterCount& expected, diff --git a/src/assembler.cc b/src/assembler.cc index ca30e19cb4..77c7b68c4f 100644 --- a/src/assembler.cc +++ b/src/assembler.cc @@ -30,7 +30,7 @@ // The original source code covered by the above license above has been // modified significantly by Google Inc. -// Copyright 2006-2009 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. #include "v8.h" @@ -87,58 +87,85 @@ int Label::pos() const { // ----------------------------------------------------------------------------- // Implementation of RelocInfoWriter and RelocIterator // +// Relocation information is written backwards in memory, from high addresses +// towards low addresses, byte by byte. Therefore, in the encodings listed +// below, the first byte listed it at the highest address, and successive +// bytes in the record are at progressively lower addresses. +// // Encoding // // The most common modes are given single-byte encodings. Also, it is // easy to identify the type of reloc info and skip unwanted modes in // an iteration. // -// The encoding relies on the fact that there are less than 14 -// different relocation modes. +// The encoding relies on the fact that there are fewer than 14 +// different non-compactly encoded relocation modes. // -// embedded_object: [6 bits pc delta] 00 +// The first byte of a relocation record has a tag in its low 2 bits: +// Here are the record schemes, depending on the low tag and optional higher +// tags. // -// code_taget: [6 bits pc delta] 01 +// Low tag: +// 00: embedded_object: [6-bit pc delta] 00 // -// position: [6 bits pc delta] 10, -// [7 bits signed data delta] 0 +// 01: code_target: [6-bit pc delta] 01 // -// statement_position: [6 bits pc delta] 10, -// [7 bits signed data delta] 1 +// 10: short_data_record: [6-bit pc delta] 10 followed by +// [6-bit data delta] [2-bit data type tag] // -// any nondata mode: 00 [4 bits rmode] 11, // rmode: 0..13 only -// 00 [6 bits pc delta] +// 11: long_record [2-bit high tag][4 bit middle_tag] 11 +// followed by variable data depending on type. 
// -// pc-jump: 00 1111 11, -// 00 [6 bits pc delta] +// 2-bit data type tags, used in short_data_record and data_jump long_record: +// code_target_with_id: 00 +// position: 01 +// statement_position: 10 +// comment: 11 (not used in short_data_record) // -// pc-jump: 01 1111 11, -// (variable length) 7 - 26 bit pc delta, written in chunks of 7 -// bits, the lowest 7 bits written first. +// Long record format: +// 4-bit middle_tag: +// 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2 +// (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM, +// and is between 0000 and 1100) +// The format is: +// 00 [4 bit middle_tag] 11 followed by +// 00 [6 bit pc delta] // -// data-jump + pos: 00 1110 11, -// signed intptr_t, lowest byte written first -// -// data-jump + st.pos: 01 1110 11, -// signed intptr_t, lowest byte written first -// -// data-jump + comm.: 10 1110 11, -// signed intptr_t, lowest byte written first +// 1101: not used (would allow one more relocation mode to be added) +// 1110: long_data_record +// The format is: [2-bit data_type_tag] 1110 11 +// signed intptr_t, lowest byte written first +// (except data_type code_target_with_id, which +// is followed by a signed int, not intptr_t.) // +// 1111: long_pc_jump +// The format is: +// pc-jump: 00 1111 11, +// 00 [6 bits pc delta] +// or +// pc-jump (variable length): +// 01 1111 11, +// [7 bits data] 0 +// ... +// [7 bits data] 1 +// (Bits 6..31 of pc delta, with leading zeroes +// dropped, and last non-zero chunk tagged with 1.) + + const int kMaxRelocModes = 14; const int kTagBits = 2; const int kTagMask = (1 << kTagBits) - 1; const int kExtraTagBits = 4; -const int kPositionTypeTagBits = 1; -const int kSmallDataBits = kBitsPerByte - kPositionTypeTagBits; +const int kLocatableTypeTagBits = 2; +const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits; const int kEmbeddedObjectTag = 0; const int kCodeTargetTag = 1; -const int kPositionTag = 2; +const int kLocatableTag = 2; const int kDefaultTag = 3; -const int kPCJumpTag = (1 << kExtraTagBits) - 1; +const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1; const int kSmallPCDeltaBits = kBitsPerByte - kTagBits; const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1; @@ -152,11 +179,12 @@ const int kLastChunkTagMask = 1; const int kLastChunkTag = 1; -const int kDataJumpTag = kPCJumpTag - 1; +const int kDataJumpExtraTag = kPCJumpExtraTag - 1; -const int kNonstatementPositionTag = 0; -const int kStatementPositionTag = 1; -const int kCommentTag = 2; +const int kCodeWithIdTag = 0; +const int kNonstatementPositionTag = 1; +const int kStatementPositionTag = 2; +const int kCommentTag = 3; uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) { @@ -164,7 +192,7 @@ uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) { // Otherwise write a variable length PC jump for the bits that do // not fit in the kSmallPCDeltaBits bits. if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta; - WriteExtraTag(kPCJumpTag, kVariableLengthPCJumpTopTag); + WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag); uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits; ASSERT(pc_jump > 0); // Write kChunkBits size chunks of the pc_jump. 
@@ -187,7 +215,7 @@ void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) { void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) { - *--pos_ = static_cast(data_delta << kPositionTypeTagBits | tag); + *--pos_ = static_cast(data_delta << kLocatableTypeTagBits | tag); } @@ -206,11 +234,20 @@ void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) { } +void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) { + WriteExtraTag(kDataJumpExtraTag, top_tag); + for (int i = 0; i < kIntSize; i++) { + *--pos_ = static_cast(data_delta); + // Signed right shift is arithmetic shift. Tested in test-utils.cc. + data_delta = data_delta >> kBitsPerByte; + } +} + void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) { - WriteExtraTag(kDataJumpTag, top_tag); + WriteExtraTag(kDataJumpExtraTag, top_tag); for (int i = 0; i < kIntptrSize; i++) { *--pos_ = static_cast(data_delta); - // Signed right shift is arithmetic shift. Tested in test-utils.cc. + // Signed right shift is arithmetic shift. Tested in test-utils.cc. data_delta = data_delta >> kBitsPerByte; } } @@ -221,7 +258,8 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) { byte* begin_pos = pos_; #endif ASSERT(rinfo->pc() - last_pc_ >= 0); - ASSERT(RelocInfo::NUMBER_OF_MODES <= kMaxRelocModes); + ASSERT(RelocInfo::NUMBER_OF_MODES - RelocInfo::LAST_COMPACT_ENUM <= + kMaxRelocModes); // Use unsigned delta-encoding for pc. uint32_t pc_delta = static_cast(rinfo->pc() - last_pc_); RelocInfo::Mode rmode = rinfo->rmode(); @@ -232,35 +270,48 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) { } else if (rmode == RelocInfo::CODE_TARGET) { WriteTaggedPC(pc_delta, kCodeTargetTag); ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize); - } else if (RelocInfo::IsPosition(rmode)) { - // Use signed delta-encoding for data. - intptr_t data_delta = rinfo->data() - last_data_; - int pos_type_tag = rmode == RelocInfo::POSITION ? kNonstatementPositionTag - : kStatementPositionTag; - // Check if data is small enough to fit in a tagged byte. - // We cannot use is_intn because data_delta is not an int32_t. - if (data_delta >= -(1 << (kSmallDataBits-1)) && - data_delta < 1 << (kSmallDataBits-1)) { - WriteTaggedPC(pc_delta, kPositionTag); - WriteTaggedData(data_delta, pos_type_tag); - last_data_ = rinfo->data(); + } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { + // Use signed delta-encoding for id. + ASSERT(static_cast(rinfo->data()) == rinfo->data()); + int id_delta = static_cast(rinfo->data()) - last_id_; + // Check if delta is small enough to fit in a tagged byte. + if (is_intn(id_delta, kSmallDataBits)) { + WriteTaggedPC(pc_delta, kLocatableTag); + WriteTaggedData(id_delta, kCodeWithIdTag); } else { // Otherwise, use costly encoding. - WriteExtraTaggedPC(pc_delta, kPCJumpTag); - WriteExtraTaggedData(data_delta, pos_type_tag); - last_data_ = rinfo->data(); + WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag); + WriteExtraTaggedIntData(id_delta, kCodeWithIdTag); } + last_id_ = static_cast(rinfo->data()); + } else if (RelocInfo::IsPosition(rmode)) { + // Use signed delta-encoding for position. + ASSERT(static_cast(rinfo->data()) == rinfo->data()); + int pos_delta = static_cast(rinfo->data()) - last_position_; + int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag + : kStatementPositionTag; + // Check if delta is small enough to fit in a tagged byte. 
+ if (is_intn(pos_delta, kSmallDataBits)) { + WriteTaggedPC(pc_delta, kLocatableTag); + WriteTaggedData(pos_delta, pos_type_tag); + } else { + // Otherwise, use costly encoding. + WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag); + WriteExtraTaggedIntData(pos_delta, pos_type_tag); + } + last_position_ = static_cast(rinfo->data()); } else if (RelocInfo::IsComment(rmode)) { // Comments are normally not generated, so we use the costly encoding. - WriteExtraTaggedPC(pc_delta, kPCJumpTag); - WriteExtraTaggedData(rinfo->data() - last_data_, kCommentTag); - last_data_ = rinfo->data(); + WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag); + WriteExtraTaggedData(rinfo->data(), kCommentTag); ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize); } else { + ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM); + int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM; // For all other modes we simply use the mode as the extra tag. // None of these modes need a data component. - ASSERT(rmode < kPCJumpTag && rmode < kDataJumpTag); - WriteExtraTaggedPC(pc_delta, rmode); + ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag); + WriteExtraTaggedPC(pc_delta, saved_mode); } last_pc_ = rinfo->pc(); #ifdef DEBUG @@ -294,12 +345,32 @@ inline void RelocIterator::AdvanceReadPC() { } +void RelocIterator::AdvanceReadId() { + int x = 0; + for (int i = 0; i < kIntSize; i++) { + x |= static_cast(*--pos_) << i * kBitsPerByte; + } + last_id_ += x; + rinfo_.data_ = last_id_; +} + + +void RelocIterator::AdvanceReadPosition() { + int x = 0; + for (int i = 0; i < kIntSize; i++) { + x |= static_cast(*--pos_) << i * kBitsPerByte; + } + last_position_ += x; + rinfo_.data_ = last_position_; +} + + void RelocIterator::AdvanceReadData() { intptr_t x = 0; for (int i = 0; i < kIntptrSize; i++) { x |= static_cast(*--pos_) << i * kBitsPerByte; } - rinfo_.data_ += x; + rinfo_.data_ = x; } @@ -319,27 +390,33 @@ void RelocIterator::AdvanceReadVariableLengthPCJump() { } -inline int RelocIterator::GetPositionTypeTag() { - return *pos_ & ((1 << kPositionTypeTagBits) - 1); +inline int RelocIterator::GetLocatableTypeTag() { + return *pos_ & ((1 << kLocatableTypeTagBits) - 1); } -inline void RelocIterator::ReadTaggedData() { +inline void RelocIterator::ReadTaggedId() { int8_t signed_b = *pos_; // Signed right shift is arithmetic shift. Tested in test-utils.cc. - rinfo_.data_ += signed_b >> kPositionTypeTagBits; + last_id_ += signed_b >> kLocatableTypeTagBits; + rinfo_.data_ = last_id_; } -inline RelocInfo::Mode RelocIterator::DebugInfoModeFromTag(int tag) { - if (tag == kStatementPositionTag) { - return RelocInfo::STATEMENT_POSITION; - } else if (tag == kNonstatementPositionTag) { - return RelocInfo::POSITION; - } else { - ASSERT(tag == kCommentTag); - return RelocInfo::COMMENT; - } +inline void RelocIterator::ReadTaggedPosition() { + int8_t signed_b = *pos_; + // Signed right shift is arithmetic shift. Tested in test-utils.cc. + last_position_ += signed_b >> kLocatableTypeTagBits; + rinfo_.data_ = last_position_; +} + + +static inline RelocInfo::Mode GetPositionModeFromTag(int tag) { + ASSERT(tag == kNonstatementPositionTag || + tag == kStatementPositionTag); + return (tag == kNonstatementPositionTag) ? 
+ RelocInfo::POSITION : + RelocInfo::STATEMENT_POSITION; } @@ -358,37 +435,64 @@ void RelocIterator::next() { } else if (tag == kCodeTargetTag) { ReadTaggedPC(); if (SetMode(RelocInfo::CODE_TARGET)) return; - } else if (tag == kPositionTag) { + } else if (tag == kLocatableTag) { ReadTaggedPC(); Advance(); - // Check if we want source positions. - if (mode_mask_ & RelocInfo::kPositionMask) { - ReadTaggedData(); - if (SetMode(DebugInfoModeFromTag(GetPositionTypeTag()))) return; + int locatable_tag = GetLocatableTypeTag(); + if (locatable_tag == kCodeWithIdTag) { + if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) { + ReadTaggedId(); + return; + } + } else { + // Compact encoding is never used for comments, + // so it must be a position. + ASSERT(locatable_tag == kNonstatementPositionTag || + locatable_tag == kStatementPositionTag); + if (mode_mask_ & RelocInfo::kPositionMask) { + ReadTaggedPosition(); + if (SetMode(GetPositionModeFromTag(locatable_tag))) return; + } } } else { ASSERT(tag == kDefaultTag); int extra_tag = GetExtraTag(); - if (extra_tag == kPCJumpTag) { + if (extra_tag == kPCJumpExtraTag) { int top_tag = GetTopTag(); if (top_tag == kVariableLengthPCJumpTopTag) { AdvanceReadVariableLengthPCJump(); } else { AdvanceReadPC(); } - } else if (extra_tag == kDataJumpTag) { - // Check if we want debug modes (the only ones with data). - if (mode_mask_ & RelocInfo::kDebugMask) { - int top_tag = GetTopTag(); - AdvanceReadData(); - if (SetMode(DebugInfoModeFromTag(top_tag))) return; + } else if (extra_tag == kDataJumpExtraTag) { + int locatable_tag = GetTopTag(); + if (locatable_tag == kCodeWithIdTag) { + if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) { + AdvanceReadId(); + return; + } + Advance(kIntSize); + } else if (locatable_tag != kCommentTag) { + ASSERT(locatable_tag == kNonstatementPositionTag || + locatable_tag == kStatementPositionTag); + if (mode_mask_ & RelocInfo::kPositionMask) { + AdvanceReadPosition(); + if (SetMode(GetPositionModeFromTag(locatable_tag))) return; + } else { + Advance(kIntSize); + } } else { - // Otherwise, just skip over the data. 
+ ASSERT(locatable_tag == kCommentTag); + if (SetMode(RelocInfo::COMMENT)) { + AdvanceReadData(); + return; + } Advance(kIntptrSize); } } else { AdvanceReadPC(); - if (SetMode(static_cast(extra_tag))) return; + int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM; + if (SetMode(static_cast(rmode))) return; } } } @@ -404,6 +508,8 @@ RelocIterator::RelocIterator(Code* code, int mode_mask) { end_ = code->relocation_start(); done_ = false; mode_mask_ = mode_mask; + last_id_ = 0; + last_position_ = 0; if (mode_mask_ == 0) pos_ = end_; next(); } @@ -417,6 +523,8 @@ RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) { end_ = pos_ - desc.reloc_size; done_ = false; mode_mask_ = mode_mask; + last_id_ = 0; + last_position_ = 0; if (mode_mask_ == 0) pos_ = end_; next(); } @@ -444,6 +552,8 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) { return "debug break"; case RelocInfo::CODE_TARGET: return "code target"; + case RelocInfo::CODE_TARGET_WITH_ID: + return "code target with id"; case RelocInfo::GLOBAL_PROPERTY_CELL: return "global property cell"; case RelocInfo::RUNTIME_ENTRY: @@ -490,6 +600,9 @@ void RelocInfo::Print(FILE* out) { Code* code = Code::GetCodeFromTargetAddress(target_address()); PrintF(out, " (%s) (%p)", Code::Kind2String(code->kind()), target_address()); + if (rmode_ == CODE_TARGET_WITH_ID) { + PrintF(" (id=%d)", static_cast(data_)); + } } else if (IsPosition(rmode_)) { PrintF(out, " (%" V8_PTR_PREFIX "d)", data()); } else if (rmode_ == RelocInfo::RUNTIME_ENTRY && @@ -523,6 +636,7 @@ void RelocInfo::Verify() { #endif case CONSTRUCT_CALL: case CODE_TARGET_CONTEXT: + case CODE_TARGET_WITH_ID: case CODE_TARGET: { // convert inline target address to code object Address addr = target_address(); diff --git a/src/assembler.h b/src/assembler.h index e8cecc3fdb..918de62a42 100644 --- a/src/assembler.h +++ b/src/assembler.h @@ -42,7 +42,7 @@ namespace v8 { namespace internal { - +const unsigned kNoASTId = -1; // ----------------------------------------------------------------------------- // Platform independent assembler base class. @@ -209,10 +209,11 @@ class RelocInfo BASE_EMBEDDED { enum Mode { // Please note the order is important (see IsCodeTarget, IsGCRelocMode). + CODE_TARGET, // Code target which is not any of the above. + CODE_TARGET_WITH_ID, CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor. CODE_TARGET_CONTEXT, // Code target used for contextual loads and stores. DEBUG_BREAK, // Code target for the debugger statement. - CODE_TARGET, // Code target which is not any of the above. EMBEDDED_OBJECT, GLOBAL_PROPERTY_CELL, @@ -228,10 +229,12 @@ class RelocInfo BASE_EMBEDDED { // add more as needed // Pseudo-types - NUMBER_OF_MODES, // must be no greater than 14 - see RelocInfoWriter + NUMBER_OF_MODES, // There are at most 14 modes with noncompact encoding. NONE, // never recorded - LAST_CODE_ENUM = CODE_TARGET, - LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL + LAST_CODE_ENUM = DEBUG_BREAK, + LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL, + // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding. + LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID }; @@ -361,7 +364,8 @@ class RelocInfo BASE_EMBEDDED { static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1; static const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION; - static const int kDebugMask = kPositionMask | 1 << COMMENT; + static const int kDataMask = + (1 << CODE_TARGET_WITH_ID) | kPositionMask | (1 << COMMENT); static const int kApplyMask; // Modes affected by apply. 
Depends on arch. private: @@ -380,9 +384,14 @@ class RelocInfo BASE_EMBEDDED { // lower addresses. class RelocInfoWriter BASE_EMBEDDED { public: - RelocInfoWriter() : pos_(NULL), last_pc_(NULL), last_data_(0) {} - RelocInfoWriter(byte* pos, byte* pc) : pos_(pos), last_pc_(pc), - last_data_(0) {} + RelocInfoWriter() : pos_(NULL), + last_pc_(NULL), + last_id_(0), + last_position_(0) {} + RelocInfoWriter(byte* pos, byte* pc) : pos_(pos), + last_pc_(pc), + last_id_(0), + last_position_(0) {} byte* pos() const { return pos_; } byte* last_pc() const { return last_pc_; } @@ -407,13 +416,15 @@ class RelocInfoWriter BASE_EMBEDDED { inline uint32_t WriteVariableLengthPCJump(uint32_t pc_delta); inline void WriteTaggedPC(uint32_t pc_delta, int tag); inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag); + inline void WriteExtraTaggedIntData(int data_delta, int top_tag); inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag); inline void WriteTaggedData(intptr_t data_delta, int tag); inline void WriteExtraTag(int extra_tag, int top_tag); byte* pos_; byte* last_pc_; - intptr_t last_data_; + int last_id_; + int last_position_; DISALLOW_COPY_AND_ASSIGN(RelocInfoWriter); }; @@ -455,12 +466,13 @@ class RelocIterator: public Malloced { int GetTopTag(); void ReadTaggedPC(); void AdvanceReadPC(); + void AdvanceReadId(); + void AdvanceReadPosition(); void AdvanceReadData(); void AdvanceReadVariableLengthPCJump(); - int GetPositionTypeTag(); - void ReadTaggedData(); - - static RelocInfo::Mode DebugInfoModeFromTag(int tag); + int GetLocatableTypeTag(); + void ReadTaggedId(); + void ReadTaggedPosition(); // If the given mode is wanted, set it in rinfo_ and return true. // Else return false. Used for efficiently skipping unwanted modes. @@ -473,6 +485,8 @@ class RelocIterator: public Malloced { RelocInfo rinfo_; bool done_; int mode_mask_; + int last_id_; + int last_position_; DISALLOW_COPY_AND_ASSIGN(RelocIterator); }; diff --git a/src/disassembler.cc b/src/disassembler.cc index 65e16681a0..368c3a89c1 100644 --- a/src/disassembler.cc +++ b/src/disassembler.cc @@ -282,6 +282,9 @@ static int DecodeIt(FILE* f, } else { out.AddFormatted(" %s", Code::Kind2String(kind)); } + if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { + out.AddFormatted(" (id = %d)", static_cast(relocinfo.data())); + } } else if (rmode == RelocInfo::RUNTIME_ENTRY && Isolate::Current()->deoptimizer_data() != NULL) { // A runtime entry reloinfo might be a deoptimization bailout. diff --git a/src/full-codegen.cc b/src/full-codegen.cc index d6ba56e8df..1c2f7bfd85 100644 --- a/src/full-codegen.cc +++ b/src/full-codegen.cc @@ -744,7 +744,7 @@ void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { if (ShouldInlineSmiCase(op)) { EmitInlineSmiBinaryOp(expr, op, mode, left, right); } else { - EmitBinaryOp(op, mode); + EmitBinaryOp(expr, op, mode); } break; } diff --git a/src/full-codegen.h b/src/full-codegen.h index d6ed1b9ff0..ff815c3efa 100644 --- a/src/full-codegen.h +++ b/src/full-codegen.h @@ -1,4 +1,4 @@ -// Copyright 2010 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -445,12 +445,13 @@ class FullCodeGenerator: public AstVisitor { // Apply the compound assignment operator. Expects the left operand on top // of the stack and the right one in the accumulator. 
- void EmitBinaryOp(Token::Value op, + void EmitBinaryOp(BinaryOperation* expr, + Token::Value op, OverwriteMode mode); // Helper functions for generating inlined smi code for certain // binary operations. - void EmitInlineSmiBinaryOp(Expression* expr, + void EmitInlineSmiBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode, Expression* left, @@ -512,12 +513,16 @@ class FullCodeGenerator: public AstVisitor { static Register context_register(); // Helper for calling an IC stub. - void EmitCallIC(Handle ic, RelocInfo::Mode mode); + void EmitCallIC(Handle ic, + RelocInfo::Mode mode, + unsigned ast_id = AstNode::kNoNumber); // Calling an IC stub with a patch site. Passing NULL for patch_site // or non NULL patch_site which is not activated indicates no inlined smi code // and emits a nop after the IC call. - void EmitCallIC(Handle ic, JumpPatchSite* patch_site); + void EmitCallIC(Handle ic, + JumpPatchSite* patch_site, + unsigned ast_id = AstNode::kNoNumber); // Set fields in the stack frame. Offsets are the frame pointer relative // offsets defined in, e.g., StandardFrameConstants. diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h index a9247f46fd..7f7e349f2e 100644 --- a/src/ia32/assembler-ia32-inl.h +++ b/src/ia32/assembler-ia32-inl.h @@ -30,7 +30,7 @@ // The original source code covered by the above license above has been // modified significantly by Google Inc. -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // A light-weight IA32 Assembler. @@ -311,8 +311,12 @@ void Assembler::emit(Handle handle) { } -void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) { - if (rmode != RelocInfo::NONE) RecordRelocInfo(rmode); +void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, unsigned id) { + if (rmode == RelocInfo::CODE_TARGET && id != kNoASTId) { + RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast(id)); + } else if (rmode != RelocInfo::NONE) { + RecordRelocInfo(rmode); + } emit(x); } diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc index 92730372ee..a91b0c44a5 100644 --- a/src/ia32/assembler-ia32.cc +++ b/src/ia32/assembler-ia32.cc @@ -1589,13 +1589,15 @@ void Assembler::call(const Operand& adr) { } -void Assembler::call(Handle code, RelocInfo::Mode rmode) { +void Assembler::call(Handle code, + RelocInfo::Mode rmode, + unsigned ast_id) { positions_recorder()->WriteRecordedPositions(); EnsureSpace ensure_space(this); last_pc_ = pc_; ASSERT(RelocInfo::IsCodeTarget(rmode)); EMIT(0xE8); - emit(reinterpret_cast(code.location()), rmode); + emit(reinterpret_cast(code.location()), rmode, ast_id); } diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h index 079dca7577..86ce8a68e4 100644 --- a/src/ia32/assembler-ia32.h +++ b/src/ia32/assembler-ia32.h @@ -848,7 +848,9 @@ class Assembler : public AssemblerBase { void call(Label* L); void call(byte* entry, RelocInfo::Mode rmode); void call(const Operand& adr); - void call(Handle code, RelocInfo::Mode rmode); + void call(Handle code, + RelocInfo::Mode rmode, + unsigned ast_id = kNoASTId); // Jumps void jmp(Label* L); // unconditional jump to L @@ -1070,7 +1072,9 @@ class Assembler : public AssemblerBase { void GrowBuffer(); inline void emit(uint32_t x); inline void emit(Handle handle); - inline void emit(uint32_t x, RelocInfo::Mode rmode); + inline void emit(uint32_t x, + RelocInfo::Mode rmode, + unsigned ast_id = kNoASTId); inline void emit(const Immediate& x); inline void emit_w(const 
Immediate& x); diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 69d5e77363..4cade5f13a 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -44,6 +44,11 @@ namespace internal { #define __ ACCESS_MASM(masm_) +static unsigned GetPropertyId(Property* property) { + if (property->is_synthetic()) return AstNode::kNoNumber; + return property->id(); +} + class JumpPatchSite BASE_EMBEDDED { public: @@ -808,7 +813,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { // Record position before stub call for type feedback. SetSourcePosition(clause->position()); Handle ic = CompareIC::GetUninitialized(Token::EQ_STRICT); - EmitCallIC(ic, &patch_site); + EmitCallIC(ic, &patch_site, clause->label()->id()); __ test(eax, Operand(eax)); __ j(not_equal, &next_test); __ Drop(1); // Switch value is no longer needed. @@ -1187,7 +1192,7 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( __ mov(eax, Immediate(key_literal->handle())); Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); __ jmp(done); } } @@ -1273,7 +1278,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) { // Do a keyed property load. Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property)); // Drop key and object left on the stack by IC. context()->Plug(eax); @@ -1386,7 +1391,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id()); PrepareForBailoutForId(key->id(), NO_REGISTERS); } else { VisitForEffect(value); @@ -1593,13 +1598,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { SetSourcePosition(expr->position() + 1); AccumulatorValueContext context(this); if (ShouldInlineSmiCase(op)) { - EmitInlineSmiBinaryOp(expr, + EmitInlineSmiBinaryOp(expr->binary_operation(), op, mode, expr->target(), expr->value()); } else { - EmitBinaryOp(op, mode); + EmitBinaryOp(expr->binary_operation(), op, mode); } // Deoptimization point in case the binary operation may have side effects. @@ -1634,18 +1639,18 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { Literal* key = prop->key()->AsLiteral(); __ mov(ecx, Immediate(key->handle())); Handle ic = isolate()->builtins()->LoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); } void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { SetSourcePosition(prop->position()); Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); } -void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, +void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, Token::Value op, OverwriteMode mode, Expression* left, @@ -1662,7 +1667,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, __ bind(&stub_call); __ mov(eax, ecx); TypeRecordingBinaryOpStub stub(op, mode); - EmitCallIC(stub.GetCode(), &patch_site); + EmitCallIC(stub.GetCode(), &patch_site, expr->id()); __ jmp(&done); // Smi case. 
@@ -1740,11 +1745,13 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr, } -void FullCodeGenerator::EmitBinaryOp(Token::Value op, +void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, + Token::Value op, OverwriteMode mode) { __ pop(edx); TypeRecordingBinaryOpStub stub(op, mode); - EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code. + // NULL signals no inlined smi code. + EmitCallIC(stub.GetCode(), NULL, expr->id()); context()->Plug(eax); } @@ -1936,7 +1943,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id()); // If the assignment ends an initialization block, revert to fast case. if (expr->ends_initialization_block()) { @@ -1976,7 +1983,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id()); // If the assignment ends an initialization block, revert to fast case. if (expr->ends_initialization_block()) { @@ -2027,7 +2034,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr, InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; Handle ic = isolate()->stub_cache()->ComputeCallInitialize( arg_count, in_loop); - EmitCallIC(ic, mode); + EmitCallIC(ic, mode, expr->id()); RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); @@ -2061,7 +2068,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, Handle ic = isolate()->stub_cache()->ComputeKeyedCallInitialize( arg_count, in_loop); __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key. - EmitCallIC(ic, mode); + EmitCallIC(ic, mode, expr->id()); RecordJSReturnSite(expr); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); @@ -2252,7 +2259,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { SetSourcePosition(prop->position()); Handle ic = isolate()->builtins()->KeyedLoadIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop)); // Push result (function). __ push(eax); // Push Global receiver. @@ -3611,7 +3618,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; Handle ic = isolate()->stub_cache()->ComputeCallInitialize( arg_count, in_loop); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id()); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); } else { @@ -3898,7 +3905,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ mov(edx, eax); __ mov(eax, Immediate(Smi::FromInt(1))); TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE); - EmitCallIC(stub.GetCode(), &patch_site); + EmitCallIC(stub.GetCode(), &patch_site, expr->CountId()); __ bind(&done); // Store the value returned in eax. @@ -3931,7 +3938,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { Handle ic = is_strict_mode() ? 
isolate()->builtins()->StoreIC_Initialize_Strict() : isolate()->builtins()->StoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id()); PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); if (expr->is_postfix()) { if (!context()->IsEffect()) { @@ -3948,7 +3955,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { Handle ic = is_strict_mode() ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() : isolate()->builtins()->KeyedStoreIC_Initialize(); - EmitCallIC(ic, RelocInfo::CODE_TARGET); + EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id()); PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); if (expr->is_postfix()) { // Result is on the stack @@ -4175,7 +4182,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { // Record position and call the compare IC. SetSourcePosition(expr->position()); Handle ic = CompareIC::GetUninitialized(op); - EmitCallIC(ic, &patch_site); + EmitCallIC(ic, &patch_site, expr->id()); PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); __ test(eax, Operand(eax)); @@ -4235,7 +4242,9 @@ Register FullCodeGenerator::context_register() { } -void FullCodeGenerator::EmitCallIC(Handle ic, RelocInfo::Mode mode) { +void FullCodeGenerator::EmitCallIC(Handle ic, + RelocInfo::Mode mode, + unsigned ast_id) { ASSERT(mode == RelocInfo::CODE_TARGET || mode == RelocInfo::CODE_TARGET_CONTEXT); switch (ic->kind()) { @@ -4253,11 +4262,13 @@ void FullCodeGenerator::EmitCallIC(Handle ic, RelocInfo::Mode mode) { default: break; } - __ call(ic, mode); + __ call(ic, mode, ast_id); } -void FullCodeGenerator::EmitCallIC(Handle ic, JumpPatchSite* patch_site) { +void FullCodeGenerator::EmitCallIC(Handle ic, + JumpPatchSite* patch_site, + unsigned ast_id) { Counters* counters = isolate()->counters(); switch (ic->kind()) { case Code::LOAD_IC: @@ -4274,7 +4285,7 @@ void FullCodeGenerator::EmitCallIC(Handle ic, JumpPatchSite* patch_site) { default: break; } - __ call(ic, RelocInfo::CODE_TARGET); + __ call(ic, RelocInfo::CODE_TARGET, ast_id); if (patch_site != NULL && patch_site->is_bound()) { patch_site->EmitPatchInfo(); } else { diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index ad567bca3d..0c24fc44f9 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -1104,9 +1104,9 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, } -void MacroAssembler::CallStub(CodeStub* stub) { +void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. - call(stub->GetCode(), RelocInfo::CODE_TARGET); + call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); } diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index 690927262c..f1666ba411 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -452,7 +452,7 @@ class MacroAssembler: public Assembler { // Runtime calls // Call a code stub. Generate the code if necessary. - void CallStub(CodeStub* stub); + void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId); // Call a code stub and return the code object called. Try to generate // the code if necessary. 
Do not perform a GC but instead return a retry diff --git a/src/type-info.cc b/src/type-info.cc index 1940601c47..02f69d00af 100644 --- a/src/type-info.cc +++ b/src/type-info.cc @@ -69,8 +69,8 @@ TypeFeedbackOracle::TypeFeedbackOracle(Handle code, } -Handle TypeFeedbackOracle::GetInfo(int pos) { - int entry = dictionary_->FindEntry(pos); +Handle TypeFeedbackOracle::GetInfo(unsigned ast_id) { + int entry = dictionary_->FindEntry(ast_id); return entry != NumberDictionary::kNotFound ? Handle(dictionary_->ValueAt(entry)) : Isolate::Current()->factory()->undefined_value(); @@ -78,7 +78,7 @@ Handle TypeFeedbackOracle::GetInfo(int pos) { bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) { - Handle map_or_code(GetInfo(expr->position())); + Handle map_or_code(GetInfo(expr->id())); if (map_or_code->IsMap()) return true; if (map_or_code->IsCode()) { Handle code(Code::cast(*map_or_code)); @@ -90,7 +90,7 @@ bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) { bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) { - Handle map_or_code(GetInfo(expr->position())); + Handle map_or_code(GetInfo(expr->id())); if (map_or_code->IsMap()) return true; if (map_or_code->IsCode()) { Handle code(Code::cast(*map_or_code)); @@ -102,7 +102,7 @@ bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) { bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) { - Handle value = GetInfo(expr->position()); + Handle value = GetInfo(expr->id()); return value->IsMap() || value->IsSmi(); } @@ -110,7 +110,7 @@ bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) { Handle TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) { ASSERT(LoadIsMonomorphic(expr)); Handle map_or_code( - Handle::cast(GetInfo(expr->position()))); + Handle::cast(GetInfo(expr->id()))); if (map_or_code->IsCode()) { Handle code(Code::cast(*map_or_code)); return Handle(code->FindFirstMap()); @@ -122,7 +122,7 @@ Handle TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) { Handle TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) { ASSERT(StoreIsMonomorphic(expr)); Handle map_or_code( - Handle::cast(GetInfo(expr->position()))); + Handle::cast(GetInfo(expr->id()))); if (map_or_code->IsCode()) { Handle code(Code::cast(*map_or_code)); return Handle(code->FindFirstMap()); @@ -134,14 +134,14 @@ Handle TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) { ZoneMapList* TypeFeedbackOracle::LoadReceiverTypes(Property* expr, Handle name) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL); - return CollectReceiverTypes(expr->position(), name, flags); + return CollectReceiverTypes(expr->id(), name, flags); } ZoneMapList* TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr, Handle name) { Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL); - return CollectReceiverTypes(expr->position(), name, flags); + return CollectReceiverTypes(expr->id(), name, flags); } @@ -158,12 +158,12 @@ ZoneMapList* TypeFeedbackOracle::CallReceiverTypes(Call* expr, OWN_MAP, NOT_IN_LOOP, arity); - return CollectReceiverTypes(expr->position(), name, flags); + return CollectReceiverTypes(expr->id(), name, flags); } CheckType TypeFeedbackOracle::GetCallCheckType(Call* expr) { - Handle value = GetInfo(expr->position()); + Handle value = GetInfo(expr->id()); if (!value->IsSmi()) return RECEIVER_MAP_CHECK; CheckType check = static_cast(Smi::cast(*value)->value()); ASSERT(check != RECEIVER_MAP_CHECK); @@ -172,14 +172,14 @@ CheckType 
 ExternalArrayType TypeFeedbackOracle::GetKeyedLoadExternalArrayType(
     Property* expr) {
-  Handle<Object> stub = GetInfo(expr->position());
+  Handle<Object> stub = GetInfo(expr->id());
   ASSERT(stub->IsCode());
   return Code::cast(*stub)->external_array_type();
 }
 ExternalArrayType TypeFeedbackOracle::GetKeyedStoreExternalArrayType(
     Expression* expr) {
-  Handle<Object> stub = GetInfo(expr->position());
+  Handle<Object> stub = GetInfo(expr->id());
   ASSERT(stub->IsCode());
   return Code::cast(*stub)->external_array_type();
 }
@@ -207,13 +207,13 @@ Handle<JSObject> TypeFeedbackOracle::GetPrototypeForPrimitiveCheck(
 bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
-  return *GetInfo(expr->position()) ==
+  return *GetInfo(expr->id()) ==
       Isolate::Current()->builtins()->builtin(id);
 }
 TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
-  Handle<Object> object = GetInfo(expr->position());
+  Handle<Object> object = GetInfo(expr->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -240,7 +240,7 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
 TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
-  Handle<Object> object = GetInfo(expr->position());
+  Handle<Object> object = GetInfo(expr->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -289,7 +289,7 @@ TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
 TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
-  Handle<Object> object = GetInfo(clause->position());
+  Handle<Object> object = GetInfo(clause->label()->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -315,11 +315,11 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
 }
-ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
+ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
                                                       Handle<String> name,
                                                       Code::Flags flags) {
   Isolate* isolate = Isolate::Current();
-  Handle<Object> object = GetInfo(position);
+  Handle<Object> object = GetInfo(ast_id);
   if (object->IsUndefined() || object->IsSmi()) return NULL;
   if (*object == isolate->builtins()->builtin(Builtins::kStoreIC_GlobalProxy)) {
@@ -342,8 +342,9 @@ ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
 }
-void TypeFeedbackOracle::SetInfo(int position, Object* target) {
-  MaybeObject* maybe_result = dictionary_->AtNumberPut(position, target);
+void TypeFeedbackOracle::SetInfo(unsigned ast_id, Object* target) {
+  ASSERT(dictionary_->FindEntry(ast_id) == NumberDictionary::kNotFound);
+  MaybeObject* maybe_result = dictionary_->AtNumberPut(ast_id, target);
   USE(maybe_result);
 #ifdef DEBUG
   Object* result;
@@ -360,53 +361,47 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
   const int kInitialCapacity = 16;
   List<int> code_positions(kInitialCapacity);
-  List<int> source_positions(kInitialCapacity);
-  CollectPositions(*code, &code_positions, &source_positions);
+  List<unsigned> ast_ids(kInitialCapacity);
+  CollectIds(*code, &code_positions, &ast_ids);
   ASSERT(dictionary_.is_null());  // Only initialize once.
dictionary_ = isolate->factory()->NewNumberDictionary( code_positions.length()); - int length = code_positions.length(); - ASSERT(source_positions.length() == length); + const int length = code_positions.length(); + ASSERT(ast_ids.length() == length); for (int i = 0; i < length; i++) { AssertNoAllocation no_allocation; RelocInfo info(code->instruction_start() + code_positions[i], RelocInfo::CODE_TARGET, 0); Code* target = Code::GetCodeFromTargetAddress(info.target_address()); - int position = source_positions[i]; + unsigned id = ast_ids[i]; InlineCacheState state = target->ic_state(); Code::Kind kind = target->kind(); if (kind == Code::TYPE_RECORDING_BINARY_OP_IC || kind == Code::COMPARE_IC) { - // TODO(kasperl): Avoid having multiple ICs with the same - // position by making sure that we have position information - // recorded for all binary ICs. - int entry = dictionary_->FindEntry(position); - if (entry == NumberDictionary::kNotFound) { - SetInfo(position, target); - } + SetInfo(id, target); } else if (state == MONOMORPHIC) { if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC || kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) { - SetInfo(position, target); - } else if (target->kind() != Code::CALL_IC || - target->check_type() == RECEIVER_MAP_CHECK) { + SetInfo(id, target); + } else if (kind != Code::CALL_IC || + target->check_type() == RECEIVER_MAP_CHECK) { Map* map = target->FindFirstMap(); if (map == NULL) { - SetInfo(position, target); + SetInfo(id, target); } else { - SetInfo(position, map); + SetInfo(id, map); } } else { ASSERT(target->kind() == Code::CALL_IC); CheckType check = target->check_type(); ASSERT(check != RECEIVER_MAP_CHECK); - SetInfo(position, Smi::FromInt(check)); + SetInfo(id, Smi::FromInt(check)); } } else if (state == MEGAMORPHIC) { - SetInfo(position, target); + SetInfo(id, target); } } // Allocate handle in the parent scope. @@ -414,41 +409,31 @@ void TypeFeedbackOracle::PopulateMap(Handle code) { } -void TypeFeedbackOracle::CollectPositions(Code* code, - List* code_positions, - List* source_positions) { +void TypeFeedbackOracle::CollectIds(Code* code, + List* code_positions, + List* ast_ids) { AssertNoAllocation no_allocation; - int position = 0; - // Because the ICs we use for global variables access in the full - // code generator do not have any meaningful positions, we avoid - // collecting those by filtering out contextual code targets. 
-  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
-             RelocInfo::kPositionMask;
+  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
   for (RelocIterator it(code, mask); !it.done(); it.next()) {
     RelocInfo* info = it.rinfo();
-    RelocInfo::Mode mode = info->rmode();
-    if (RelocInfo::IsCodeTarget(mode)) {
-      Code* target = Code::GetCodeFromTargetAddress(info->target_address());
-      if (target->is_inline_cache_stub()) {
-        InlineCacheState state = target->ic_state();
-        Code::Kind kind = target->kind();
-        if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
-          if (target->type_recording_binary_op_type() ==
-              TRBinaryOpIC::GENERIC) {
-            continue;
-          }
-        } else if (kind == Code::COMPARE_IC) {
-          if (target->compare_state() == CompareIC::GENERIC) continue;
-        } else {
-          if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
+    ASSERT(RelocInfo::IsCodeTarget(info->rmode()));
+    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
+    if (target->is_inline_cache_stub()) {
+      InlineCacheState state = target->ic_state();
+      Code::Kind kind = target->kind();
+      if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
+        if (target->type_recording_binary_op_type() ==
+            TRBinaryOpIC::GENERIC) {
+          continue;
         }
-        code_positions->Add(
-            static_cast<int>(info->pc() - code->instruction_start()));
-        source_positions->Add(position);
+      } else if (kind == Code::COMPARE_IC) {
+        if (target->compare_state() == CompareIC::GENERIC) continue;
+      } else {
+        if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
       }
-    } else {
-      ASSERT(RelocInfo::IsPosition(mode));
-      position = static_cast<int>(info->data());
+      code_positions->Add(
+          static_cast<int>(info->pc() - code->instruction_start()));
+      ast_ids->Add(static_cast<unsigned>(info->data()));
    }
  }
 }
diff --git a/src/type-info.h b/src/type-info.h
index f6e6729117..905625a97e 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -36,18 +36,18 @@ namespace v8 {
 namespace internal {
 //        Unknown
-//           |   |
-//           |   \--------------|
-//      Primitive       Non-primitive
-//           |   \--------|     |
-//         Number      String   |
-//          /   |         |     |
-//    Double  Integer32   |    /
-//        |      |       /    /
-//        |     Smi     /    /
-//        |      |     /    /
-//        |      |    /    /
-//        Uninitialized.--/
+//           |   \____________
+//           |                |
+//      Primitive       Non-primitive
+//           |   \_______     |
+//           |           |    |
+//         Number      String |
+//          /   \         |   |
+//    Double  Integer32   |  /
+//        |      |       /  /
+//        |     Smi     /  /
+//        |      |     / __/
+//        Uninitialized.
 class TypeInfo {
  public:
@@ -263,21 +263,21 @@ class TypeFeedbackOracle BASE_EMBEDDED {
   TypeInfo SwitchType(CaseClause* clause);
  private:
-  ZoneMapList* CollectReceiverTypes(int position,
+  ZoneMapList* CollectReceiverTypes(unsigned ast_id,
                                     Handle<String> name,
                                     Code::Flags flags);
-  void SetInfo(int position, Object* target);
+  void SetInfo(unsigned ast_id, Object* target);
   void PopulateMap(Handle<Code> code);
-  void CollectPositions(Code* code,
-                        List<int>* code_positions,
-                        List<int>* source_positions);
+  void CollectIds(Code* code,
+                  List<int>* code_positions,
+                  List<unsigned>* ast_ids);
   // Returns an element from the backing store. Returns undefined if
   // there is no information.
-  Handle<Object> GetInfo(int pos);
+  Handle<Object> GetInfo(unsigned ast_id);
   Handle<Context> global_context_;
   Handle<NumberDictionary> dictionary_;
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 9541a58bfb..8db54f0752 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -61,9 +61,15 @@ void Assembler::emitw(uint16_t x) {
 }
-void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
+void Assembler::emit_code_target(Handle<Code> target,
+                                 RelocInfo::Mode rmode,
+                                 unsigned ast_id) {
   ASSERT(RelocInfo::IsCodeTarget(rmode));
-  RecordRelocInfo(rmode);
+  if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
+    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
+  } else {
+    RecordRelocInfo(rmode);
+  }
   int current = code_targets_.length();
   if (current > 0 && code_targets_.last().is_identical_to(target)) {
     // Optimization if we keep jumping to the same code target.
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index c06bc0c451..c5e35af1db 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -869,12 +869,14 @@ void Assembler::call(Label* L) {
 }
-void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
+void Assembler::call(Handle<Code> target,
+                     RelocInfo::Mode rmode,
+                     unsigned ast_id) {
   positions_recorder()->WriteRecordedPositions();
   EnsureSpace ensure_space(this);
   // 1110 1000 #32-bit disp.
   emit(0xE8);
-  emit_code_target(target, rmode);
+  emit_code_target(target, rmode, ast_id);
 }
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 8a9938ba09..32db4b892b 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -1183,7 +1183,9 @@ class Assembler : public AssemblerBase {
   // Calls
   // Call near relative 32-bit displacement, relative to next instruction.
   void call(Label* L);
-  void call(Handle<Code> target, RelocInfo::Mode rmode);
+  void call(Handle<Code> target,
+            RelocInfo::Mode rmode,
+            unsigned ast_id = kNoASTId);
   // Calls directly to the given address using a relative offset.
   // Should only ever be used in Code objects for calls within the
@@ -1427,7 +1429,9 @@ class Assembler : public AssemblerBase {
   inline void emitl(uint32_t x);
   inline void emitq(uint64_t x, RelocInfo::Mode rmode);
   inline void emitw(uint16_t x);
-  inline void emit_code_target(Handle<Code> target, RelocInfo::Mode rmode);
+  inline void emit_code_target(Handle<Code> target,
+                               RelocInfo::Mode rmode,
+                               unsigned ast_id = kNoASTId);
   void emit(Immediate x) { emitl(x.value_); }
   // Emits a REX prefix that encodes a 64-bit operand size and
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index d5fb7da7f6..df377ae821 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -44,6 +44,12 @@ namespace internal {
 #define __ ACCESS_MASM(masm_)
+static unsigned GetPropertyId(Property* property) {
+  if (property->is_synthetic()) return AstNode::kNoNumber;
+  return property->id();
+}
+
+
 class JumpPatchSite BASE_EMBEDDED {
  public:
  explicit JumpPatchSite(MacroAssembler* masm)
@@ -743,7 +749,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
    }
  }
 }
@@ -816,7 +822,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    EmitCallIC(ic, &patch_site);
+    EmitCallIC(ic, &patch_site, clause->label()->id());
     __ testq(rax, rax);
     __ j(not_equal, &next_test);
@@ -1206,7 +1212,7 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
         __ Move(rax, key_literal->handle());
         Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-        EmitCallIC(ic, RelocInfo::CODE_TARGET);
+        EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
         __ jmp(done);
      }
    }
@@ -1292,7 +1298,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
     // Do a keyed property load.
     Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
     context()->Plug(rax);
  }
 }
@@ -1403,7 +1409,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
         __ movq(rdx, Operand(rsp, 0));
         if (property->emit_store()) {
           Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
-          EmitCallIC(ic, RelocInfo::CODE_TARGET);
+          EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
           PrepareForBailoutForId(key->id(), NO_REGISTERS);
        }
         break;
@@ -1606,13 +1612,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
     SetSourcePosition(expr->position() + 1);
     AccumulatorValueContext context(this);
     if (ShouldInlineSmiCase(op)) {
-      EmitInlineSmiBinaryOp(expr,
+      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
-      EmitBinaryOp(op, mode);
+      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
     // Deoptimization point in case the binary operation may have side effects.
     PrepareForBailout(expr->binary_operation(), TOS_REG);
@@ -1646,18 +1652,18 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
   Literal* key = prop->key()->AsLiteral();
   __ Move(rcx, key->handle());
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
-void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                               Token::Value op,
                                               OverwriteMode mode,
                                               Expression* left,
@@ -1675,7 +1681,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
   __ bind(&stub_call);
   __ movq(rax, rcx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->id());
   __ jmp(&done);
   __ bind(&smi_case);
@@ -1717,11 +1723,13 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
 }
-void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
+                                     Token::Value op,
                                      OverwriteMode mode) {
   __ pop(rdx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
+  // NULL signals no inlined smi code.
+  EmitCallIC(stub.GetCode(), NULL, expr->id());
   context()->Plug(rax);
 }
@@ -1953,7 +1961,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2005,7 +2013,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic = ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2040,7 +2048,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   Handle<Code> ic = ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2232,7 +2240,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
       SetSourcePosition(prop->position());
       Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
       // Push result (function).
       __ push(rax);
       // Push Global receiver.
@@ -3592,7 +3600,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
     Handle<Code> ic = ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
     // Restore context register.
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
@@ -3877,7 +3885,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ movq(rdx, rax);
     __ Move(rax, Smi::FromInt(1));
  }
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
   __ bind(&done);
   // Store the value returned in rax.
@@ -3910,7 +3918,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
          : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3927,7 +3935,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
       Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4152,7 +4160,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
-      EmitCallIC(ic, &patch_site);
+      EmitCallIC(ic, &patch_site, expr->id());
      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
      __ testq(rax, rax);
@@ -4212,7 +4220,9 @@ Register FullCodeGenerator::context_register() {
 }
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   RelocInfo::Mode mode,
+                                   unsigned ast_id) {
   ASSERT(mode == RelocInfo::CODE_TARGET ||
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   Counters* counters = isolate()->counters();
@@ -4231,11 +4241,13 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
     default:
       break;
   }
-  __ call(ic, mode);
+  __ call(ic, mode, ast_id);
 }
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   JumpPatchSite* patch_site,
+                                   unsigned ast_id) {
   Counters* counters = isolate()->counters();
   switch (ic->kind()) {
     case Code::LOAD_IC:
@@ -4252,7 +4264,7 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
     default:
       break;
   }
-  __ call(ic, RelocInfo::CODE_TARGET);
+  __ call(ic, RelocInfo::CODE_TARGET, ast_id);
   if (patch_site != NULL && patch_site->is_bound()) {
     patch_site->EmitPatchInfo();
   } else {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 3394206791..24f2fefcd8 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -425,9 +425,9 @@ void MacroAssembler::Abort(const char* msg) {
 }
-void MacroAssembler::CallStub(CodeStub* stub) {
+void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
   ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
-  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
+  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
 }
@@ -1610,12 +1610,14 @@ void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
 }
-void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
+void MacroAssembler::Call(Handle<Code> code_object,
+                          RelocInfo::Mode rmode,
+                          unsigned ast_id) {
 #ifdef DEBUG
   int end_position = pc_offset() + CallSize(code_object);
 #endif
   ASSERT(RelocInfo::IsCodeTarget(rmode));
-  call(code_object, rmode);
+  call(code_object, rmode, ast_id);
 #ifdef DEBUG
   CHECK_EQ(end_position, pc_offset());
 #endif
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 4c177205b6..8499edfb5a 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -692,7 +692,9 @@ class MacroAssembler: public Assembler {
   void Call(Address destination, RelocInfo::Mode rmode);
   void Call(ExternalReference ext);
-  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
+  void Call(Handle<Code> code_object,
+            RelocInfo::Mode rmode,
+            unsigned ast_id = kNoASTId);
   // The size of the code generated for different call instructions.
   int CallSize(Address destination, RelocInfo::Mode rmode) {
@@ -932,7 +934,7 @@ class MacroAssembler: public Assembler {
   // Runtime calls
   // Call a code stub.
-  void CallStub(CodeStub* stub);
+  void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
   // Call a code stub and return the code object called.  Try to generate
   // the code if necessary.  Do not perform a GC but instead return a retry
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index b81129e638..9847c891e7 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -501,7 +501,11 @@ void CheckDebugBreakFunction(DebugLocalContext* env,
   CHECK(Debug::HasDebugInfo(shared));
   TestBreakLocationIterator it1(Debug::GetDebugInfo(shared));
   it1.FindBreakLocationFromPosition(position);
-  CHECK_EQ(mode, it1.it()->rinfo()->rmode());
+  v8::internal::RelocInfo::Mode actual_mode = it1.it()->rinfo()->rmode();
+  if (actual_mode == v8::internal::RelocInfo::CODE_TARGET_WITH_ID) {
+    actual_mode = v8::internal::RelocInfo::CODE_TARGET;
+  }
+  CHECK_EQ(mode, actual_mode);
   if (mode != v8::internal::RelocInfo::JS_RETURN) {
     CHECK_EQ(debug_break,
              Code::GetCodeFromTargetAddress(it1.it()->rinfo()->target_address()));
@@ -516,7 +520,11 @@ void CheckDebugBreakFunction(DebugLocalContext* env,
   CHECK(debug->EnsureDebugInfo(shared));
   TestBreakLocationIterator it2(Debug::GetDebugInfo(shared));
   it2.FindBreakLocationFromPosition(position);
-  CHECK_EQ(mode, it2.it()->rinfo()->rmode());
+  actual_mode = it2.it()->rinfo()->rmode();
+  if (actual_mode == v8::internal::RelocInfo::CODE_TARGET_WITH_ID) {
+    actual_mode = v8::internal::RelocInfo::CODE_TARGET;
+  }
+  CHECK_EQ(mode, actual_mode);
   if (mode == v8::internal::RelocInfo::JS_RETURN) {
     CHECK(!Debug::IsDebugBreakAtReturn(it2.it()->rinfo()));
   }