From eac7f04669208cb15132d03ca15c8acd1a2be869 Mon Sep 17 00:00:00 2001 From: mbrandy Date: Thu, 4 Jun 2015 07:44:00 -0700 Subject: [PATCH] Add support for Embedded Constant Pools for PPC and Arm Embed constant pools within their corresponding Code objects. This removes support for out-of-line constant pools in favor of the new approach -- the main advantage being that it eliminates the need to allocate and manage separate constant pool array objects. Currently supported on PPC and ARM. Enabled by default on PPC only. This yields a 6% improvement in Octane on PPC64. R=bmeurer@chromium.org, rmcilroy@chromium.org, michael_dawson@ca.ibm.com BUG=chromium:478811 LOG=Y Review URL: https://codereview.chromium.org/1162993006 Cr-Commit-Position: refs/heads/master@{#28801} --- include/v8.h | 2 +- src/arm/assembler-arm-inl.h | 22 +- src/arm/assembler-arm.cc | 550 +++++++++---------- src/arm/assembler-arm.h | 138 ++----- src/arm/builtins-arm.cc | 12 +- src/arm/code-stubs-arm.cc | 12 +- src/arm/constants-arm.h | 5 + src/arm/debug-arm.cc | 2 +- src/arm/deoptimizer-arm.cc | 2 +- src/arm/frames-arm.cc | 11 +- src/arm/frames-arm.h | 8 +- src/arm/full-codegen-arm.cc | 11 +- src/arm/lithium-codegen-arm.cc | 6 +- src/arm/macro-assembler-arm.cc | 59 +-- src/arm/macro-assembler-arm.h | 10 +- src/arm64/assembler-arm64-inl.h | 10 +- src/arm64/assembler-arm64.cc | 14 - src/arm64/assembler-arm64.h | 24 +- src/arm64/deoptimizer-arm64.cc | 2 +- src/arm64/frames-arm64.cc | 6 - src/assembler.cc | 204 ++++++++- src/assembler.h | 157 +++++-- src/compiler/arm/code-generator-arm.cc | 2 +- src/compiler/ppc/code-generator-ppc.cc | 15 +- src/debug.cc | 2 +- src/deoptimizer.cc | 22 +- src/factory.cc | 38 +- src/factory.h | 10 - src/flag-definitions.h | 4 +- src/frames.cc | 35 +- src/frames.h | 38 +- src/globals.h | 24 +- src/heap-snapshot-generator.cc | 3 - src/heap/heap-inl.h | 6 - src/heap/heap.cc | 139 +------ src/heap/heap.h | 24 -- src/heap/mark-compact.cc | 50 +-- src/heap/mark-compact.h | 3 
+ src/heap/objects-visiting-inl.h | 33 -- src/heap/objects-visiting.cc | 3 - src/heap/objects-visiting.h | 2 - src/heap/spaces.cc | 3 +- src/ia32/assembler-ia32-inl.h | 12 +- src/ia32/assembler-ia32.cc | 20 +- src/ia32/assembler-ia32.h | 29 +- src/ia32/deoptimizer-ia32.cc | 2 +- src/ia32/frames-ia32.cc | 6 - src/ic/ic-inl.h | 23 +- src/ic/ic-state.cc | 2 +- src/ic/ic-state.h | 3 +- src/ic/ic.cc | 32 +- src/ic/ic.h | 34 +- src/ic/ppc/handler-compiler-ppc.cc | 6 +- src/lithium.cc | 4 - src/macro-assembler.h | 24 +- src/mips/assembler-mips.cc | 21 +- src/mips/assembler-mips.h | 28 +- src/mips/deoptimizer-mips.cc | 2 +- src/mips/frames-mips.cc | 6 - src/mips64/assembler-mips64.cc | 21 +- src/mips64/assembler-mips64.h | 28 +- src/mips64/deoptimizer-mips64.cc | 2 +- src/mips64/frames-mips64.cc | 6 - src/objects-debug.cc | 17 - src/objects-inl.h | 415 +------------------ src/objects-printer.cc | 40 -- src/objects.cc | 92 ++--- src/objects.h | 326 +-------------- src/ppc/assembler-ppc-inl.h | 184 ++++++++- src/ppc/assembler-ppc.cc | 167 +++++--- src/ppc/assembler-ppc.h | 132 +++++- src/ppc/builtins-ppc.cc | 37 +- src/ppc/code-stubs-ppc.cc | 22 +- src/ppc/constants-ppc.h | 5 + src/ppc/debug-ppc.cc | 2 +- src/ppc/deoptimizer-ppc.cc | 4 +- src/ppc/frames-ppc.cc | 14 +- src/ppc/frames-ppc.h | 7 +- src/ppc/full-codegen-ppc.cc | 11 +- src/ppc/lithium-codegen-ppc.cc | 8 +- src/ppc/macro-assembler-ppc.cc | 130 +++++- src/ppc/macro-assembler-ppc.h | 19 +- src/runtime/runtime-generator.cc | 2 +- src/snapshot/serialize.cc | 19 +- src/x64/assembler-x64-inl.h | 6 +- src/x64/assembler-x64.cc | 20 +- src/x64/assembler-x64.h | 28 +- src/x64/deoptimizer-x64.cc | 2 +- src/x64/frames-x64.cc | 6 - src/x87/assembler-x87-inl.h | 12 +- src/x87/assembler-x87.cc | 20 +- src/x87/assembler-x87.h | 29 +- src/x87/deoptimizer-x87.cc | 2 +- src/x87/frames-x87.cc | 6 - test/cctest/test-compiler.cc | 3 + test/cctest/test-constantpool.cc | 530 ++++++++++-------------- test/cctest/test-reloc-info.cc | 4 +- 
97 files changed, 1845 insertions(+), 2510 deletions(-) diff --git a/include/v8.h b/include/v8.h index d7e6760d60..56fe98d404 100644 --- a/include/v8.h +++ b/include/v8.h @@ -6955,7 +6955,7 @@ class Internals { static const int kNodeIsIndependentShift = 3; static const int kNodeIsPartiallyDependentShift = 4; - static const int kJSObjectType = 0xbf; + static const int kJSObjectType = 0xbe; static const int kFirstNonstringType = 0x80; static const int kOddballType = 0x83; static const int kForeignType = 0x87; diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h index 0b5ced5159..5e590a7d03 100644 --- a/src/arm/assembler-arm-inl.h +++ b/src/arm/assembler-arm-inl.h @@ -118,7 +118,7 @@ Address RelocInfo::target_address_address() { DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE); - if (FLAG_enable_ool_constant_pool || + if (FLAG_enable_embedded_constant_pool || Assembler::IsMovW(Memory::int32_at(pc_))) { // We return the PC for ool constant pool since this function is used by the // serializer and expects the address to reside within the code object. 
@@ -545,7 +545,7 @@ Address Assembler::return_address_from_call_start(Address pc) { void Assembler::deserialization_set_special_target_at( Address constant_pool_entry, Code* code, Address target) { - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { set_target_address_at(constant_pool_entry, code, target); } else { Memory::Address_at(constant_pool_entry) = target; @@ -562,21 +562,21 @@ void Assembler::deserialization_set_target_internal_reference_at( bool Assembler::is_constant_pool_load(Address pc) { if (CpuFeatures::IsSupported(ARMv7)) { return !Assembler::IsMovW(Memory::int32_at(pc)) || - (FLAG_enable_ool_constant_pool && + (FLAG_enable_embedded_constant_pool && Assembler::IsLdrPpRegOffset( Memory::int32_at(pc + 2 * Assembler::kInstrSize))); } else { return !Assembler::IsMovImmed(Memory::int32_at(pc)) || - (FLAG_enable_ool_constant_pool && + (FLAG_enable_embedded_constant_pool && Assembler::IsLdrPpRegOffset( Memory::int32_at(pc + 4 * Assembler::kInstrSize))); } } -Address Assembler::constant_pool_entry_address( - Address pc, ConstantPoolArray* constant_pool) { - if (FLAG_enable_ool_constant_pool) { +Address Assembler::constant_pool_entry_address(Address pc, + Address constant_pool) { + if (FLAG_enable_embedded_constant_pool) { DCHECK(constant_pool != NULL); int cp_offset; if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) { @@ -604,7 +604,7 @@ Address Assembler::constant_pool_entry_address( DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc))); cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc)); } - return reinterpret_cast
(constant_pool) + cp_offset; + return constant_pool + cp_offset; } else { DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc))); Instr instr = Memory::int32_at(pc); @@ -613,8 +613,7 @@ Address Assembler::constant_pool_entry_address( } -Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { if (is_constant_pool_load(pc)) { // This is a constant pool lookup. Return the value in the constant pool. return Memory::Address_at(constant_pool_entry_address(pc, constant_pool)); @@ -645,8 +644,7 @@ Address Assembler::target_address_at(Address pc, } -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { if (is_constant_pool_load(pc)) { diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc index 315eeb1b66..ed8664a9c5 100644 --- a/src/arm/assembler-arm.cc +++ b/src/arm/assembler-arm.cc @@ -234,9 +234,9 @@ const int RelocInfo::kApplyMask = 0; bool RelocInfo::IsCodedSpecially() { // The deserializer needs to know whether a pointer is specially coded.  Being // specially coded on ARM means that it is a movw/movt instruction, or is an - // out of line constant pool entry.  These only occur if - // FLAG_enable_ool_constant_pool is true. - return FLAG_enable_ool_constant_pool; + // embedded constant pool entry.  These only occur if + // FLAG_enable_embedded_constant_pool is true. 
+ return FLAG_enable_embedded_constant_pool; } @@ -449,11 +449,11 @@ const Instr kLdrStrInstrTypeMask = 0xffff0000; Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) : AssemblerBase(isolate, buffer, buffer_size), recorded_ast_id_(TypeFeedbackId::None()), - constant_pool_builder_(), + constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits), positions_recorder_(this) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); - num_pending_32_bit_reloc_info_ = 0; - num_pending_64_bit_reloc_info_ = 0; + num_pending_32_bit_constants_ = 0; + num_pending_64_bit_constants_ = 0; next_buffer_check_ = 0; const_pool_blocked_nesting_ = 0; no_const_pool_before_ = 0; @@ -471,17 +471,23 @@ Assembler::~Assembler() { void Assembler::GetCode(CodeDesc* desc) { reloc_info_writer.Finish(); - if (!FLAG_enable_ool_constant_pool) { - // Emit constant pool if necessary. + + // Emit constant pool if necessary. + int constant_pool_offset = 0; + if (FLAG_enable_embedded_constant_pool) { + constant_pool_offset = EmitEmbeddedConstantPool(); + } else { CheckConstPool(true, false); - DCHECK(num_pending_32_bit_reloc_info_ == 0); - DCHECK(num_pending_64_bit_reloc_info_ == 0); + DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); } // Set up code descriptor. desc->buffer = buffer_; desc->buffer_size = buffer_size_; desc->instr_size = pc_offset(); desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); + desc->constant_pool_size = + (constant_pool_offset ? 
desc->instr_size - constant_pool_offset : 0); desc->origin = this; } @@ -623,7 +629,7 @@ Register Assembler::GetRm(Instr instr) { Instr Assembler::GetConsantPoolLoadPattern() { - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { return kLdrPpImmedPattern; } else { return kLdrPCImmedPattern; @@ -632,7 +638,7 @@ Instr Assembler::GetConsantPoolLoadPattern() { Instr Assembler::GetConsantPoolLoadMask() { - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { return kLdrPpImmedMask; } else { return kLdrPCImmedMask; @@ -1044,8 +1050,8 @@ bool Operand::must_output_reloc_info(const Assembler* assembler) const { static bool use_mov_immediate_load(const Operand& x, const Assembler* assembler) { - if (FLAG_enable_ool_constant_pool && assembler != NULL && - !assembler->is_ool_constant_pool_available()) { + if (FLAG_enable_embedded_constant_pool && assembler != NULL && + !assembler->is_constant_pool_available()) { return true; } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && (assembler == NULL || !assembler->predictable_code_size())) { @@ -1074,8 +1080,9 @@ int Operand::instructions_required(const Assembler* assembler, if (use_mov_immediate_load(*this, assembler)) { // A movw / movt or mov / orr immediate load. instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4; - } else if (assembler != NULL && assembler->use_extended_constant_pool()) { - // An extended constant pool load. + } else if (assembler != NULL && + assembler->ConstantPoolAccessIsInOverflow()) { + // An overflowed constant pool load. instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5; } else { // A small constant pool load. 
@@ -1100,23 +1107,23 @@ int Operand::instructions_required(const Assembler* assembler, void Assembler::move_32_bit_immediate(Register rd, const Operand& x, Condition cond) { - RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); uint32_t imm32 = static_cast(x.imm32_); if (x.must_output_reloc_info(this)) { - RecordRelocInfo(rinfo); + RecordRelocInfo(x.rmode_); } if (use_mov_immediate_load(x, this)) { Register target = rd.code() == pc.code() ? ip : rd; if (CpuFeatures::IsSupported(ARMv7)) { - if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { + if (!FLAG_enable_embedded_constant_pool && + x.must_output_reloc_info(this)) { // Make sure the movw/movt doesn't get separated. BlockConstPoolFor(2); } movw(target, imm32 & 0xffff, cond); movt(target, imm32 >> 16, cond); } else { - DCHECK(FLAG_enable_ool_constant_pool); + DCHECK(FLAG_enable_embedded_constant_pool); mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond); orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond); orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond); @@ -1126,10 +1133,11 @@ void Assembler::move_32_bit_immediate(Register rd, mov(rd, target, LeaveCC, cond); } } else { - DCHECK(!FLAG_enable_ool_constant_pool || is_ool_constant_pool_available()); - ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); - if (section == ConstantPoolArray::EXTENDED_SECTION) { - DCHECK(FLAG_enable_ool_constant_pool); + DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available()); + ConstantPoolEntry::Access access = + ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_); + if (access == ConstantPoolEntry::OVERFLOWED) { + DCHECK(FLAG_enable_embedded_constant_pool); Register target = rd.code() == pc.code() ? ip : rd; // Emit instructions to load constant pool offset. if (CpuFeatures::IsSupported(ARMv7)) { @@ -1144,8 +1152,9 @@ void Assembler::move_32_bit_immediate(Register rd, // Load from constant pool at offset. 
ldr(rd, MemOperand(pp, target), cond); } else { - DCHECK(section == ConstantPoolArray::SMALL_SECTION); - ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); + DCHECK(access == ConstantPoolEntry::REGULAR); + ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0), + cond); } } } @@ -2554,7 +2563,7 @@ void Assembler::vmov(const DwVfpRegister dst, int vd, d; dst.split_code(&vd, &d); emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); - } else if (FLAG_enable_vldr_imm && is_ool_constant_pool_available()) { + } else if (FLAG_enable_vldr_imm && is_constant_pool_available()) { // TODO(jfb) Temporarily turned off until we have constant blinding or // some equivalent mitigation: an attacker can otherwise control // generated data which also happens to be executable, a Very Bad @@ -2570,18 +2579,17 @@ void Assembler::vmov(const DwVfpRegister dst, // The code could also randomize the order of values, though // that's tricky because vldr has a limited reach. Furthermore // it breaks load locality. - RelocInfo rinfo(pc_, imm); - ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); - if (section == ConstantPoolArray::EXTENDED_SECTION) { - DCHECK(FLAG_enable_ool_constant_pool); + ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm); + if (access == ConstantPoolEntry::OVERFLOWED) { + DCHECK(FLAG_enable_embedded_constant_pool); // Emit instructions to load constant pool offset. movw(ip, 0); movt(ip, 0); // Load from constant pool at offset. vldr(dst, MemOperand(pp, ip)); } else { - DCHECK(section == ConstantPoolArray::SMALL_SECTION); - vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); + DCHECK(access == ConstantPoolEntry::REGULAR); + vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0)); } } else { // Synthesise the double from ARM immediates. 
@@ -3556,22 +3564,6 @@ void Assembler::GrowBuffer() { // None of our relocation types are pc relative pointing outside the code // buffer nor pc absolute pointing inside the code buffer, so there is no need // to relocate any emitted relocation entries. - - // Relocate pending relocation entries. - for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) { - RelocInfo& rinfo = pending_32_bit_reloc_info_[i]; - DCHECK(rinfo.rmode() != RelocInfo::COMMENT && - rinfo.rmode() != RelocInfo::POSITION); - if (rinfo.rmode() != RelocInfo::JS_RETURN) { - rinfo.set_pc(rinfo.pc() + pc_delta); - } - } - for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { - RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; - DCHECK(rinfo.rmode() == RelocInfo::NONE64); - rinfo.set_pc(rinfo.pc() + pc_delta); - } - constant_pool_builder_.Relocate(pc_delta); } @@ -3579,8 +3571,8 @@ void Assembler::db(uint8_t data) { // No relocation info should be pending while using db. db is used // to write pure data with no pointers and the constant pool should // be emitted before using db. - DCHECK(num_pending_32_bit_reloc_info_ == 0); - DCHECK(num_pending_64_bit_reloc_info_ == 0); + DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); CheckBuffer(); *reinterpret_cast(pc_) = data; pc_ += sizeof(uint8_t); @@ -3591,14 +3583,26 @@ void Assembler::dd(uint32_t data) { // No relocation info should be pending while using dd. dd is used // to write pure data with no pointers and the constant pool should // be emitted before using dd. - DCHECK(num_pending_32_bit_reloc_info_ == 0); - DCHECK(num_pending_64_bit_reloc_info_ == 0); + DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); CheckBuffer(); *reinterpret_cast(pc_) = data; pc_ += sizeof(uint32_t); } +void Assembler::dq(uint64_t value) { + // No relocation info should be pending while using dq. 
dq is used + // to write pure data with no pointers and the constant pool should + // be emitted before using dd. + DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); + CheckBuffer(); + *reinterpret_cast(pc_) = value; + pc_ += sizeof(uint64_t); +} + + void Assembler::emit_code_stub_address(Code* stub) { CheckBuffer(); *reinterpret_cast(pc_) = @@ -3608,64 +3612,73 @@ void Assembler::emit_code_stub_address(Code* stub) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { + if (RelocInfo::IsNone(rmode) || + // Don't record external references unless the heap will be serialized. + (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && + !emit_debug_code())) { + return; + } + DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here + if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { + data = RecordedAstId().ToInt(); + ClearRecordedAstId(); + } RelocInfo rinfo(pc_, rmode, data, NULL); - RecordRelocInfo(rinfo); + reloc_info_writer.Write(&rinfo); } -void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { - if (!RelocInfo::IsNone(rinfo.rmode())) { - // Don't record external references unless the heap will be serialized. 
- if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { - return; - } - DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here - if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { - RelocInfo reloc_info_with_ast_id(rinfo.pc(), - rinfo.rmode(), - RecordedAstId().ToInt(), - NULL); - ClearRecordedAstId(); - reloc_info_writer.Write(&reloc_info_with_ast_id); - } else { - reloc_info_writer.Write(&rinfo); +ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position, + RelocInfo::Mode rmode, + intptr_t value) { + DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION && + rmode != RelocInfo::STATEMENT_POSITION && + rmode != RelocInfo::CONST_POOL && rmode != RelocInfo::NONE64); + bool sharing_ok = RelocInfo::IsNone(rmode) || + !(serializer_enabled() || rmode < RelocInfo::CELL); + if (FLAG_enable_embedded_constant_pool) { + return constant_pool_builder_.AddEntry(position, value, sharing_ok); + } else { + DCHECK(num_pending_32_bit_constants_ < kMaxNumPending32Constants); + if (num_pending_32_bit_constants_ == 0) { + first_const_pool_32_use_ = position; } + ConstantPoolEntry entry(position, value, sharing_ok); + pending_32_bit_constants_[num_pending_32_bit_constants_++] = entry; + + // Make sure the constant pool is not emitted in place of the next + // instruction for which we just recorded relocation info. 
+ BlockConstPoolFor(1); + return ConstantPoolEntry::REGULAR; } } -ConstantPoolArray::LayoutSection Assembler::ConstantPoolAddEntry( - const RelocInfo& rinfo) { - if (FLAG_enable_ool_constant_pool) { - return constant_pool_builder_.AddEntry(this, rinfo); +ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position, + double value) { + if (FLAG_enable_embedded_constant_pool) { + return constant_pool_builder_.AddEntry(position, value); } else { - if (rinfo.rmode() == RelocInfo::NONE64) { - DCHECK(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); - if (num_pending_64_bit_reloc_info_ == 0) { - first_const_pool_64_use_ = pc_offset(); - } - pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo; - } else { - DCHECK(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo); - if (num_pending_32_bit_reloc_info_ == 0) { - first_const_pool_32_use_ = pc_offset(); - } - pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo; + DCHECK(num_pending_64_bit_constants_ < kMaxNumPending64Constants); + if (num_pending_64_bit_constants_ == 0) { + first_const_pool_64_use_ = position; } + ConstantPoolEntry entry(position, value); + pending_64_bit_constants_[num_pending_64_bit_constants_++] = entry; + // Make sure the constant pool is not emitted in place of the next // instruction for which we just recorded relocation info. BlockConstPoolFor(1); - return ConstantPoolArray::SMALL_SECTION; + return ConstantPoolEntry::REGULAR; } } void Assembler::BlockConstPoolFor(int instructions) { - if (FLAG_enable_ool_constant_pool) { - // Should be a no-op if using an out-of-line constant pool. - DCHECK(num_pending_32_bit_reloc_info_ == 0); - DCHECK(num_pending_64_bit_reloc_info_ == 0); + if (FLAG_enable_embedded_constant_pool) { + // Should be a no-op if using an embedded constant pool. 
+ DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); return; } @@ -3674,10 +3687,11 @@ void Assembler::BlockConstPoolFor(int instructions) { // Max pool start (if we need a jump and an alignment). #ifdef DEBUG int start = pc_limit + kInstrSize + 2 * kPointerSize; - DCHECK((num_pending_32_bit_reloc_info_ == 0) || + DCHECK((num_pending_32_bit_constants_ == 0) || (start - first_const_pool_32_use_ + - num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); - DCHECK((num_pending_64_bit_reloc_info_ == 0) || + num_pending_64_bit_constants_ * kDoubleSize < + kMaxDistToIntPool)); + DCHECK((num_pending_64_bit_constants_ == 0) || (start - first_const_pool_64_use_ < kMaxDistToFPPool)); #endif no_const_pool_before_ = pc_limit; @@ -3690,10 +3704,10 @@ void Assembler::BlockConstPoolFor(int instructions) { void Assembler::CheckConstPool(bool force_emit, bool require_jump) { - if (FLAG_enable_ool_constant_pool) { - // Should be a no-op if using an out-of-line constant pool. - DCHECK(num_pending_32_bit_reloc_info_ == 0); - DCHECK(num_pending_64_bit_reloc_info_ == 0); + if (FLAG_enable_embedded_constant_pool) { + // Should be a no-op if using an embedded constant pool. + DCHECK(num_pending_32_bit_constants_ == 0); + DCHECK(num_pending_64_bit_constants_ == 0); return; } @@ -3707,8 +3721,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { } // There is nothing to do if there are no pending constant pool entries. - if ((num_pending_32_bit_reloc_info_ == 0) && - (num_pending_64_bit_reloc_info_ == 0)) { + if ((num_pending_32_bit_constants_ == 0) && + (num_pending_64_bit_constants_ == 0)) { // Calculate the offset of the next check. next_buffer_check_ = pc_offset() + kCheckPoolInterval; return; @@ -3719,15 +3733,15 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { // the gap to the relocation information). int jump_instr = require_jump ? 
kInstrSize : 0; int size_up_to_marker = jump_instr + kInstrSize; - int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize; - bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0); + int size_after_marker = num_pending_32_bit_constants_ * kPointerSize; + bool has_fp_values = (num_pending_64_bit_constants_ > 0); bool require_64_bit_align = false; if (has_fp_values) { require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7); if (require_64_bit_align) { size_after_marker += kInstrSize; } - size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize; + size_after_marker += num_pending_64_bit_constants_ * kDoubleSize; } int size = size_up_to_marker + size_after_marker; @@ -3744,9 +3758,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0)); bool need_emit = false; if (has_fp_values) { - int dist64 = pc_offset() + - size - - num_pending_32_bit_reloc_info_ * kPointerSize - + int dist64 = pc_offset() + size - + num_pending_32_bit_constants_ * kPointerSize - first_const_pool_64_use_; if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) || (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) { @@ -3788,60 +3801,52 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { // Emit 64-bit constant pool entries first: their range is smaller than // 32-bit entries. - for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { - RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; + for (int i = 0; i < num_pending_64_bit_constants_; i++) { + ConstantPoolEntry& entry = pending_64_bit_constants_[i]; DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment. - Instr instr = instr_at(rinfo.pc()); + Instr instr = instr_at(entry.position()); // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0. 
DCHECK((IsVldrDPcImmediateOffset(instr) && GetVldrDRegisterImmediateOffset(instr) == 0)); - int delta = pc_ - rinfo.pc() - kPcLoadDelta; + int delta = pc_offset() - entry.position() - kPcLoadDelta; DCHECK(is_uint10(delta)); bool found = false; - uint64_t value = rinfo.raw_data64(); + uint64_t value = entry.value64(); for (int j = 0; j < i; j++) { - RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j]; - if (value == rinfo2.raw_data64()) { + ConstantPoolEntry& entry2 = pending_64_bit_constants_[j]; + if (value == entry2.value64()) { found = true; - DCHECK(rinfo2.rmode() == RelocInfo::NONE64); - Instr instr2 = instr_at(rinfo2.pc()); + Instr instr2 = instr_at(entry2.position()); DCHECK(IsVldrDPcImmediateOffset(instr2)); delta = GetVldrDRegisterImmediateOffset(instr2); - delta += rinfo2.pc() - rinfo.pc(); + delta += entry2.position() - entry.position(); break; } } - instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta)); + instr_at_put(entry.position(), + SetVldrDRegisterImmediateOffset(instr, delta)); if (!found) { - uint64_t uint_data = rinfo.raw_data64(); - emit(uint_data & 0xFFFFFFFF); - emit(uint_data >> 32); + dq(entry.value64()); } } // Emit 32-bit constant pool entries. - for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) { - RelocInfo& rinfo = pending_32_bit_reloc_info_[i]; - DCHECK(rinfo.rmode() != RelocInfo::COMMENT && - rinfo.rmode() != RelocInfo::POSITION && - rinfo.rmode() != RelocInfo::STATEMENT_POSITION && - rinfo.rmode() != RelocInfo::CONST_POOL && - rinfo.rmode() != RelocInfo::NONE64); - - Instr instr = instr_at(rinfo.pc()); + for (int i = 0; i < num_pending_32_bit_constants_; i++) { + ConstantPoolEntry& entry = pending_32_bit_constants_[i]; + Instr instr = instr_at(entry.position()); // 64-bit loads shouldn't get here. 
DCHECK(!IsVldrDPcImmediateOffset(instr)); if (IsLdrPcImmediateOffset(instr) && GetLdrRegisterImmediateOffset(instr) == 0) { - int delta = pc_ - rinfo.pc() - kPcLoadDelta; + int delta = pc_offset() - entry.position() - kPcLoadDelta; DCHECK(is_uint12(delta)); // 0 is the smallest delta: // ldr rd, [pc, #0] @@ -3849,16 +3854,15 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { // data bool found = false; - if (!serializer_enabled() && rinfo.rmode() >= RelocInfo::CELL) { + if (entry.sharing_ok()) { for (int j = 0; j < i; j++) { - RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; + ConstantPoolEntry& entry2 = pending_32_bit_constants_[j]; - if ((rinfo2.data() == rinfo.data()) && - (rinfo2.rmode() == rinfo.rmode())) { - Instr instr2 = instr_at(rinfo2.pc()); + if (entry2.value() == entry.value()) { + Instr instr2 = instr_at(entry2.position()); if (IsLdrPcImmediateOffset(instr2)) { delta = GetLdrRegisterImmediateOffset(instr2); - delta += rinfo2.pc() - rinfo.pc(); + delta += entry2.position() - entry.position(); found = true; break; } @@ -3866,18 +3870,19 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { } } - instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta)); + instr_at_put(entry.position(), + SetLdrRegisterImmediateOffset(instr, delta)); if (!found) { - emit(rinfo.data()); + emit(entry.value()); } } else { DCHECK(IsMovW(instr)); } } - num_pending_32_bit_reloc_info_ = 0; - num_pending_64_bit_reloc_info_ = 0; + num_pending_32_bit_constants_ = 0; + num_pending_64_bit_constants_ = 0; first_const_pool_32_use_ = -1; first_const_pool_64_use_ = -1; @@ -3894,225 +3899,56 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { } -Handle Assembler::NewConstantPool(Isolate* isolate) { - if (!FLAG_enable_ool_constant_pool) { - return isolate->factory()->empty_constant_pool_array(); - } - return constant_pool_builder_.New(isolate); -} +void Assembler::PatchConstantPoolAccessInstruction( + int pc_offset, 
int offset, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + DCHECK(FLAG_enable_embedded_constant_pool); + Address pc = buffer_ + pc_offset; - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - constant_pool_builder_.Populate(this, constant_pool); -} - - -ConstantPoolBuilder::ConstantPoolBuilder() - : entries_(), current_section_(ConstantPoolArray::SMALL_SECTION) {} - - -bool ConstantPoolBuilder::IsEmpty() { - return entries_.size() == 0; -} - - -ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType( - RelocInfo::Mode rmode) { - if (rmode == RelocInfo::NONE64) { - return ConstantPoolArray::INT64; - } else if (!RelocInfo::IsGCRelocMode(rmode)) { - return ConstantPoolArray::INT32; - } else if (RelocInfo::IsCodeTarget(rmode)) { - return ConstantPoolArray::CODE_PTR; - } else { - DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode)); - return ConstantPoolArray::HEAP_PTR; - } -} - - -ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry( - Assembler* assm, const RelocInfo& rinfo) { - RelocInfo::Mode rmode = rinfo.rmode(); - DCHECK(rmode != RelocInfo::COMMENT && - rmode != RelocInfo::POSITION && - rmode != RelocInfo::STATEMENT_POSITION && - rmode != RelocInfo::CONST_POOL); - - // Try to merge entries which won't be patched. - int merged_index = -1; - ConstantPoolArray::LayoutSection entry_section = current_section_; - if (RelocInfo::IsNone(rmode) || - (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) { - size_t i; - std::vector::const_iterator it; - for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { - if (RelocInfo::IsEqual(rinfo, it->rinfo_)) { - // Merge with found entry. - merged_index = i; - entry_section = entries_[i].section_; - break; - } - } - } - DCHECK(entry_section <= current_section_); - entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index)); - - if (merged_index == -1) { - // Not merged, so update the appropriate count. 
- number_of_entries_[entry_section].increment(GetConstantPoolType(rmode)); - } - - // Check if we still have room for another entry in the small section - // given Arm's ldr and vldr immediate offset range. - if (current_section_ == ConstantPoolArray::SMALL_SECTION && - !(is_uint12(ConstantPoolArray::SizeFor(*small_entries())) && - is_uint10(ConstantPoolArray::MaxInt64Offset( - small_entries()->count_of(ConstantPoolArray::INT64))))) { - current_section_ = ConstantPoolArray::EXTENDED_SECTION; - } - return entry_section; -} - - -void ConstantPoolBuilder::Relocate(int pc_delta) { - for (std::vector::iterator entry = entries_.begin(); - entry != entries_.end(); entry++) { - DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN); - entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta); - } -} - - -Handle ConstantPoolBuilder::New(Isolate* isolate) { - if (IsEmpty()) { - return isolate->factory()->empty_constant_pool_array(); - } else if (extended_entries()->is_empty()) { - return isolate->factory()->NewConstantPoolArray(*small_entries()); - } else { - DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION); - return isolate->factory()->NewExtendedConstantPoolArray( - *small_entries(), *extended_entries()); - } -} - - -void ConstantPoolBuilder::Populate(Assembler* assm, - ConstantPoolArray* constant_pool) { - DCHECK_EQ(extended_entries()->is_empty(), - !constant_pool->is_extended_layout()); - DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries( - constant_pool, ConstantPoolArray::SMALL_SECTION))); - if (constant_pool->is_extended_layout()) { - DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries( - constant_pool, ConstantPoolArray::EXTENDED_SECTION))); - } - - // Set up initial offsets. - int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS] - [ConstantPoolArray::NUMBER_OF_TYPES]; - for (int section = 0; section <= constant_pool->final_section(); section++) { - int section_start = (section == ConstantPoolArray::EXTENDED_SECTION) - ? 
small_entries()->total_count() - : 0; - for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) { - ConstantPoolArray::Type type = static_cast(i); - if (number_of_entries_[section].count_of(type) != 0) { - offsets[section][type] = constant_pool->OffsetOfElementAt( - number_of_entries_[section].base_of(type) + section_start); - } - } - } - - for (std::vector::iterator entry = entries_.begin(); - entry != entries_.end(); entry++) { - RelocInfo rinfo = entry->rinfo_; - RelocInfo::Mode rmode = entry->rinfo_.rmode(); - ConstantPoolArray::Type type = GetConstantPoolType(rmode); - - // Update constant pool if necessary and get the entry's offset. - int offset; - if (entry->merged_index_ == -1) { - offset = offsets[entry->section_][type]; - offsets[entry->section_][type] += ConstantPoolArray::entry_size(type); - if (type == ConstantPoolArray::INT64) { - constant_pool->set_at_offset(offset, rinfo.data64()); - } else if (type == ConstantPoolArray::INT32) { - constant_pool->set_at_offset(offset, - static_cast(rinfo.data())); - } else if (type == ConstantPoolArray::CODE_PTR) { - constant_pool->set_at_offset(offset, - reinterpret_cast
(rinfo.data())); - } else { - DCHECK(type == ConstantPoolArray::HEAP_PTR); - constant_pool->set_at_offset(offset, - reinterpret_cast(rinfo.data())); - } - offset -= kHeapObjectTag; - entry->merged_index_ = offset; // Stash offset for merged entries. + // Patch vldr/ldr instruction with correct offset. + Instr instr = instr_at(pc); + if (access == ConstantPoolEntry::OVERFLOWED) { + if (CpuFeatures::IsSupported(ARMv7)) { + // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0]. + Instr next_instr = instr_at(pc + kInstrSize); + DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0)); + DCHECK((IsMovT(next_instr) && + Instruction::ImmedMovwMovtValue(next_instr) == 0)); + instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff)); + instr_at_put(pc + kInstrSize, + PatchMovwImmediate(next_instr, offset >> 16)); } else { - DCHECK(entry->merged_index_ < (entry - entries_.begin())); - offset = entries_[entry->merged_index_].merged_index_; - } - - // Patch vldr/ldr instruction with correct offset. - Instr instr = assm->instr_at(rinfo.pc()); - if (entry->section_ == ConstantPoolArray::EXTENDED_SECTION) { - if (CpuFeatures::IsSupported(ARMv7)) { - // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0]. - Instr next_instr = assm->instr_at(rinfo.pc() + Assembler::kInstrSize); - DCHECK((Assembler::IsMovW(instr) && - Instruction::ImmedMovwMovtValue(instr) == 0)); - DCHECK((Assembler::IsMovT(next_instr) && - Instruction::ImmedMovwMovtValue(next_instr) == 0)); - assm->instr_at_put( - rinfo.pc(), Assembler::PatchMovwImmediate(instr, offset & 0xffff)); - assm->instr_at_put( - rinfo.pc() + Assembler::kInstrSize, - Assembler::PatchMovwImmediate(next_instr, offset >> 16)); - } else { - // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0]. 
- Instr instr_2 = assm->instr_at(rinfo.pc() + Assembler::kInstrSize); - Instr instr_3 = assm->instr_at(rinfo.pc() + 2 * Assembler::kInstrSize); - Instr instr_4 = assm->instr_at(rinfo.pc() + 3 * Assembler::kInstrSize); - DCHECK((Assembler::IsMovImmed(instr) && - Instruction::Immed8Value(instr) == 0)); - DCHECK((Assembler::IsOrrImmed(instr_2) && - Instruction::Immed8Value(instr_2) == 0) && - Assembler::GetRn(instr_2).is(Assembler::GetRd(instr_2))); - DCHECK((Assembler::IsOrrImmed(instr_3) && - Instruction::Immed8Value(instr_3) == 0) && - Assembler::GetRn(instr_3).is(Assembler::GetRd(instr_3))); - DCHECK((Assembler::IsOrrImmed(instr_4) && - Instruction::Immed8Value(instr_4) == 0) && - Assembler::GetRn(instr_4).is(Assembler::GetRd(instr_4))); - assm->instr_at_put( - rinfo.pc(), Assembler::PatchShiftImm(instr, (offset & kImm8Mask))); - assm->instr_at_put( - rinfo.pc() + Assembler::kInstrSize, - Assembler::PatchShiftImm(instr_2, (offset & (kImm8Mask << 8)))); - assm->instr_at_put( - rinfo.pc() + 2 * Assembler::kInstrSize, - Assembler::PatchShiftImm(instr_3, (offset & (kImm8Mask << 16)))); - assm->instr_at_put( - rinfo.pc() + 3 * Assembler::kInstrSize, - Assembler::PatchShiftImm(instr_4, (offset & (kImm8Mask << 24)))); - } - } else if (type == ConstantPoolArray::INT64) { - // Instruction to patch must be 'vldr rd, [pp, #0]'. - DCHECK((Assembler::IsVldrDPpImmediateOffset(instr) && - Assembler::GetVldrDRegisterImmediateOffset(instr) == 0)); - DCHECK(is_uint10(offset)); - assm->instr_at_put(rinfo.pc(), Assembler::SetVldrDRegisterImmediateOffset( - instr, offset)); - } else { - // Instruction to patch must be 'ldr rd, [pp, #0]'. - DCHECK((Assembler::IsLdrPpImmediateOffset(instr) && - Assembler::GetLdrRegisterImmediateOffset(instr) == 0)); - DCHECK(is_uint12(offset)); - assm->instr_at_put( - rinfo.pc(), Assembler::SetLdrRegisterImmediateOffset(instr, offset)); + // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0]. 
+ Instr instr_2 = instr_at(pc + kInstrSize); + Instr instr_3 = instr_at(pc + 2 * kInstrSize); + Instr instr_4 = instr_at(pc + 3 * kInstrSize); + DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0)); + DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) && + GetRn(instr_2).is(GetRd(instr_2))); + DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) && + GetRn(instr_3).is(GetRd(instr_3))); + DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) && + GetRn(instr_4).is(GetRd(instr_4))); + instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask))); + instr_at_put(pc + kInstrSize, + PatchShiftImm(instr_2, (offset & (kImm8Mask << 8)))); + instr_at_put(pc + 2 * kInstrSize, + PatchShiftImm(instr_3, (offset & (kImm8Mask << 16)))); + instr_at_put(pc + 3 * kInstrSize, + PatchShiftImm(instr_4, (offset & (kImm8Mask << 24)))); } + } else if (type == ConstantPoolEntry::DOUBLE) { + // Instruction to patch must be 'vldr rd, [pp, #0]'. + DCHECK((IsVldrDPpImmediateOffset(instr) && + GetVldrDRegisterImmediateOffset(instr) == 0)); + DCHECK(is_uint10(offset)); + instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset)); + } else { + // Instruction to patch must be 'ldr rd, [pp, #0]'. + DCHECK((IsLdrPpImmediateOffset(instr) && + GetLdrRegisterImmediateOffset(instr) == 0)); + DCHECK(is_uint12(offset)); + instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset)); } } diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h index 836ff4f3d0..82d786fe51 100644 --- a/src/arm/assembler-arm.h +++ b/src/arm/assembler-arm.h @@ -94,7 +94,7 @@ const int kRegister_pc_Code = 15; struct Register { static const int kNumRegisters = 16; static const int kMaxNumAllocatableRegisters = - FLAG_enable_ool_constant_pool ? 8 : 9; + FLAG_enable_embedded_constant_pool ? 
8 : 9; static const int kSizeInBytes = 4; inline static int NumAllocatableRegisters(); @@ -122,7 +122,7 @@ struct Register { "r7", "r8", }; - if (FLAG_enable_ool_constant_pool && (index >= 7)) { + if (FLAG_enable_embedded_constant_pool && (index >= 7)) { return names[index + 1]; } return names[index]; @@ -164,7 +164,7 @@ const Register r5 = { kRegister_r5_Code }; const Register r6 = { kRegister_r6_Code }; // Used as context register. const Register r7 = {kRegister_r7_Code}; -// Used as constant pool pointer register if FLAG_enable_ool_constant_pool. +// Used as constant pool pointer register if FLAG_enable_embedded_constant_pool. const Register r8 = { kRegister_r8_Code }; // Used as lithium codegen scratch register. const Register r9 = { kRegister_r9_Code }; @@ -651,52 +651,6 @@ class NeonListOperand BASE_EMBEDDED { }; -// Class used to build a constant pool. -class ConstantPoolBuilder BASE_EMBEDDED { - public: - ConstantPoolBuilder(); - ConstantPoolArray::LayoutSection AddEntry(Assembler* assm, - const RelocInfo& rinfo); - void Relocate(int pc_delta); - bool IsEmpty(); - Handle New(Isolate* isolate); - void Populate(Assembler* assm, ConstantPoolArray* constant_pool); - - inline ConstantPoolArray::LayoutSection current_section() const { - return current_section_; - } - - inline ConstantPoolArray::NumberOfEntries* number_of_entries( - ConstantPoolArray::LayoutSection section) { - return &number_of_entries_[section]; - } - - inline ConstantPoolArray::NumberOfEntries* small_entries() { - return number_of_entries(ConstantPoolArray::SMALL_SECTION); - } - - inline ConstantPoolArray::NumberOfEntries* extended_entries() { - return number_of_entries(ConstantPoolArray::EXTENDED_SECTION); - } - - private: - struct ConstantPoolEntry { - ConstantPoolEntry(RelocInfo rinfo, ConstantPoolArray::LayoutSection section, - int merged_index) - : rinfo_(rinfo), section_(section), merged_index_(merged_index) {} - - RelocInfo rinfo_; - ConstantPoolArray::LayoutSection section_; - int 
merged_index_; - }; - - ConstantPoolArray::Type GetConstantPoolType(RelocInfo::Mode rmode); - - std::vector entries_; - ConstantPoolArray::LayoutSection current_section_; - ConstantPoolArray::NumberOfEntries number_of_entries_[2]; -}; - struct VmovIndex { unsigned char index; }; @@ -754,19 +708,16 @@ class Assembler : public AssemblerBase { // Return the address in the constant pool of the code target address used by // the branch/call instruction at pc, or the object in a mov. - INLINE(static Address constant_pool_entry_address( - Address pc, ConstantPoolArray* constant_pool)); + INLINE(static Address constant_pool_entry_address(Address pc, + Address constant_pool)); // Read/Modify the code target address in the branch/call instruction at pc. - INLINE(static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool)); - INLINE(static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED)); + INLINE(static Address target_address_at(Address pc, Address constant_pool)); + INLINE(static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)); INLINE(static Address target_address_at(Address pc, Code* code)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } INLINE(static void set_target_address_at(Address pc, @@ -774,7 +725,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? 
code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } @@ -1450,11 +1401,13 @@ class Assembler : public AssemblerBase { void RecordConstPool(int size); // Writes a single byte or word of data in the code stream. Used - // for inline tables, e.g., jump-tables. The constant pool should be - // emitted before any use of db and dd to ensure that constant pools + // for inline tables, e.g., jump-tables. CheckConstantPool() should be + // called before any use of db/dd/dq/dp to ensure that constant pools // are not emitted as part of the tables generated. void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dd(data); } // Emits the address of the code stub's first instruction. void emit_code_stub_address(Code* stub); @@ -1526,8 +1479,8 @@ class Assembler : public AssemblerBase { static const int kMaxDistToIntPool = 4*KB; static const int kMaxDistToFPPool = 1*KB; // All relocations could be integer, it therefore acts as the limit. - static const int kMaxNumPending32RelocInfo = kMaxDistToIntPool/kInstrSize; - static const int kMaxNumPending64RelocInfo = kMaxDistToFPPool/kInstrSize; + static const int kMaxNumPending32Constants = kMaxDistToIntPool / kInstrSize; + static const int kMaxNumPending64Constants = kMaxDistToFPPool / kInstrSize; // Postpone the generation of the constant pool for the specified number of // instructions. @@ -1536,17 +1489,19 @@ class Assembler : public AssemblerBase { // Check if is time to emit a constant pool. void CheckConstPool(bool force_emit, bool require_jump); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. 
- void PopulateConstantPool(ConstantPoolArray* constant_pool); - - bool use_extended_constant_pool() const { - return constant_pool_builder_.current_section() == - ConstantPoolArray::EXTENDED_SECTION; + int EmitEmbeddedConstantPool() { + DCHECK(FLAG_enable_embedded_constant_pool); + return constant_pool_builder_.Emit(this); } + bool ConstantPoolAccessIsInOverflow() const { + return constant_pool_builder_.NextAccess(ConstantPoolEntry::INTPTR) == + ConstantPoolEntry::OVERFLOWED; + } + + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type); protected: // Relocation for a type-recording IC has the AST id added to it. This @@ -1581,10 +1536,10 @@ class Assembler : public AssemblerBase { // Max pool start (if we need a jump and an alignment). int start = pc_offset() + kInstrSize + 2 * kPointerSize; // Check the constant pool hasn't been blocked for too long. - DCHECK((num_pending_32_bit_reloc_info_ == 0) || - (start + num_pending_64_bit_reloc_info_ * kDoubleSize < + DCHECK((num_pending_32_bit_constants_ == 0) || + (start + num_pending_64_bit_constants_ * kDoubleSize < (first_const_pool_32_use_ + kMaxDistToIntPool))); - DCHECK((num_pending_64_bit_reloc_info_ == 0) || + DCHECK((num_pending_64_bit_constants_ == 0) || (start < (first_const_pool_64_use_ + kMaxDistToFPPool))); #endif // Two cases: @@ -1643,20 +1598,20 @@ class Assembler : public AssemblerBase { static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; RelocInfoWriter reloc_info_writer; - // Relocation info records are also used during code generation as temporary + // ConstantPoolEntry records are used during code generation as temporary // containers for constants and code target addresses until they are emitted - // to the constant pool. These pending relocation info records are temporarily - // stored in a separate buffer until a constant pool is emitted. + // to the constant pool. 
These records are temporarily stored in a separate + // buffer until a constant pool is emitted. // If every instruction in a long sequence is accessing the pool, we need one // pending relocation entry per instruction. - // The buffers of pending relocation info. - RelocInfo pending_32_bit_reloc_info_[kMaxNumPending32RelocInfo]; - RelocInfo pending_64_bit_reloc_info_[kMaxNumPending64RelocInfo]; - // Number of pending reloc info entries in the 32 bits buffer. - int num_pending_32_bit_reloc_info_; - // Number of pending reloc info entries in the 64 bits buffer. - int num_pending_64_bit_reloc_info_; + // The buffers of pending constant pool entries. + ConstantPoolEntry pending_32_bit_constants_[kMaxNumPending32Constants]; + ConstantPoolEntry pending_64_bit_constants_[kMaxNumPending64Constants]; + // Number of pending constant pool entries in the 32 bits buffer. + int num_pending_32_bit_constants_; + // Number of pending constant pool entries in the 64 bits buffer. + int num_pending_64_bit_constants_; ConstantPoolBuilder constant_pool_builder_; @@ -1685,15 +1640,12 @@ class Assembler : public AssemblerBase { void bind_to(Label* L, int pos); void next(Label* L); - enum UseConstantPoolMode { - USE_CONSTANT_POOL, - DONT_USE_CONSTANT_POOL - }; - // Record reloc info for current pc_ void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0); - void RecordRelocInfo(const RelocInfo& rinfo); - ConstantPoolArray::LayoutSection ConstantPoolAddEntry(const RelocInfo& rinfo); + ConstantPoolEntry::Access ConstantPoolAddEntry(int position, + RelocInfo::Mode rmode, + intptr_t value); + ConstantPoolEntry::Access ConstantPoolAddEntry(int position, double value); friend class RelocInfo; friend class CodePatcher; diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index eb09bc1061..7ebef530f3 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -874,7 +874,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // r2: receiver // r3: argc 
// r4: argv - // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered + // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered ProfileEntryHookStub::MaybeCallEntryHook(masm); // Clear the context before we push it when entering the internal frame. @@ -922,7 +922,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); __ mov(r5, Operand(r4)); __ mov(r6, Operand(r4)); - if (!FLAG_enable_ool_constant_pool) { + if (!FLAG_enable_embedded_constant_pool) { __ mov(r8, Operand(r4)); } if (kR9Available == 1) { @@ -1166,8 +1166,8 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset)); { ConstantPoolUnavailableScope constant_pool_unavailable(masm); - if (FLAG_enable_ool_constant_pool) { - __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset)); + if (FLAG_enable_embedded_constant_pool) { + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0); } // Load the OSR entrypoint offset from the deoptimization data. @@ -1649,8 +1649,8 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ SmiTag(r0); __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | - (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | - fp.bit() | lr.bit()); + (FLAG_enable_embedded_constant_pool ? 
pp.bit() : 0) | + fp.bit() | lr.bit()); __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); } diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index da06b1034c..ecd2082cd0 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -1100,8 +1100,8 @@ void CEntryStub::Generate(MacroAssembler* masm) { __ ldr(r1, MemOperand(r1)); __ mov(r2, Operand(pending_handler_offset_address)); __ ldr(r2, MemOperand(r2)); - if (FLAG_enable_ool_constant_pool) { - __ ldr(pp, FieldMemOperand(r1, Code::kConstantPoolOffset)); + if (FLAG_enable_embedded_constant_pool) { + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1); } __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); __ add(pc, r1, r2); @@ -1148,8 +1148,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) { // r3: argc // r4: argv int marker = type(); - if (FLAG_enable_ool_constant_pool) { - __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array())); + if (FLAG_enable_embedded_constant_pool) { + __ mov(r8, Operand::Zero()); } __ mov(r7, Operand(Smi::FromInt(marker))); __ mov(r6, Operand(Smi::FromInt(marker))); @@ -1158,8 +1158,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) { __ ldr(r5, MemOperand(r5)); __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used. __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | - (FLAG_enable_ool_constant_pool ? r8.bit() : 0) | - ip.bit()); + (FLAG_enable_embedded_constant_pool ? r8.bit() : 0) | + ip.bit()); // Set up frame pointer for the frame to be pushed. 
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h index af68fb24e2..7b8529c4bb 100644 --- a/src/arm/constants-arm.h +++ b/src/arm/constants-arm.h @@ -42,6 +42,11 @@ const int kNumVFPRegisters = kNumVFPSingleRegisters + kNumVFPDoubleRegisters; const int kPCRegister = 15; const int kNoRegister = -1; +// Used in embedded constant pool builder - max reach in bits for +// various load instructions (unsigned) +const int kLdrMaxReachBits = 12; +const int kVldrMaxReachBits = 10; + // ----------------------------------------------------------------------------- // Conditions. diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc index 831edaf951..e1af27de02 100644 --- a/src/arm/debug-arm.cc +++ b/src/arm/debug-arm.cc @@ -265,7 +265,7 @@ void DebugCodegen::GenerateFrameDropperLiveEdit(MacroAssembler* masm) { StandardFrameConstants::kConstantPoolOffset - kPointerSize)); // Pop return address, frame and constant pool pointer (if - // FLAG_enable_ool_constant_pool). + // FLAG_enable_embedded_constant_pool). 
__ LeaveFrame(StackFrame::INTERNAL); { ConstantPoolUnavailableScope constant_pool_unavailable(masm); diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc index 867dc300f4..a9bcea9726 100644 --- a/src/arm/deoptimizer-arm.cc +++ b/src/arm/deoptimizer-arm.cc @@ -353,7 +353,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - DCHECK(FLAG_enable_ool_constant_pool); + DCHECK(FLAG_enable_embedded_constant_pool); SetFrameSlot(offset, value); } diff --git a/src/arm/frames-arm.cc b/src/arm/frames-arm.cc index af33c75932..3f3c4f04c2 100644 --- a/src/arm/frames-arm.cc +++ b/src/arm/frames-arm.cc @@ -21,7 +21,7 @@ namespace internal { Register JavaScriptFrame::fp_register() { return v8::internal::fp; } Register JavaScriptFrame::context_register() { return cp; } Register JavaScriptFrame::constant_pool_pointer_register() { - DCHECK(FLAG_enable_ool_constant_pool); + DCHECK(FLAG_enable_embedded_constant_pool); return pp; } @@ -29,18 +29,11 @@ Register JavaScriptFrame::constant_pool_pointer_register() { Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; } Register StubFailureTrampolineFrame::context_register() { return cp; } Register StubFailureTrampolineFrame::constant_pool_pointer_register() { - DCHECK(FLAG_enable_ool_constant_pool); + DCHECK(FLAG_enable_embedded_constant_pool); return pp; } -Object*& ExitFrame::constant_pool_slot() const { - DCHECK(FLAG_enable_ool_constant_pool); - const int offset = ExitFrameConstants::kConstantPoolOffset; - return Memory::Object_at(fp() + offset); -} - - } // namespace internal } // namespace v8 diff --git a/src/arm/frames-arm.h b/src/arm/frames-arm.h index 3720a2bde0..ea621c965b 100644 --- a/src/arm/frames-arm.h +++ b/src/arm/frames-arm.h @@ -84,11 +84,11 @@ class EntryFrameConstants : public AllStatic { class ExitFrameConstants : public AllStatic { public: - static const int kFrameSize = 
FLAG_enable_ool_constant_pool ? - 3 * kPointerSize : 2 * kPointerSize; + static const int kFrameSize = + FLAG_enable_embedded_constant_pool ? 3 * kPointerSize : 2 * kPointerSize; - static const int kConstantPoolOffset = FLAG_enable_ool_constant_pool ? - -3 * kPointerSize : 0; + static const int kConstantPoolOffset = + FLAG_enable_embedded_constant_pool ? -3 * kPointerSize : 0; static const int kCodeOffset = -2 * kPointerSize; static const int kSPOffset = -1 * kPointerSize; diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index a4c6be8aea..6aac79ec11 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -96,7 +96,7 @@ class JumpPatchSite BASE_EMBEDDED { // The live registers are: // o r1: the JS function object being called (i.e., ourselves) // o cp: our context -// o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool) +// o pp: our caller's constant pool pointer (if enabled) // o fp: our caller's frame pointer // o sp: stack pointer // o lr: return address @@ -2339,7 +2339,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator, __ bind(&resume_frame); // lr = return address. // fp = caller's frame pointer. - // pp = caller's constant pool (if FLAG_enable_ool_constant_pool), + // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool), // cp = callee's context, // r4 = callee's JS function. __ PushFixedFrame(r4); @@ -2360,10 +2360,9 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator, __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Load the new code object's constant pool pointer. 
- __ ldr(pp, - MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize)); + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); } __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); @@ -5480,7 +5479,7 @@ void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) { static Address GetInterruptImmediateLoadAddress(Address pc) { Address load_address = pc - 2 * Assembler::kInstrSize; - if (!FLAG_enable_ool_constant_pool) { + if (!FLAG_enable_embedded_constant_pool) { DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { // This is an extended constant pool lookup. diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index 8a15bd59e6..0cf8ecccd8 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -113,7 +113,7 @@ bool LCodeGen::GeneratePrologue() { // r1: Callee's JS function. // cp: Callee's context. - // pp: Callee's constant pool pointer (if FLAG_enable_ool_constant_pool) + // pp: Callee's constant pool pointer (if enabled) // fp: Caller's frame pointer. // lr: Caller's pc. @@ -1010,10 +1010,6 @@ void LCodeGen::RecordSafepoint( safepoint.DefinePointerRegister(ToRegister(pointer), zone()); } } - if (FLAG_enable_ool_constant_pool && (kind & Safepoint::kWithRegisters)) { - // Register pp always contains a pointer to the constant pool. - safepoint.DefinePointerRegister(pp, zone()); - } } diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 31d2f73352..61b59edf86 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -691,21 +691,17 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. void MacroAssembler::PushFixedFrame(Register marker_reg) { DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code()); - stm(db_w, sp, (marker_reg.is_valid() ? 
marker_reg.bit() : 0) | - cp.bit() | - (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | - fp.bit() | - lr.bit()); + stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() | + (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) | + fp.bit() | lr.bit()); } void MacroAssembler::PopFixedFrame(Register marker_reg) { DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code()); - ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | - cp.bit() | - (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | - fp.bit() | - lr.bit()); + ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) | cp.bit() | + (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) | + fp.bit() | lr.bit()); } @@ -985,13 +981,20 @@ void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) { } +void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress( + Register code_target_address) { + DCHECK(FLAG_enable_embedded_constant_pool); + ldr(pp, MemOperand(code_target_address, + Code::kConstantPoolOffset - Code::kHeaderSize)); + add(pp, pp, code_target_address); +} + + void MacroAssembler::LoadConstantPoolPointerRegister() { - if (FLAG_enable_ool_constant_pool) { - int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize - - pc_offset() - Instruction::kPCReadOffset; - DCHECK(ImmediateFitsAddrMode2Instruction(constant_pool_offset)); - ldr(pp, MemOperand(pc, constant_pool_offset)); - } + DCHECK(FLAG_enable_embedded_constant_pool); + int entry_offset = pc_offset() + Instruction::kPCReadOffset; + sub(ip, pc, Operand(entry_offset)); + LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip); } @@ -1000,9 +1003,9 @@ void MacroAssembler::StubPrologue() { Push(Smi::FromInt(StackFrame::STUB)); // Adjust FP to point to saved FP. 
add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { LoadConstantPoolPointerRegister(); - set_ool_constant_pool_available(true); + set_constant_pool_available(true); } } @@ -1025,9 +1028,9 @@ void MacroAssembler::Prologue(bool code_pre_aging) { add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); } } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { LoadConstantPoolPointerRegister(); - set_ool_constant_pool_available(true); + set_constant_pool_available(true); } } @@ -1036,7 +1039,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) { // r0-r3: preserved PushFixedFrame(); - if (FLAG_enable_ool_constant_pool && load_constant_pool_pointer_reg) { + if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) { LoadConstantPoolPointerRegister(); } mov(ip, Operand(Smi::FromInt(type))); @@ -1056,9 +1059,9 @@ int MacroAssembler::LeaveFrame(StackFrame::Type type) { // Drop the execution stack down to the frame pointer and restore // the caller frame pointer, return address and constant pool pointer - // (if FLAG_enable_ool_constant_pool). + // (if FLAG_enable_embedded_constant_pool). 
int frame_ends; - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset)); frame_ends = pc_offset(); ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit()); @@ -1084,7 +1087,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { mov(ip, Operand::Zero()); str(ip, MemOperand(fp, ExitFrameConstants::kSPOffset)); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); } mov(ip, Operand(CodeObject())); @@ -1103,7 +1106,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { // fp - ExitFrameConstants::kFrameSize - // DwVfpRegister::kMaxNumRegisters * kDoubleSize, // since the sp slot, code slot and constant pool slot (if - // FLAG_enable_ool_constant_pool) were pushed after the fp. + // FLAG_enable_embedded_constant_pool) were pushed after the fp. } // Reserve place for the return address and stack space and align the frame @@ -1183,7 +1186,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count, #endif // Tear down the exit frame, pop the arguments, and return. - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); } mov(sp, Operand(fp)); @@ -3402,7 +3405,7 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, Label small_constant_pool_load, load_result; ldr(result, MemOperand(ldr_location)); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Check if this is an extended constant pool load. and_(scratch, result, Operand(GetConsantPoolLoadMask())); teq(scratch, Operand(GetConsantPoolLoadPattern())); @@ -3456,7 +3459,7 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, bind(&load_result); // Get the address of the constant. 
- if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { add(result, pp, Operand(result)); } else { add(result, ldr_location, Operand(result)); diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index e6047adc48..7ece4b2fa6 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -437,7 +437,7 @@ class MacroAssembler: public Assembler { } // Push a fixed frame, consisting of lr, fp, constant pool (if - // FLAG_enable_ool_constant_pool), context and JS function / marker id if + // FLAG_enable_embedded_constant_pool), context and JS function / marker id if // marker_reg is a valid register. void PushFixedFrame(Register marker_reg = no_reg); void PopFixedFrame(Register marker_reg = no_reg); @@ -1441,6 +1441,11 @@ class MacroAssembler: public Assembler { void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0, Register scratch1, Label* found); + // Loads the constant pool pointer (pp) register. + void LoadConstantPoolPointerRegisterFromCodeTargetAddress( + Register code_target_address); + void LoadConstantPoolPointerRegister(); + private: void CallCFunctionHelper(Register function, int num_reg_arguments, @@ -1482,9 +1487,6 @@ class MacroAssembler: public Assembler { MemOperand SafepointRegisterSlot(Register reg); MemOperand SafepointRegistersAndDoublesSlot(Register reg); - // Loads the constant pool pointer (pp) register. - void LoadConstantPoolPointerRegister(); - bool generating_stub_; bool has_frame_; // This handle will be patched with the code object on installation. diff --git a/src/arm64/assembler-arm64-inl.h b/src/arm64/assembler-arm64-inl.h index 1432c288d8..bbd44c5f10 100644 --- a/src/arm64/assembler-arm64-inl.h +++ b/src/arm64/assembler-arm64-inl.h @@ -586,14 +586,13 @@ Address Assembler::target_pointer_address_at(Address pc) { // Read/Modify the code target address in the branch/call instruction at pc. 
-Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { return Memory::Address_at(target_pointer_address_at(pc)); } Address Assembler::target_address_at(Address pc, Code* code) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } @@ -665,8 +664,7 @@ void Assembler::deserialization_set_target_internal_reference_at( } -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { Memory::Address_at(target_pointer_address_at(pc)) = target; @@ -685,7 +683,7 @@ void Assembler::set_target_address_at(Address pc, Code* code, Address target, ICacheFlushMode icache_flush_mode) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } diff --git a/src/arm64/assembler-arm64.cc b/src/arm64/assembler-arm64.cc index d351685ffd..5445fe1a1b 100644 --- a/src/arm64/assembler-arm64.cc +++ b/src/arm64/assembler-arm64.cc @@ -3142,20 +3142,6 @@ void Assembler::RecordConstPool(int size) { } -Handle Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. 
- DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - void PatchingAssembler::PatchAdrFar(int64_t target_offset) { // The code at the current instruction should be: // adr rd, 0 diff --git a/src/arm64/assembler-arm64.h b/src/arm64/assembler-arm64.h index 814170936b..7346648a9b 100644 --- a/src/arm64/assembler-arm64.h +++ b/src/arm64/assembler-arm64.h @@ -871,13 +871,10 @@ class Assembler : public AssemblerBase { inline static Address target_pointer_address_at(Address pc); // Read/Modify the code target address in the branch/call instruction at pc. - inline static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool); - inline static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED); + inline static Address target_address_at(Address pc, Address constant_pool); + inline static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); static inline Address target_address_at(Address pc, Code* code); static inline void set_target_address_at(Address pc, Code* code, @@ -1767,6 +1764,8 @@ class Assembler : public AssemblerBase { // Required by V8. void dd(uint32_t data) { dc32(data); } void db(uint8_t data) { dc8(data); } + void dq(uint64_t data) { dc64(data); } + void dp(uintptr_t data) { dc64(data); } // Code generation helpers -------------------------------------------------- @@ -1909,11 +1908,12 @@ class Assembler : public AssemblerBase { // Check if is time to emit a constant pool. void CheckConstPool(bool force_emit, bool require_jump); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. 
- void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } // Returns true if we should emit a veneer as soon as possible for a branch // which can at most reach to specified pc. diff --git a/src/arm64/deoptimizer-arm64.cc b/src/arm64/deoptimizer-arm64.cc index 02a6918325..41a87643f2 100644 --- a/src/arm64/deoptimizer-arm64.cc +++ b/src/arm64/deoptimizer-arm64.cc @@ -354,7 +354,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. UNREACHABLE(); } diff --git a/src/arm64/frames-arm64.cc b/src/arm64/frames-arm64.cc index 56999f9ce0..73c678aaa6 100644 --- a/src/arm64/frames-arm64.cc +++ b/src/arm64/frames-arm64.cc @@ -31,12 +31,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/src/assembler.cc b/src/assembler.cc index 2555bbbfaa..90d08bb682 100644 --- a/src/assembler.cc +++ b/src/assembler.cc @@ -135,7 +135,7 @@ AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size) predictable_code_size_(false), // We may use the assembler without an isolate. 
serializer_enabled_(isolate && isolate->serializer_enabled()), - ool_constant_pool_available_(false) { + constant_pool_available_(false) { if (FLAG_mask_constants_with_cookie && isolate != NULL) { jit_cookie_ = isolate->random_number_generator()->NextInt(); } @@ -1635,6 +1635,208 @@ bool PositionsRecorder::WriteRecordedPositions() { } +ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits, + int double_reach_bits) { + info_[ConstantPoolEntry::INTPTR].entries.reserve(64); + info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits; + info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits; +} + + +ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess( + ConstantPoolEntry::Type type) const { + const PerTypeEntryInfo& info = info_[type]; + + if (info.overflow()) return ConstantPoolEntry::OVERFLOWED; + + int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count; + int dbl_offset = dbl_count * kDoubleSize; + int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count; + int ptr_offset = ptr_count * kPointerSize + dbl_offset; + + if (type == ConstantPoolEntry::DOUBLE) { + // Double overflow detection must take into account the reach for both types + int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits; + if (!is_uintn(dbl_offset, info.regular_reach_bits) || + (ptr_count > 0 && + !is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) { + return ConstantPoolEntry::OVERFLOWED; + } + } else { + DCHECK(type == ConstantPoolEntry::INTPTR); + if (!is_uintn(ptr_offset, info.regular_reach_bits)) { + return ConstantPoolEntry::OVERFLOWED; + } + } + + return ConstantPoolEntry::REGULAR; +} + + +ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry( + ConstantPoolEntry& entry, ConstantPoolEntry::Type type) { + DCHECK(!emitted_label_.is_bound()); + PerTypeEntryInfo& info = info_[type]; + const int entry_size = ConstantPoolEntry::size(type); + bool merged = false; + + if (entry.sharing_ok()) { + // Try to 
merge entries + std::vector::iterator it = info.shared_entries.begin(); + int end = static_cast(info.shared_entries.size()); + for (int i = 0; i < end; i++, it++) { + if ((entry_size == kPointerSize) ? entry.value() == it->value() + : entry.value64() == it->value64()) { + // Merge with found entry. + entry.set_merged_index(i); + merged = true; + break; + } + } + } + + // By definition, merged entries have regular access. + DCHECK(!merged || entry.merged_index() < info.regular_count); + ConstantPoolEntry::Access access = + (merged ? ConstantPoolEntry::REGULAR : NextAccess(type)); + + // Enforce an upper bound on search time by limiting the search to + // unique sharable entries which fit in the regular section. + if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) { + info.shared_entries.push_back(entry); + } else { + info.entries.push_back(entry); + } + + // We're done if we found a match or have already triggered the + // overflow state. + if (merged || info.overflow()) return access; + + if (access == ConstantPoolEntry::REGULAR) { + info.regular_count++; + } else { + info.overflow_start = static_cast(info.entries.size()) - 1; + } + + return access; +} + + +void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm, + ConstantPoolEntry::Type type) { + PerTypeEntryInfo& info = info_[type]; + std::vector& shared_entries = info.shared_entries; + const int entry_size = ConstantPoolEntry::size(type); + int base = emitted_label_.pos(); + DCHECK(base > 0); + int shared_end = static_cast(shared_entries.size()); + std::vector::iterator shared_it = shared_entries.begin(); + for (int i = 0; i < shared_end; i++, shared_it++) { + int offset = assm->pc_offset() - base; + shared_it->set_offset(offset); // Save offset for merged entries. + if (entry_size == kPointerSize) { + assm->dp(shared_it->value()); + } else { + assm->dq(shared_it->value64()); + } + DCHECK(is_uintn(offset, info.regular_reach_bits)); + + // Patch load sequence with correct offset. 
+ assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset, + ConstantPoolEntry::REGULAR, type); + } +} + + +void ConstantPoolBuilder::EmitGroup(Assembler* assm, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + PerTypeEntryInfo& info = info_[type]; + const bool overflow = info.overflow(); + std::vector& entries = info.entries; + std::vector& shared_entries = info.shared_entries; + const int entry_size = ConstantPoolEntry::size(type); + int base = emitted_label_.pos(); + DCHECK(base > 0); + int begin; + int end; + + if (access == ConstantPoolEntry::REGULAR) { + // Emit any shared entries first + EmitSharedEntries(assm, type); + } + + if (access == ConstantPoolEntry::REGULAR) { + begin = 0; + end = overflow ? info.overflow_start : static_cast(entries.size()); + } else { + DCHECK(access == ConstantPoolEntry::OVERFLOWED); + if (!overflow) return; + begin = info.overflow_start; + end = static_cast(entries.size()); + } + + std::vector::iterator it = entries.begin(); + if (begin > 0) std::advance(it, begin); + for (int i = begin; i < end; i++, it++) { + // Update constant pool if necessary and get the entry's offset. + int offset; + ConstantPoolEntry::Access entry_access; + if (!it->is_merged()) { + // Emit new entry + offset = assm->pc_offset() - base; + entry_access = access; + if (entry_size == kPointerSize) { + assm->dp(it->value()); + } else { + assm->dq(it->value64()); + } + } else { + // Retrieve offset from shared entry. + offset = shared_entries[it->merged_index()].offset(); + entry_access = ConstantPoolEntry::REGULAR; + } + + DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED || + is_uintn(offset, info.regular_reach_bits)); + + // Patch load sequence with correct offset. + assm->PatchConstantPoolAccessInstruction(it->position(), offset, + entry_access, type); + } +} + + +// Emit and return position of pool. Zero implies no constant pool. 
+int ConstantPoolBuilder::Emit(Assembler* assm) { + bool emitted = emitted_label_.is_bound(); + bool empty = IsEmpty(); + + if (!emitted) { + // Mark start of constant pool. Align if necessary. + if (!empty) assm->Align(kDoubleSize); + assm->bind(&emitted_label_); + if (!empty) { + // Emit in groups based on access and type. + // Emit doubles first for alignment purposes. + EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE); + EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR); + if (info_[ConstantPoolEntry::DOUBLE].overflow()) { + assm->Align(kDoubleSize); + EmitGroup(assm, ConstantPoolEntry::OVERFLOWED, + ConstantPoolEntry::DOUBLE); + } + if (info_[ConstantPoolEntry::INTPTR].overflow()) { + EmitGroup(assm, ConstantPoolEntry::OVERFLOWED, + ConstantPoolEntry::INTPTR); + } + } + } + + return !empty ? emitted_label_.pos() : 0; +} + + // Platform specific but identical code for all the platforms. diff --git a/src/assembler.h b/src/assembler.h index d5822395b9..2bad1eb749 100644 --- a/src/assembler.h +++ b/src/assembler.h @@ -79,11 +79,11 @@ class AssemblerBase: public Malloced { return (enabled_cpu_features_ & (static_cast(1) << f)) != 0; } - bool is_ool_constant_pool_available() const { - if (FLAG_enable_ool_constant_pool) { - return ool_constant_pool_available_; + bool is_constant_pool_available() const { + if (FLAG_enable_embedded_constant_pool) { + return constant_pool_available_; } else { - // Out-of-line constant pool not supported on this architecture. + // Embedded constant pool not supported on this architecture. 
UNREACHABLE(); return false; } @@ -108,11 +108,11 @@ class AssemblerBase: public Malloced { int buffer_size_; bool own_buffer_; - void set_ool_constant_pool_available(bool available) { - if (FLAG_enable_ool_constant_pool) { - ool_constant_pool_available_ = available; + void set_constant_pool_available(bool available) { + if (FLAG_enable_embedded_constant_pool) { + constant_pool_available_ = available; } else { - // Out-of-line constant pool not supported on this architecture. + // Embedded constant pool not supported on this architecture. UNREACHABLE(); } } @@ -130,7 +130,7 @@ class AssemblerBase: public Malloced { // Indicates whether the constant pool can be accessed, which is only possible // if the pp register points to the current code object's constant pool. - bool ool_constant_pool_available_; + bool constant_pool_available_; // Constant pool. friend class FrameAndConstantPoolScope; @@ -413,9 +413,6 @@ class RelocInfo { RelocInfo(byte* pc, Mode rmode, intptr_t data, Code* host) : pc_(pc), rmode_(rmode), data_(data), host_(host) { } - RelocInfo(byte* pc, double data64) - : pc_(pc), rmode_(NONE64), data64_(data64), host_(NULL) { - } static inline bool IsRealRelocMode(Mode mode) { return mode >= FIRST_REAL_RELOC_MODE && @@ -487,22 +484,11 @@ class RelocInfo { } static inline int ModeMask(Mode mode) { return 1 << mode; } - // Returns true if the first RelocInfo has the same mode and raw data as the - // second one. - static inline bool IsEqual(RelocInfo first, RelocInfo second) { - return first.rmode() == second.rmode() && - (first.rmode() == RelocInfo::NONE64 ? 
- first.raw_data64() == second.raw_data64() : - first.data() == second.data()); - } - // Accessors byte* pc() const { return pc_; } void set_pc(byte* pc) { pc_ = pc; } Mode rmode() const { return rmode_; } intptr_t data() const { return data_; } - double data64() const { return data64_; } - uint64_t raw_data64() { return bit_cast(data64_); } Code* host() const { return host_; } void set_host(Code* host) { host_ = host; } @@ -645,10 +631,7 @@ class RelocInfo { // comment). byte* pc_; Mode rmode_; - union { - intptr_t data_; - double data64_; - }; + intptr_t data_; Code* host_; // External-reference pointers are also split across instruction-pairs // on some platforms, but are accessed via indirect pointers. This location @@ -1171,6 +1154,126 @@ class NullCallWrapper : public CallWrapper { }; +// ----------------------------------------------------------------------------- +// Constant pool support + +class ConstantPoolEntry { + public: + ConstantPoolEntry() {} + ConstantPoolEntry(int position, intptr_t value, bool sharing_ok) + : position_(position), + merged_index_(sharing_ok ? 
SHARING_ALLOWED : SHARING_PROHIBITED), + value_(value) {} + ConstantPoolEntry(int position, double value) + : position_(position), merged_index_(SHARING_ALLOWED), value64_(value) {} + + int position() const { return position_; } + bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; } + bool is_merged() const { return merged_index_ >= 0; } + int merged_index(void) const { + DCHECK(is_merged()); + return merged_index_; + } + void set_merged_index(int index) { + merged_index_ = index; + DCHECK(is_merged()); + } + int offset(void) const { + DCHECK(merged_index_ >= 0); + return merged_index_; + } + void set_offset(int offset) { + DCHECK(offset >= 0); + merged_index_ = offset; + } + intptr_t value() const { return value_; } + uint64_t value64() const { return bit_cast(value64_); } + + enum Type { INTPTR, DOUBLE, NUMBER_OF_TYPES }; + + static int size(Type type) { + return (type == INTPTR) ? kPointerSize : kDoubleSize; + } + + enum Access { REGULAR, OVERFLOWED }; + + private: + int position_; + int merged_index_; + union { + intptr_t value_; + double value64_; + }; + enum { SHARING_PROHIBITED = -2, SHARING_ALLOWED = -1 }; +}; + + +// ----------------------------------------------------------------------------- +// Embedded constant pool support + +class ConstantPoolBuilder BASE_EMBEDDED { + public: + ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits); + + // Add pointer-sized constant to the embedded constant pool + ConstantPoolEntry::Access AddEntry(int position, intptr_t value, + bool sharing_ok) { + ConstantPoolEntry entry(position, value, sharing_ok); + return AddEntry(entry, ConstantPoolEntry::INTPTR); + } + + // Add double constant to the embedded constant pool + ConstantPoolEntry::Access AddEntry(int position, double value) { + ConstantPoolEntry entry(position, value); + return AddEntry(entry, ConstantPoolEntry::DOUBLE); + } + + // Previews the access type required for the next new entry to be added. 
+ ConstantPoolEntry::Access NextAccess(ConstantPoolEntry::Type type) const; + + bool IsEmpty() { + return info_[ConstantPoolEntry::INTPTR].entries.empty() && + info_[ConstantPoolEntry::INTPTR].shared_entries.empty() && + info_[ConstantPoolEntry::DOUBLE].entries.empty() && + info_[ConstantPoolEntry::DOUBLE].shared_entries.empty(); + } + + // Emit the constant pool. Invoke only after all entries have been + // added and all instructions have been emitted. + // Returns position of the emitted pool (zero implies no constant pool). + int Emit(Assembler* assm); + + // Returns the label associated with the start of the constant pool. + // Linking to this label in the function prologue may provide an + // efficient means of constant pool pointer register initialization + // on some architectures. + inline Label* EmittedPosition() { return &emitted_label_; } + + private: + ConstantPoolEntry::Access AddEntry(ConstantPoolEntry& entry, + ConstantPoolEntry::Type type); + void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type); + void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type); + + struct PerTypeEntryInfo { + PerTypeEntryInfo() : regular_count(0), overflow_start(-1) {} + bool overflow() const { + return (overflow_start >= 0 && + overflow_start < static_cast(entries.size())); + } + int regular_reach_bits; + int regular_count; + int overflow_start; + std::vector entries; + std::vector shared_entries; + }; + + Label emitted_label_; // Records pc_offset of emitted pool + PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES]; +}; + + } } // namespace v8::internal #endif // V8_ASSEMBLER_H_ diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc index f6d630fa9a..192a920241 100644 --- a/src/compiler/arm/code-generator-arm.cc +++ b/src/compiler/arm/code-generator-arm.cc @@ -926,7 +926,7 @@ void CodeGenerator::AssemblePrologue() { int stack_slots = frame()->GetSpillSlotCount(); if 
(descriptor->kind() == CallDescriptor::kCallAddress) { bool saved_pp; - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { __ Push(lr, fp, pp); // Adjust FP to point to saved FP. __ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); diff --git a/src/compiler/ppc/code-generator-ppc.cc b/src/compiler/ppc/code-generator-ppc.cc index d7b038394d..f4fac6daff 100644 --- a/src/compiler/ppc/code-generator-ppc.cc +++ b/src/compiler/ppc/code-generator-ppc.cc @@ -1269,8 +1269,16 @@ void CodeGenerator::AssemblePrologue() { int register_save_area_size = 0; RegList frame_saves = fp.bit(); __ mflr(r0); - __ Push(r0, fp); - __ mr(fp, sp); + if (FLAG_enable_embedded_constant_pool) { + __ Push(r0, fp, kConstantPoolRegister); + // Adjust FP to point to saved FP. + __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset)); + register_save_area_size += kPointerSize; + frame_saves |= kConstantPoolRegister.bit(); + } else { + __ Push(r0, fp); + __ mr(fp, sp); + } // Save callee-saved registers. const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; for (int i = Register::kNumRegisters - 1; i >= 0; i--) { @@ -1323,6 +1331,9 @@ void CodeGenerator::AssembleReturn() { } // Restore registers. RegList frame_saves = fp.bit(); + if (FLAG_enable_embedded_constant_pool) { + frame_saves |= kConstantPoolRegister.bit(); + } const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves; if (saves != 0) { __ MultiPop(saves); diff --git a/src/debug.cc b/src/debug.cc index eb3b408541..9ba288581d 100644 --- a/src/debug.cc +++ b/src/debug.cc @@ -1709,7 +1709,7 @@ static void RedirectActivationsToRecompiledCodeOnThread( reinterpret_cast(new_pc)); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Update constant pool pointer for new code. 
frame->set_constant_pool(new_code->constant_pool()); } diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index 35f340e8a3..eaf33869ae 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -992,7 +992,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, DCHECK(!is_bottommost || !has_alignment_padding_ || (fp_value & kPointerSize) != 0); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // For the bottommost output frame the constant pool pointer can be gotten // from the input frame. For subsequent output frames, it can be read from // the previous frame. @@ -1077,7 +1077,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, output_frame->SetPc(pc_value); // Update constant pool. - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { intptr_t constant_pool_value = reinterpret_cast(non_optimized_code->constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1170,7 +1170,7 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, fp_value, output_offset, value); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Read the caller's constant pool from the previous frame. 
output_offset -= kPointerSize; value = output_[frame_index - 1]->GetConstantPool(); @@ -1225,7 +1225,7 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, adaptor_trampoline->instruction_start() + isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value()); output_frame->SetPc(pc_value); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { intptr_t constant_pool_value = reinterpret_cast(adaptor_trampoline->constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1304,7 +1304,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, fp_value, output_offset, value); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Read the caller's constant pool from the previous frame. output_offset -= kPointerSize; value = output_[frame_index - 1]->GetConstantPool(); @@ -1392,7 +1392,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, construct_stub->instruction_start() + isolate_->heap()->construct_stub_deopt_pc_offset()->value()); output_frame->SetPc(pc); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { intptr_t constant_pool_value = reinterpret_cast(construct_stub->constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1417,7 +1417,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, // We need 1 stack entry for the return address and enough entries for the // StackFrame::INTERNAL (FP, context, frame type, code object and constant - // pool (if FLAG_enable_ool_constant_pool)- see MacroAssembler::EnterFrame). + // pool (if enabled)- see MacroAssembler::EnterFrame). // For a setter stub frame we need one additional entry for the implicit // return value, see StoreStubCompiler::CompileStoreViaSetter. 
unsigned fixed_frame_entries = @@ -1467,7 +1467,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, fp_value, output_offset, value); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Read the caller's constant pool from the previous frame. output_offset -= kPointerSize; value = output_[frame_index - 1]->GetConstantPool(); @@ -1534,7 +1534,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator, intptr_t pc = reinterpret_cast( accessor_stub->instruction_start() + offset->value()); output_frame->SetPc(pc); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { intptr_t constant_pool_value = reinterpret_cast(accessor_stub->constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1641,7 +1641,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, top_address + output_frame_offset, output_frame_offset, value); } - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { // Read the caller's constant pool from the input frame. 
input_frame_offset -= kPointerSize; value = input_->GetFrameSlot(input_frame_offset); @@ -1783,7 +1783,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator, DCHECK(trampoline != NULL); output_frame->SetPc(reinterpret_cast( trampoline->instruction_start())); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { Register constant_pool_reg = StubFailureTrampolineFrame::constant_pool_pointer_register(); intptr_t constant_pool_value = diff --git a/src/factory.cc b/src/factory.cc index f51b19cfa6..4aca885456 100644 --- a/src/factory.cc +++ b/src/factory.cc @@ -126,28 +126,6 @@ Handle Factory::NewFixedDoubleArrayWithHoles( } -Handle Factory::NewConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small) { - DCHECK(small.total_count() > 0); - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateConstantPoolArray(small), - ConstantPoolArray); -} - - -Handle Factory::NewExtendedConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small, - const ConstantPoolArray::NumberOfEntries& extended) { - DCHECK(small.total_count() > 0); - DCHECK(extended.total_count() > 0); - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateExtendedConstantPoolArray(small, extended), - ConstantPoolArray); -} - - Handle Factory::NewOrderedHashSet() { return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kMinCapacity); } @@ -1024,14 +1002,6 @@ Handle Factory::CopyFixedDoubleArray( } -Handle Factory::CopyConstantPoolArray( - Handle array) { - CALL_HEAP_FUNCTION(isolate(), - isolate()->heap()->CopyConstantPoolArray(*array), - ConstantPoolArray); -} - - Handle Factory::NewNumber(double value, PretenureFlag pretenure) { // We need to distinguish the minus zero value and this cannot be @@ -1460,8 +1430,6 @@ Handle Factory::NewCode(const CodeDesc& desc, int prologue_offset, bool is_debug) { Handle reloc_info = NewByteArray(desc.reloc_size, TENURED); - Handle constant_pool = - 
desc.origin->NewConstantPool(isolate()); // Compute size. int body_size = RoundUp(desc.instr_size, kObjectAlignment); @@ -1488,6 +1456,9 @@ Handle Factory::NewCode(const CodeDesc& desc, code->set_next_code_link(*undefined_value()); code->set_handler_table(*empty_fixed_array(), SKIP_WRITE_BARRIER); code->set_prologue_offset(prologue_offset); + if (FLAG_enable_embedded_constant_pool) { + code->set_constant_pool_offset(desc.instr_size - desc.constant_pool_size); + } if (code->kind() == Code::OPTIMIZED_FUNCTION) { code->set_marked_for_deoptimization(false); } @@ -1497,9 +1468,6 @@ Handle Factory::NewCode(const CodeDesc& desc, code->set_has_debug_break_slots(true); } - desc.origin->PopulateConstantPool(*constant_pool); - code->set_constant_pool(*constant_pool); - // Allow self references to created code object by patching the handle to // point to the newly allocated Code object. if (!self_ref.is_null()) *(self_ref.location()) = *code; diff --git a/src/factory.h b/src/factory.h index bb25651c50..8b6fadab6f 100644 --- a/src/factory.h +++ b/src/factory.h @@ -46,13 +46,6 @@ class Factory final { int size, PretenureFlag pretenure = NOT_TENURED); - Handle NewConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small); - - Handle NewExtendedConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small, - const ConstantPoolArray::NumberOfEntries& extended); - Handle NewOrderedHashSet(); Handle NewOrderedHashMap(); @@ -338,9 +331,6 @@ class Factory final { Handle CopyFixedDoubleArray( Handle array); - Handle CopyConstantPoolArray( - Handle array); - // Numbers (e.g. literals) are pretenured by the parser. // The return value may be a smi or a heap number. 
Handle NewNumber(double value, diff --git a/src/flag-definitions.h b/src/flag-definitions.h index d9b264d167..175b760aab 100644 --- a/src/flag-definitions.h +++ b/src/flag-definitions.h @@ -1037,8 +1037,8 @@ DEFINE_INT(dump_allocations_digest_at_alloc, 0, #define FLAG FLAG_READONLY // assembler.h -DEFINE_BOOL(enable_ool_constant_pool, V8_OOL_CONSTANT_POOL, - "enable use of out-of-line constant pools (ARM only)") +DEFINE_BOOL(enable_embedded_constant_pool, V8_EMBEDDED_CONSTANT_POOL, + "enable use of embedded constant pools (ARM/PPC only)") DEFINE_BOOL(unbox_double_fields, V8_DOUBLE_FIELDS_UNBOXING, "enable in-object double fields unboxing (64-bit only)") diff --git a/src/frames.cc b/src/frames.cc index 2d4f379801..dc67b41ad8 100644 --- a/src/frames.cc +++ b/src/frames.cc @@ -321,9 +321,6 @@ bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const { if (!IsValidStackAddress(sp)) return false; StackFrame::State state; ExitFrame::FillState(fp, sp, &state); - if (!IsValidStackAddress(reinterpret_cast
(state.pc_address))) { - return false; - } return *state.pc_address != NULL; } @@ -385,9 +382,8 @@ static bool GcSafeCodeContains(HeapObject* object, Address addr); #endif -void StackFrame::IteratePc(ObjectVisitor* v, - Address* pc_address, - Code* holder) { +void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address, + Address* constant_pool_address, Code* holder) { Address pc = *pc_address; DCHECK(GcSafeCodeContains(holder, pc)); unsigned pc_offset = static_cast(pc - holder->instruction_start()); @@ -397,6 +393,9 @@ void StackFrame::IteratePc(ObjectVisitor* v, holder = reinterpret_cast(code); pc = holder->instruction_start() + pc_offset; *pc_address = pc; + if (FLAG_enable_embedded_constant_pool && constant_pool_address) { + *constant_pool_address = holder->constant_pool(); + } } } @@ -506,7 +505,7 @@ void ExitFrame::ComputeCallerState(State* state) const { state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset); state->pc_address = ResolveReturnAddressLocation( reinterpret_cast(fp() + ExitFrameConstants::kCallerPCOffset)); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { state->constant_pool_address = reinterpret_cast( fp() + ExitFrameConstants::kConstantPoolOffset); } @@ -521,11 +520,8 @@ void ExitFrame::SetCallerFp(Address caller_fp) { void ExitFrame::Iterate(ObjectVisitor* v) const { // The arguments are traversed as part of the expression stack of // the calling frame. 
- IteratePc(v, pc_address(), LookupCode()); + IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); v->VisitPointer(&code_slot()); - if (FLAG_enable_ool_constant_pool) { - v->VisitPointer(&constant_pool_slot()); - } } @@ -553,8 +549,11 @@ void ExitFrame::FillState(Address fp, Address sp, State* state) { state->fp = fp; state->pc_address = ResolveReturnAddressLocation( reinterpret_cast(sp - 1 * kPCOnStackSize)); - state->constant_pool_address = - reinterpret_cast(fp + ExitFrameConstants::kConstantPoolOffset); + // The constant pool recorded in the exit frame is not associated + // with the pc in this state (the return address into a C entry + // stub). ComputeCallerState will retrieve the constant pool + // together with the associated caller pc. + state->constant_pool_address = NULL; } @@ -663,7 +662,7 @@ void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const { } // Visit the return address in the callee and incoming arguments. - IteratePc(v, pc_address(), code); + IteratePc(v, pc_address(), constant_pool_address(), code); // Visit the context in stub frame and JavaScript frame. // Visit the function in JavaScript frame. @@ -1290,7 +1289,7 @@ void ArgumentsAdaptorFrame::Print(StringStream* accumulator, void EntryFrame::Iterate(ObjectVisitor* v) const { - IteratePc(v, pc_address(), LookupCode()); + IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); } @@ -1304,7 +1303,7 @@ void StandardFrame::IterateExpressions(ObjectVisitor* v) const { void JavaScriptFrame::Iterate(ObjectVisitor* v) const { IterateExpressions(v); - IteratePc(v, pc_address(), LookupCode()); + IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); } @@ -1312,7 +1311,7 @@ void InternalFrame::Iterate(ObjectVisitor* v) const { // Internal frames only have object pointers on the expression stack // as they never have any arguments. 
IterateExpressions(v); - IteratePc(v, pc_address(), LookupCode()); + IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); } @@ -1325,7 +1324,7 @@ void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const { const int offset = StandardFrameConstants::kLastObjectOffset; limit = &Memory::Object_at(fp() + offset) + 1; v->VisitPointers(base, limit); - IteratePc(v, pc_address(), LookupCode()); + IteratePc(v, pc_address(), constant_pool_address(), LookupCode()); } diff --git a/src/frames.h b/src/frames.h index 6acf3c6afa..b4247c9bdb 100644 --- a/src/frames.h +++ b/src/frames.h @@ -114,25 +114,23 @@ class StackHandler BASE_EMBEDDED { class StandardFrameConstants : public AllStatic { public: // Fixed part of the frame consists of return address, caller fp, - // constant pool (if FLAG_enable_ool_constant_pool), context, and function. - // StandardFrame::IterateExpressions assumes that kLastObjectOffset is the - // last object pointer. + // constant pool (if FLAG_enable_embedded_constant_pool), context, and + // function. StandardFrame::IterateExpressions assumes that kLastObjectOffset + // is the last object pointer. static const int kCPSlotSize = - FLAG_enable_ool_constant_pool ? kPointerSize : 0; + FLAG_enable_embedded_constant_pool ? kPointerSize : 0; static const int kFixedFrameSizeFromFp = 2 * kPointerSize + kCPSlotSize; - static const int kFixedFrameSize = kPCOnStackSize + kFPOnStackSize + - kFixedFrameSizeFromFp; - static const int kExpressionsOffset = -3 * kPointerSize - kCPSlotSize; - static const int kMarkerOffset = -2 * kPointerSize - kCPSlotSize; - static const int kContextOffset = -1 * kPointerSize - kCPSlotSize; - static const int kConstantPoolOffset = FLAG_enable_ool_constant_pool ? 
- -1 * kPointerSize : 0; - static const int kCallerFPOffset = 0 * kPointerSize; - static const int kCallerPCOffset = +1 * kFPOnStackSize; - static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize; + static const int kFixedFrameSize = + kPCOnStackSize + kFPOnStackSize + kFixedFrameSizeFromFp; + static const int kExpressionsOffset = -3 * kPointerSize - kCPSlotSize; + static const int kMarkerOffset = -2 * kPointerSize - kCPSlotSize; + static const int kContextOffset = -1 * kPointerSize - kCPSlotSize; + static const int kConstantPoolOffset = kCPSlotSize ? -1 * kPointerSize : 0; + static const int kCallerFPOffset = 0 * kPointerSize; + static const int kCallerPCOffset = +1 * kFPOnStackSize; + static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize; - static const int kLastObjectOffset = FLAG_enable_ool_constant_pool ? - kConstantPoolOffset : kContextOffset; + static const int kLastObjectOffset = kContextOffset; }; @@ -215,8 +213,8 @@ class StackFrame BASE_EMBEDDED { void set_pc(Address pc) { *pc_address() = pc; } Address constant_pool() const { return *constant_pool_address(); } - void set_constant_pool(ConstantPoolArray* constant_pool) { - *constant_pool_address() = reinterpret_cast
(constant_pool); + void set_constant_pool(Address constant_pool) { + *constant_pool_address() = constant_pool; } virtual void SetCallerFp(Address caller_fp) = 0; @@ -258,7 +256,8 @@ class StackFrame BASE_EMBEDDED { unsigned* stack_slots); virtual void Iterate(ObjectVisitor* v) const = 0; - static void IteratePc(ObjectVisitor* v, Address* pc_address, Code* holder); + static void IteratePc(ObjectVisitor* v, Address* pc_address, + Address* constant_pool_address, Code* holder); // Sets a callback function for return-address rewriting profilers // to resolve the location of a return address to the location of the @@ -380,7 +379,6 @@ class ExitFrame: public StackFrame { virtual Code* unchecked_code() const; Object*& code_slot() const; - Object*& constant_pool_slot() const; // Garbage collection support. virtual void Iterate(ObjectVisitor* v) const; diff --git a/src/globals.h b/src/globals.h index 218535cde3..f01c10044e 100644 --- a/src/globals.h +++ b/src/globals.h @@ -74,8 +74,13 @@ namespace internal { #endif #endif -// Determine whether the architecture uses an out-of-line constant pool. -#define V8_OOL_CONSTANT_POOL 0 +// Determine whether the architecture uses an embedded constant pool +// (contiguous constant pool embedded in code object). +#if V8_TARGET_ARCH_PPC +#define V8_EMBEDDED_CONSTANT_POOL 1 +#else +#define V8_EMBEDDED_CONSTANT_POOL 0 +#endif #ifdef V8_TARGET_ARCH_ARM // Set stack limit lower for ARM than for other architectures because @@ -510,13 +515,15 @@ enum ParseRestriction { // A CodeDesc describes a buffer holding instructions and relocation // information. The instructions start at the beginning of the buffer // and grow forward, the relocation information starts at the end of -// the buffer and grows backward. +// the buffer and grows backward. A constant pool may exist at the +// end of the instructions. 
// -// |<--------------- buffer_size ---------------->| -// |<-- instr_size -->| |<-- reloc_size -->| -// +==================+========+==================+ -// | instructions | free | reloc info | -// +==================+========+==================+ +// |<--------------- buffer_size ----------------------------------->| +// |<------------- instr_size ---------->| |<-- reloc_size -->| +// | |<- const_pool_size ->| | +// +=====================================+========+==================+ +// | instructions | data | free | reloc info | +// +=====================================+========+==================+ // ^ // | // buffer @@ -526,6 +533,7 @@ struct CodeDesc { int buffer_size; int instr_size; int reloc_size; + int constant_pool_size; Assembler* origin; }; diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc index 2cb8e3ff76..2ee0c2df17 100644 --- a/src/heap-snapshot-generator.cc +++ b/src/heap-snapshot-generator.cc @@ -1503,9 +1503,6 @@ void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) { SetInternalReference(code, entry, "gc_metadata", code->gc_metadata(), Code::kGCMetadataOffset); - SetInternalReference(code, entry, - "constant_pool", code->constant_pool(), - Code::kConstantPoolOffset); if (code->kind() == Code::OPTIMIZED_FUNCTION) { SetWeakReference(code, entry, "next_code_link", code->next_code_link(), diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h index 0f2d0f22a3..b170e04798 100644 --- a/src/heap/heap-inl.h +++ b/src/heap/heap-inl.h @@ -149,12 +149,6 @@ AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { } -AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { - if (src->length() == 0) return src; - return CopyConstantPoolArrayWithMap(src, src->map()); -} - - AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space, AllocationAlignment alignment) { diff --git a/src/heap/heap.cc b/src/heap/heap.cc index 08eaf20308..e00107a04d 100644 
--- a/src/heap/heap.cc +++ b/src/heap/heap.cc @@ -1974,10 +1974,6 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); // NOLINT -STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) == - 0); // NOLINT -STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & - kDoubleAlignmentMask) == 0); // NOLINT STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == 0); // NOLINT #ifdef V8_HOST_ARCH_32_BIT @@ -2617,8 +2613,6 @@ bool Heap::CreateInitialMaps() { ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); - ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel, - constant_pool_array); #undef ALLOCATE_PARTIAL_MAP } @@ -2655,13 +2649,6 @@ bool Heap::CreateInitialMaps() { } set_empty_descriptor_array(DescriptorArray::cast(obj)); - // Allocate the constant pool array. - { - AllocationResult allocation = AllocateEmptyConstantPoolArray(); - if (!allocation.To(&obj)) return false; - } - set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); - // Fix the instance_descriptors for the existing maps. 
meta_map()->set_code_cache(empty_fixed_array()); meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); @@ -2698,16 +2685,6 @@ bool Heap::CreateInitialMaps() { null_map()->set_layout_descriptor(LayoutDescriptor::FastPointerLayout()); } - constant_pool_array_map()->set_code_cache(empty_fixed_array()); - constant_pool_array_map()->set_dependent_code( - DependentCode::cast(empty_fixed_array())); - constant_pool_array_map()->set_raw_transitions(Smi::FromInt(0)); - constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array()); - if (FLAG_unbox_double_fields) { - constant_pool_array_map()->set_layout_descriptor( - LayoutDescriptor::FastPointerLayout()); - } - // Fix prototype object for existing maps. meta_map()->set_prototype(null_value()); meta_map()->set_constructor_or_backpointer(null_value()); @@ -2721,9 +2698,6 @@ bool Heap::CreateInitialMaps() { null_map()->set_prototype(null_value()); null_map()->set_constructor_or_backpointer(null_value()); - constant_pool_array_map()->set_prototype(null_value()); - constant_pool_array_map()->set_constructor_or_backpointer(null_value()); - { // Map allocation #define ALLOCATE_MAP(instance_type, size, field_name) \ { \ @@ -3822,16 +3796,6 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) { AllocationResult Heap::CopyCode(Code* code) { AllocationResult allocation; - HeapObject* new_constant_pool; - if (FLAG_enable_ool_constant_pool && - code->constant_pool() != empty_constant_pool_array()) { - // Copy the constant pool, since edits to the copied code may modify - // the constant pool. - allocation = CopyConstantPoolArray(code->constant_pool()); - if (!allocation.To(&new_constant_pool)) return allocation; - } else { - new_constant_pool = empty_constant_pool_array(); - } HeapObject* result = NULL; // Allocate an object the same size as the code object. 
@@ -3845,9 +3809,6 @@ AllocationResult Heap::CopyCode(Code* code) { CopyBlock(new_addr, old_addr, obj_size); Code* new_code = Code::cast(result); - // Update the constant pool. - new_code->set_constant_pool(new_constant_pool); - // Relocate the copy. DCHECK(IsAligned(bit_cast(new_code->address()), kCodeAlignment)); DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || @@ -3858,24 +3819,14 @@ AllocationResult Heap::CopyCode(Code* code) { AllocationResult Heap::CopyCode(Code* code, Vector reloc_info) { - // Allocate ByteArray and ConstantPoolArray before the Code object, so that we - // do not risk leaving uninitialized Code object (and breaking the heap). + // Allocate ByteArray before the Code object, so that we do not risk + // leaving uninitialized Code object (and breaking the heap). ByteArray* reloc_info_array; { AllocationResult allocation = AllocateByteArray(reloc_info.length(), TENURED); if (!allocation.To(&reloc_info_array)) return allocation; } - HeapObject* new_constant_pool; - if (FLAG_enable_ool_constant_pool && - code->constant_pool() != empty_constant_pool_array()) { - // Copy the constant pool, since edits to the copied code may modify - // the constant pool. - AllocationResult allocation = CopyConstantPoolArray(code->constant_pool()); - if (!allocation.To(&new_constant_pool)) return allocation; - } else { - new_constant_pool = empty_constant_pool_array(); - } int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); @@ -3900,9 +3851,6 @@ AllocationResult Heap::CopyCode(Code* code, Vector reloc_info) { Code* new_code = Code::cast(result); new_code->set_relocation_info(reloc_info_array); - // Update constant pool. - new_code->set_constant_pool(new_constant_pool); - // Copy patched rinfo. 
CopyBytes(new_code->relocation_start(), reloc_info.start(), static_cast(reloc_info.length())); @@ -4395,31 +4343,6 @@ AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, } -AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, - Map* map) { - HeapObject* obj; - if (src->is_extended_layout()) { - ConstantPoolArray::NumberOfEntries small(src, - ConstantPoolArray::SMALL_SECTION); - ConstantPoolArray::NumberOfEntries extended( - src, ConstantPoolArray::EXTENDED_SECTION); - AllocationResult allocation = - AllocateExtendedConstantPoolArray(small, extended); - if (!allocation.To(&obj)) return allocation; - } else { - ConstantPoolArray::NumberOfEntries small(src, - ConstantPoolArray::SMALL_SECTION); - AllocationResult allocation = AllocateConstantPoolArray(small); - if (!allocation.To(&obj)) return allocation; - } - obj->set_map_no_write_barrier(map); - CopyBlock(obj->address() + ConstantPoolArray::kFirstEntryOffset, - src->address() + ConstantPoolArray::kFirstEntryOffset, - src->size() - ConstantPoolArray::kFirstEntryOffset); - return obj; -} - - AllocationResult Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { if (length < 0 || length > FixedArray::kMaxLength) { @@ -4508,64 +4431,6 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length, } -AllocationResult Heap::AllocateConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small) { - CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); - int size = ConstantPoolArray::SizeFor(small); - AllocationSpace space = SelectSpace(size, TENURED); - - HeapObject* object = nullptr; - { - AllocationResult allocation = - AllocateRaw(size, space, OLD_SPACE, kDoubleAligned); - if (!allocation.To(&object)) return allocation; - } - object->set_map_no_write_barrier(constant_pool_array_map()); - - ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); - constant_pool->Init(small); - constant_pool->ClearPtrEntries(isolate()); - 
return constant_pool; -} - - -AllocationResult Heap::AllocateExtendedConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small, - const ConstantPoolArray::NumberOfEntries& extended) { - CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType)); - CHECK(extended.are_in_range(0, kMaxInt)); - int size = ConstantPoolArray::SizeForExtended(small, extended); - AllocationSpace space = SelectSpace(size, TENURED); - - HeapObject* object = nullptr; - { - AllocationResult allocation = - AllocateRaw(size, space, OLD_SPACE, kDoubleAligned); - if (!allocation.To(&object)) return allocation; - } - object->set_map_no_write_barrier(constant_pool_array_map()); - - ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); - constant_pool->InitExtended(small, extended); - constant_pool->ClearPtrEntries(isolate()); - return constant_pool; -} - - -AllocationResult Heap::AllocateEmptyConstantPoolArray() { - ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0); - int size = ConstantPoolArray::SizeFor(small); - HeapObject* result = NULL; - { - AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); - if (!allocation.To(&result)) return allocation; - } - result->set_map_no_write_barrier(constant_pool_array_map()); - ConstantPoolArray::cast(result)->Init(small); - return result; -} - - AllocationResult Heap::AllocateSymbol() { // Statically ensure that it is safe to allocate symbols in paged spaces. 
STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); diff --git a/src/heap/heap.h b/src/heap/heap.h index 1d2afb96fe..8850bf2036 100644 --- a/src/heap/heap.h +++ b/src/heap/heap.h @@ -53,7 +53,6 @@ namespace internal { V(Map, scope_info_map, ScopeInfoMap) \ V(Map, fixed_cow_array_map, FixedCOWArrayMap) \ V(Map, fixed_double_array_map, FixedDoubleArrayMap) \ - V(Map, constant_pool_array_map, ConstantPoolArrayMap) \ V(Map, weak_cell_map, WeakCellMap) \ V(Map, one_byte_string_map, OneByteStringMap) \ V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \ @@ -61,7 +60,6 @@ namespace internal { V(FixedArray, empty_fixed_array, EmptyFixedArray) \ V(ByteArray, empty_byte_array, EmptyByteArray) \ V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ - V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \ /* The roots above this line should be boring from a GC point of view. */ \ /* This means they are never in new space and never on a page that is */ \ /* being compacted. */ \ @@ -352,7 +350,6 @@ namespace internal { V(ScopeInfoMap) \ V(FixedCOWArrayMap) \ V(FixedDoubleArrayMap) \ - V(ConstantPoolArrayMap) \ V(WeakCellMap) \ V(NoInterceptorResultSentinel) \ V(HashTableMap) \ @@ -360,7 +357,6 @@ namespace internal { V(EmptyFixedArray) \ V(EmptyByteArray) \ V(EmptyDescriptorArray) \ - V(EmptyConstantPoolArray) \ V(ArgumentsMarker) \ V(SymbolMap) \ V(SloppyArgumentsElementsMap) \ @@ -1974,12 +1970,6 @@ class Heap { MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray( FixedDoubleArray* src); - // Make a copy of src and return it. Returns - // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray( - ConstantPoolArray* src); - - // Computes a single character string where the character has code. // A cache is used for one-byte (Latin1) codes. 
MUST_USE_RESULT AllocationResult @@ -1988,17 +1978,6 @@ class Heap { // Allocate a symbol in old space. MUST_USE_RESULT AllocationResult AllocateSymbol(); - // Make a copy of src, set the map, and return the copy. - MUST_USE_RESULT AllocationResult - CopyConstantPoolArrayWithMap(ConstantPoolArray* src, Map* map); - - MUST_USE_RESULT AllocationResult AllocateConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small); - - MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray( - const ConstantPoolArray::NumberOfEntries& small, - const ConstantPoolArray::NumberOfEntries& extended); - // Allocates an external array of the specified length and type. MUST_USE_RESULT AllocationResult AllocateExternalArray(int length, ExternalArrayType array_type, @@ -2038,9 +2017,6 @@ class Heap { MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(ExternalArrayType array_type); - // Allocate empty constant pool array. - MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray(); - // Allocate a tenured simple cell. MUST_USE_RESULT AllocationResult AllocateCell(Object* value); diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc index 521be78e2e..31af80553f 100644 --- a/src/heap/mark-compact.cc +++ b/src/heap/mark-compact.cc @@ -2736,30 +2736,6 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, SlotsBuffer::IGNORE_OVERFLOW); } - } else if (dst->IsConstantPoolArray()) { - // We special case ConstantPoolArrays since they could contain integers - // value entries which look like tagged pointers. - // TODO(mstarzinger): restructure this code to avoid this special-casing. 
- ConstantPoolArray* array = ConstantPoolArray::cast(dst); - ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); - while (!code_iter.is_finished()) { - Address code_entry_slot = - dst_addr + array->OffsetOfElementAt(code_iter.next_index()); - Address code_entry = Memory::Address_at(code_entry_slot); - - if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { - SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, - SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, - SlotsBuffer::IGNORE_OVERFLOW); - } - } - ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); - while (!heap_iter.is_finished()) { - Address heap_slot = - dst_addr + array->OffsetOfElementAt(heap_iter.next_index()); - Object* value = Memory::Object_at(heap_slot); - RecordMigratedSlot(value, heap_slot); - } } } else if (dest == CODE_SPACE) { PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); @@ -3150,15 +3126,6 @@ bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { InstanceType type = object->map()->instance_type(); // Slots in maps and code can't be invalid because they are never shrunk. if (type == MAP_TYPE || type == CODE_TYPE) return true; - if (type == CONSTANT_POOL_ARRAY_TYPE) { - if (FLAG_enable_ool_constant_pool) { - // TODO(ishell): implement constant pool support if we ever enable it. - UNIMPLEMENTED(); - } else { - // This is left here just to make constant pool unit tests work. - return true; - } - } // Consider slots in objects that contain ONLY raw data as invalid. 
if (object->MayContainRawValues()) return false; if (FLAG_unbox_double_fields) { @@ -3384,6 +3351,10 @@ static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v, rinfo.Visit(isolate, v); break; } + case SlotsBuffer::OBJECT_SLOT: { + v->VisitPointer(reinterpret_cast(addr)); + break; + } default: UNREACHABLE(); break; @@ -4553,9 +4524,20 @@ void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) { if (target_page->IsEvacuationCandidate() && (rinfo->host() == NULL || !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { + Address addr = rinfo->pc(); + SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode); + if (rinfo->IsInConstantPool()) { + addr = rinfo->constant_pool_entry_address(); + if (RelocInfo::IsCodeTarget(rmode)) { + slot_type = SlotsBuffer::CODE_ENTRY_SLOT; + } else { + DCHECK(RelocInfo::IsEmbeddedObject(rmode)); + slot_type = SlotsBuffer::OBJECT_SLOT; + } + } bool success = SlotsBuffer::AddTo( &slots_buffer_allocator_, target_page->slots_buffer_address(), - SlotTypeForRMode(rmode), rinfo->pc(), SlotsBuffer::FAIL_ON_OVERFLOW); + slot_type, addr, SlotsBuffer::FAIL_ON_OVERFLOW); if (!success) { EvictPopularEvacuationCandidate(target_page); } diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h index 015ac31154..a5a5f02ee7 100644 --- a/src/heap/mark-compact.h +++ b/src/heap/mark-compact.h @@ -330,6 +330,7 @@ class SlotsBuffer { enum SlotType { EMBEDDED_OBJECT_SLOT, + OBJECT_SLOT, RELOCATED_CODE_OBJECT, CELL_TARGET_SLOT, CODE_TARGET_SLOT, @@ -343,6 +344,8 @@ class SlotsBuffer { switch (type) { case EMBEDDED_OBJECT_SLOT: return "EMBEDDED_OBJECT_SLOT"; + case OBJECT_SLOT: + return "OBJECT_SLOT"; case RELOCATED_CODE_OBJECT: return "RELOCATED_CODE_OBJECT"; case CELL_TARGET_SLOT: diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h index 3ef51866b6..d1d52a4c7a 100644 --- a/src/heap/objects-visiting-inl.h +++ b/src/heap/objects-visiting-inl.h @@ -137,8 +137,6 @@ void 
StaticMarkingVisitor::Initialize() { table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit); - table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray); - table_.Register(kVisitNativeContext, &VisitNativeContext); table_.Register(kVisitAllocationSite, &VisitAllocationSite); @@ -447,34 +445,6 @@ void StaticMarkingVisitor::VisitSharedFunctionInfo( } -template -void StaticMarkingVisitor::VisitConstantPoolArray( - Map* map, HeapObject* object) { - Heap* heap = map->GetHeap(); - ConstantPoolArray* array = ConstantPoolArray::cast(object); - ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR); - while (!code_iter.is_finished()) { - Address code_entry = reinterpret_cast
( - array->RawFieldOfElementAt(code_iter.next_index())); - StaticVisitor::VisitCodeEntry(heap, code_entry); - } - - ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR); - while (!heap_iter.is_finished()) { - Object** slot = array->RawFieldOfElementAt(heap_iter.next_index()); - HeapObject* object = HeapObject::cast(*slot); - heap->mark_compact_collector()->RecordSlot(slot, slot, object); - bool is_weak_object = - (array->get_weak_object_state() == - ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE && - Code::IsWeakObjectInOptimizedCode(object)); - if (!is_weak_object) { - StaticVisitor::MarkObject(heap, object); - } - } -} - - template void StaticMarkingVisitor::VisitJSFunction(Map* map, HeapObject* object) { @@ -826,7 +796,6 @@ void Code::CodeIterateBody(ObjectVisitor* v) { IteratePointer(v, kDeoptimizationDataOffset); IteratePointer(v, kTypeFeedbackInfoOffset); IterateNextCodeLink(v, kNextCodeLinkOffset); - IteratePointer(v, kConstantPoolOffset); RelocIterator it(this, mode_mask); Isolate* isolate = this->GetIsolate(); @@ -863,8 +832,6 @@ void Code::CodeIterateBody(Heap* heap) { reinterpret_cast(this->address() + kTypeFeedbackInfoOffset)); StaticVisitor::VisitNextCodeLink( heap, reinterpret_cast(this->address() + kNextCodeLinkOffset)); - StaticVisitor::VisitPointer( - heap, reinterpret_cast(this->address() + kConstantPoolOffset)); RelocIterator it(this, mode_mask); diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc index 7e8d16d685..49ce4f97ab 100644 --- a/src/heap/objects-visiting.cc +++ b/src/heap/objects-visiting.cc @@ -51,9 +51,6 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId( case FIXED_DOUBLE_ARRAY_TYPE: return kVisitFixedDoubleArray; - case CONSTANT_POOL_ARRAY_TYPE: - return kVisitConstantPoolArray; - case ODDBALL_TYPE: return kVisitOddball; diff --git a/src/heap/objects-visiting.h b/src/heap/objects-visiting.h index 87785e534d..1b788e893b 100644 --- a/src/heap/objects-visiting.h +++ 
b/src/heap/objects-visiting.h @@ -34,7 +34,6 @@ class StaticVisitorBase : public AllStatic { V(FixedDoubleArray) \ V(FixedTypedArray) \ V(FixedFloat64Array) \ - V(ConstantPoolArray) \ V(NativeContext) \ V(AllocationSite) \ V(DataObject2) \ @@ -427,7 +426,6 @@ class StaticMarkingVisitor : public StaticVisitorBase { INLINE(static void VisitMap(Map* map, HeapObject* object)); INLINE(static void VisitCode(Map* map, HeapObject* object)); INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object)); - INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object)); INLINE(static void VisitAllocationSite(Map* map, HeapObject* object)); INLINE(static void VisitWeakCollection(Map* map, HeapObject* object)); INLINE(static void VisitJSFunction(Map* map, HeapObject* object)); diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc index e06455f05b..5192a01b14 100644 --- a/src/heap/spaces.cc +++ b/src/heap/spaces.cc @@ -3037,8 +3037,7 @@ void LargeObjectSpace::Verify() { // large object space. CHECK(object->IsCode() || object->IsSeqString() || object->IsExternalString() || object->IsFixedArray() || - object->IsFixedDoubleArray() || object->IsByteArray() || - object->IsConstantPoolArray()); + object->IsFixedDoubleArray() || object->IsByteArray()); // The object itself should look OK. object->ObjectVerify(); diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h index 64ff491979..5a4036627d 100644 --- a/src/ia32/assembler-ia32-inl.h +++ b/src/ia32/assembler-ia32-inl.h @@ -411,6 +411,12 @@ void Assembler::emit(uint32_t x) { } +void Assembler::emit_q(uint64_t x) { + *reinterpret_cast(pc_) = x; + pc_ += sizeof(uint64_t); +} + + void Assembler::emit(Handle handle) { AllowDeferredHandleDereference heap_object_check; // Verify all Objects referred by code are NOT in new space. 
@@ -475,14 +481,12 @@ void Assembler::emit_w(const Immediate& x) { } -Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { return pc + sizeof(int32_t) + *reinterpret_cast(pc); } -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { int32_t* p = reinterpret_cast(pc); diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc index 6951fa30c5..9066788b1f 100644 --- a/src/ia32/assembler-ia32.cc +++ b/src/ia32/assembler-ia32.cc @@ -2921,6 +2921,12 @@ void Assembler::dd(uint32_t data) { } +void Assembler::dq(uint64_t data) { + EnsureSpace ensure_space(this); + emit_q(data); +} + + void Assembler::dd(Label* label) { EnsureSpace ensure_space(this); RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); @@ -2940,20 +2946,6 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { } -Handle Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - #ifdef GENERATED_CODE_COVERAGE static FILE* coverage_log = NULL; diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h index 48f0603752..0d70191699 100644 --- a/src/ia32/assembler-ia32.h +++ b/src/ia32/assembler-ia32.h @@ -512,15 +512,12 @@ class Assembler : public AssemblerBase { void GetCode(CodeDesc* desc); // Read/Modify the code target in the branch/call instruction at pc. 
- inline static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool); - inline static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED); + inline static Address target_address_at(Address pc, Address constant_pool); + inline static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); static inline Address target_address_at(Address pc, Code* code) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } static inline void set_target_address_at(Address pc, @@ -528,7 +525,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target); } @@ -1451,6 +1448,8 @@ class Assembler : public AssemblerBase { // inline tables, e.g., jump-tables. void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dd(data); } void dd(Label* label); // Check if there is less than kGap bytes available in the buffer. @@ -1477,11 +1476,12 @@ class Assembler : public AssemblerBase { byte byte_at(int pos) { return buffer_[pos]; } void set_byte_at(int pos, byte value) { buffer_[pos] = value; } - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. 
- void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } protected: void emit_sse_operand(XMMRegister reg, const Operand& adr); @@ -1512,6 +1512,7 @@ class Assembler : public AssemblerBase { TypeFeedbackId id = TypeFeedbackId::None()); inline void emit(const Immediate& x); inline void emit_w(const Immediate& x); + inline void emit_q(uint64_t x); // Emit the code-object-relative offset of the label's position inline void emit_code_relative_offset(Label* label); diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc index 98182db614..46985793b8 100644 --- a/src/ia32/deoptimizer-ia32.cc +++ b/src/ia32/deoptimizer-ia32.cc @@ -423,7 +423,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. 
UNREACHABLE(); } diff --git a/src/ia32/frames-ia32.cc b/src/ia32/frames-ia32.cc index 5f5b0d1b69..a9c47274d6 100644 --- a/src/ia32/frames-ia32.cc +++ b/src/ia32/frames-ia32.cc @@ -31,12 +31,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/src/ic/ic-inl.h b/src/ic/ic-inl.h index 4db1b39510..55572f7527 100644 --- a/src/ic/ic-inl.h +++ b/src/ic/ic-inl.h @@ -48,41 +48,40 @@ Address IC::address() const { } -ConstantPoolArray* IC::constant_pool() const { - if (!FLAG_enable_ool_constant_pool) { +Address IC::constant_pool() const { + if (!FLAG_enable_embedded_constant_pool) { return NULL; } else { - Handle result = raw_constant_pool_; + Address constant_pool = raw_constant_pool(); Debug* debug = isolate()->debug(); // First check if any break points are active if not just return the // original constant pool. - if (!debug->has_break_points()) return *result; + if (!debug->has_break_points()) return constant_pool; // At least one break point is active perform additional test to ensure that // break point locations are updated correctly. Address target = Assembler::target_address_from_return_address(pc()); if (debug->IsDebugBreak( - Assembler::target_address_at(target, raw_constant_pool()))) { + Assembler::target_address_at(target, constant_pool))) { // If the call site is a call to debug break then we want to return the // constant pool for the original code instead of the breakpointed code. 
return GetOriginalCode()->constant_pool(); } - return *result; + return constant_pool; } } -ConstantPoolArray* IC::raw_constant_pool() const { - if (FLAG_enable_ool_constant_pool) { - return *raw_constant_pool_; +Address IC::raw_constant_pool() const { + if (FLAG_enable_embedded_constant_pool) { + return *constant_pool_address_; } else { return NULL; } } -Code* IC::GetTargetAtAddress(Address address, - ConstantPoolArray* constant_pool) { +Code* IC::GetTargetAtAddress(Address address, Address constant_pool) { // Get the target address of the IC. Address target = Assembler::target_address_at(address, constant_pool); // Convert target address to the code object. Code::GetCodeFromTargetAddress @@ -94,7 +93,7 @@ Code* IC::GetTargetAtAddress(Address address, void IC::SetTargetAtAddress(Address address, Code* target, - ConstantPoolArray* constant_pool) { + Address constant_pool) { if (AddressIsDeoptimizedCode(target->GetIsolate(), address)) return; DCHECK(target->is_inline_cache_stub() || target->is_compare_ic_stub()); diff --git a/src/ic/ic-state.cc b/src/ic/ic-state.cc index 2fbab4b4b1..e944c390d3 100644 --- a/src/ic/ic-state.cc +++ b/src/ic/ic-state.cc @@ -12,7 +12,7 @@ namespace internal { // static void ICUtility::Clear(Isolate* isolate, Address address, - ConstantPoolArray* constant_pool) { + Address constant_pool) { IC::Clear(isolate, address, constant_pool); } diff --git a/src/ic/ic-state.h b/src/ic/ic-state.h index e451fb2c80..f191c96f1e 100644 --- a/src/ic/ic-state.h +++ b/src/ic/ic-state.h @@ -17,8 +17,7 @@ const int kMaxKeyedPolymorphism = 4; class ICUtility : public AllStatic { public: // Clear the inline cache to initial state. 
- static void Clear(Isolate* isolate, Address address, - ConstantPoolArray* constant_pool); + static void Clear(Isolate* isolate, Address address, Address constant_pool); }; diff --git a/src/ic/ic.cc b/src/ic/ic.cc index a8323ffe5a..f938336ba1 100644 --- a/src/ic/ic.cc +++ b/src/ic/ic.cc @@ -150,10 +150,10 @@ IC::IC(FrameDepth depth, Isolate* isolate, FeedbackNexus* nexus, // levels of the stack frame iteration code. This yields a ~35% speedup when // running DeltaBlue and a ~25% speedup of gbemu with the '--nouse-ic' flag. const Address entry = Isolate::c_entry_fp(isolate->thread_local_top()); - Address constant_pool = NULL; - if (FLAG_enable_ool_constant_pool) { - constant_pool = - Memory::Address_at(entry + ExitFrameConstants::kConstantPoolOffset); + Address* constant_pool = NULL; + if (FLAG_enable_embedded_constant_pool) { + constant_pool = reinterpret_cast( + entry + ExitFrameConstants::kConstantPoolOffset); } Address* pc_address = reinterpret_cast(entry + ExitFrameConstants::kCallerPCOffset); @@ -162,9 +162,9 @@ IC::IC(FrameDepth depth, Isolate* isolate, FeedbackNexus* nexus, // StubFailureTrampoline, we need to look one frame further down the stack to // find the frame pointer and the return address stack slot. 
if (depth == EXTRA_CALL_FRAME) { - if (FLAG_enable_ool_constant_pool) { - constant_pool = - Memory::Address_at(fp + StandardFrameConstants::kConstantPoolOffset); + if (FLAG_enable_embedded_constant_pool) { + constant_pool = reinterpret_cast( + fp + StandardFrameConstants::kConstantPoolOffset); } const int kCallerPCOffset = StandardFrameConstants::kCallerPCOffset; pc_address = reinterpret_cast(fp + kCallerPCOffset); @@ -177,10 +177,8 @@ IC::IC(FrameDepth depth, Isolate* isolate, FeedbackNexus* nexus, DCHECK(fp == frame->fp() && pc_address == frame->pc_address()); #endif fp_ = fp; - if (FLAG_enable_ool_constant_pool) { - raw_constant_pool_ = handle( - ConstantPoolArray::cast(reinterpret_cast(constant_pool)), - isolate); + if (FLAG_enable_embedded_constant_pool) { + constant_pool_address_ = constant_pool; } pc_address_ = StackFrame::ResolveReturnAddressLocation(pc_address); target_ = handle(raw_target(), isolate); @@ -479,8 +477,7 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) { } -void IC::Clear(Isolate* isolate, Address address, - ConstantPoolArray* constant_pool) { +void IC::Clear(Isolate* isolate, Address address, Address constant_pool) { Code* target = GetTargetAtAddress(address, constant_pool); // Don't clear debug break inline cache as it will remove the break point. 
@@ -543,7 +540,7 @@ void LoadIC::Clear(Isolate* isolate, Code* host, LoadICNexus* nexus) { void StoreIC::Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool) { + Address constant_pool) { if (IsCleared(target)) return; Code* code = PropertyICCompiler::FindPreMonomorphic(isolate, Code::STORE_IC, target->extra_ic_state()); @@ -552,7 +549,7 @@ void StoreIC::Clear(Isolate* isolate, Address address, Code* target, void KeyedStoreIC::Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool) { + Address constant_pool) { if (IsCleared(target)) return; Handle code = pre_monomorphic_stub( isolate, StoreICState::GetLanguageMode(target->extra_ic_state())); @@ -561,7 +558,7 @@ void KeyedStoreIC::Clear(Isolate* isolate, Address address, Code* target, void CompareIC::Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool) { + Address constant_pool) { DCHECK(CodeStub::GetMajorKey(target) == CodeStub::CompareIC); CompareICStub stub(target->stub_key(), isolate); // Only clear CompareICs that can retain objects. @@ -2605,8 +2602,7 @@ RUNTIME_FUNCTION(CompareIC_Miss) { } -void CompareNilIC::Clear(Address address, Code* target, - ConstantPoolArray* constant_pool) { +void CompareNilIC::Clear(Address address, Code* target, Address constant_pool) { if (IsCleared(target)) return; ExtraICState state = target->extra_ic_state(); diff --git a/src/ic/ic.h b/src/ic/ic.h index 199b1f60c8..e8613436f9 100644 --- a/src/ic/ic.h +++ b/src/ic/ic.h @@ -78,8 +78,7 @@ class IC { } // Clear the inline cache to initial state. - static void Clear(Isolate* isolate, Address address, - ConstantPoolArray* constant_pool); + static void Clear(Isolate* isolate, Address address, Address constant_pool); #ifdef DEBUG bool IsLoadStub() const { @@ -170,9 +169,9 @@ class IC { // Access the target code for the given IC address. 
static inline Code* GetTargetAtAddress(Address address, - ConstantPoolArray* constant_pool); + Address constant_pool); static inline void SetTargetAtAddress(Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); static void OnTypeFeedbackChanged(Isolate* isolate, Address address, State old_state, State new_state, bool target_remains_ic_stub); @@ -256,8 +255,8 @@ class IC { private: inline Code* raw_target() const; - inline ConstantPoolArray* constant_pool() const; - inline ConstantPoolArray* raw_constant_pool() const; + inline Address constant_pool() const; + inline Address raw_constant_pool() const; void FindTargetMaps() { if (target_maps_set_) return; @@ -277,17 +276,17 @@ class IC { // Frame pointer for the frame that uses (calls) the IC. Address fp_; - // All access to the program counter of an IC structure is indirect - // to make the code GC safe. This feature is crucial since + // All access to the program counter and constant pool of an IC structure is + // indirect to make the code GC safe. This feature is crucial since // GetProperty and SetProperty are called and they in turn might // invoke the garbage collector. Address* pc_address_; - Isolate* isolate_; - // The constant pool of the code which originally called the IC (which might // be for the breakpointed copy of the original code). - Handle raw_constant_pool_; + Address* constant_pool_address_; + + Isolate* isolate_; // The original code target that missed. 
Handle target_; @@ -429,7 +428,7 @@ class LoadIC : public IC { Handle SimpleFieldLoad(FieldIndex index); static void Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); friend class IC; }; @@ -486,7 +485,7 @@ class KeyedLoadIC : public LoadIC { private: static void Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); friend class IC; }; @@ -554,7 +553,7 @@ class StoreIC : public IC { inline void set_target(Code* code); static void Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); friend class IC; }; @@ -635,7 +634,7 @@ class KeyedStoreIC : public StoreIC { inline void set_target(Code* code); static void Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); KeyedAccessStoreMode GetStoreMode(Handle receiver, Handle key, Handle value); @@ -686,7 +685,7 @@ class CompareIC : public IC { bool strong); static void Clear(Isolate* isolate, Address address, Code* target, - ConstantPoolArray* constant_pool); + Address constant_pool); Token::Value op_; @@ -702,8 +701,7 @@ class CompareNilIC : public IC { static Handle GetUninitialized(); - static void Clear(Address address, Code* target, - ConstantPoolArray* constant_pool); + static void Clear(Address address, Code* target, Address constant_pool); static Handle DoCompareNilSlow(Isolate* isolate, NilValue nil, Handle object); diff --git a/src/ic/ppc/handler-compiler-ppc.cc b/src/ic/ppc/handler-compiler-ppc.cc index db8ee6849f..8988b08e2c 100644 --- a/src/ic/ppc/handler-compiler-ppc.cc +++ b/src/ic/ppc/handler-compiler-ppc.cc @@ -25,7 +25,7 @@ void NamedLoadHandlerCompiler::GenerateLoadViaGetter( // -- lr : return address // ----------------------------------- { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 
if (accessor_index >= 0) { DCHECK(!holder.is(scratch)); @@ -62,7 +62,7 @@ void NamedStoreHandlerCompiler::GenerateStoreViaSetter( // -- lr : return address // ----------------------------------- { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Save value register, so we can restore it later. __ push(value()); @@ -649,7 +649,7 @@ void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup( // Save necessary data before invoking an interceptor. // Requires a frame to make GC aware of pushed pointers. { - FrameScope frame_scope(masm(), StackFrame::INTERNAL); + FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL); if (must_preserve_receiver_reg) { __ Push(receiver(), holder_reg, this->name()); } else { diff --git a/src/lithium.cc b/src/lithium.cc index 21ba883165..0bebfaa96e 100644 --- a/src/lithium.cc +++ b/src/lithium.cc @@ -456,10 +456,6 @@ void LChunk::RegisterWeakObjectsInOptimizedCode(Handle code) const { for (int i = 0; i < objects.length(); i++) { AddWeakObjectToCodeDependency(isolate(), objects.at(i), code); } - if (FLAG_enable_ool_constant_pool) { - code->constant_pool()->set_weak_object_state( - ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE); - } code->set_can_have_weak_objects(true); } diff --git a/src/macro-assembler.h b/src/macro-assembler.h index b59fd3b94d..5ea9657515 100644 --- a/src/macro-assembler.h +++ b/src/macro-assembler.h @@ -135,11 +135,11 @@ class FrameAndConstantPoolScope { : masm_(masm), type_(type), old_has_frame_(masm->has_frame()), - old_constant_pool_available_(FLAG_enable_ool_constant_pool && - masm->is_ool_constant_pool_available()) { + old_constant_pool_available_(FLAG_enable_embedded_constant_pool && + masm->is_constant_pool_available()) { masm->set_has_frame(true); - if (FLAG_enable_ool_constant_pool) { - masm->set_ool_constant_pool_available(true); + if (FLAG_enable_embedded_constant_pool) { + masm->set_constant_pool_available(true); } if 
(type_ != StackFrame::MANUAL && type_ != StackFrame::NONE) { masm->EnterFrame(type, !old_constant_pool_available_); @@ -149,8 +149,8 @@ class FrameAndConstantPoolScope { ~FrameAndConstantPoolScope() { masm_->LeaveFrame(type_); masm_->set_has_frame(old_has_frame_); - if (FLAG_enable_ool_constant_pool) { - masm_->set_ool_constant_pool_available(old_constant_pool_available_); + if (FLAG_enable_embedded_constant_pool) { + masm_->set_constant_pool_available(old_constant_pool_available_); } } @@ -178,15 +178,15 @@ class ConstantPoolUnavailableScope { public: explicit ConstantPoolUnavailableScope(MacroAssembler* masm) : masm_(masm), - old_constant_pool_available_(FLAG_enable_ool_constant_pool && - masm->is_ool_constant_pool_available()) { - if (FLAG_enable_ool_constant_pool) { - masm_->set_ool_constant_pool_available(false); + old_constant_pool_available_(FLAG_enable_embedded_constant_pool && + masm->is_constant_pool_available()) { + if (FLAG_enable_embedded_constant_pool) { + masm_->set_constant_pool_available(false); } } ~ConstantPoolUnavailableScope() { - if (FLAG_enable_ool_constant_pool) { - masm_->set_ool_constant_pool_available(old_constant_pool_available_); + if (FLAG_enable_embedded_constant_pool) { + masm_->set_constant_pool_available(old_constant_pool_available_); } } diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc index c1f72d1527..fc664aafa5 100644 --- a/src/mips/assembler-mips.cc +++ b/src/mips/assembler-mips.cc @@ -2729,6 +2729,13 @@ void Assembler::dd(uint32_t data) { } +void Assembler::dq(uint64_t data) { + CheckBuffer(); + *reinterpret_cast(pc_) = data; + pc_ += sizeof(uint64_t); +} + + void Assembler::dd(Label* label) { CheckBuffer(); RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); @@ -3025,20 +3032,6 @@ void Assembler::JumpToJumpRegister(Address pc) { } -Handle Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. 
- DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - } // namespace internal } // namespace v8 diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h index ff05aab187..c116c13bcd 100644 --- a/src/mips/assembler-mips.h +++ b/src/mips/assembler-mips.h @@ -500,19 +500,16 @@ class Assembler : public AssemblerBase { ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); // On MIPS there is no Constant Pool so we skip that parameter. - INLINE(static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool)) { + INLINE(static Address target_address_at(Address pc, Address constant_pool)) { return target_address_at(pc); } - INLINE(static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED)) { + INLINE(static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { set_target_address_at(pc, target, icache_flush_mode); } INLINE(static Address target_address_at(Address pc, Code* code)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } INLINE(static void set_target_address_at(Address pc, @@ -520,7 +517,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? 
code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } @@ -1081,6 +1078,8 @@ class Assembler : public AssemblerBase { // inline tables, e.g., jump-tables. void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dd(data); } void dd(Label* label); // Emits the address of the code stub's first instruction. @@ -1165,11 +1164,12 @@ class Assembler : public AssemblerBase { void CheckTrampolinePool(); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. - void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } protected: // Relocation for a type-recording IC has the AST id added to it. This diff --git a/src/mips/deoptimizer-mips.cc b/src/mips/deoptimizer-mips.cc index 4956ff3ae1..69e8514f67 100644 --- a/src/mips/deoptimizer-mips.cc +++ b/src/mips/deoptimizer-mips.cc @@ -399,7 +399,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. 
UNREACHABLE(); } diff --git a/src/mips/frames-mips.cc b/src/mips/frames-mips.cc index 555f8a8693..3e6293e2e5 100644 --- a/src/mips/frames-mips.cc +++ b/src/mips/frames-mips.cc @@ -31,12 +31,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/src/mips64/assembler-mips64.cc b/src/mips64/assembler-mips64.cc index 85e8a1db49..0b9577420a 100644 --- a/src/mips64/assembler-mips64.cc +++ b/src/mips64/assembler-mips64.cc @@ -2909,6 +2909,13 @@ void Assembler::dd(uint32_t data) { } +void Assembler::dq(uint64_t data) { + CheckBuffer(); + *reinterpret_cast(pc_) = data; + pc_ += sizeof(uint64_t); +} + + void Assembler::dd(Label* label) { CheckBuffer(); RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); @@ -3117,20 +3124,6 @@ void Assembler::set_target_address_at(Address pc, } -Handle Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - } // namespace internal } // namespace v8 diff --git a/src/mips64/assembler-mips64.h b/src/mips64/assembler-mips64.h index 1fea4001d1..9aabe1b316 100644 --- a/src/mips64/assembler-mips64.h +++ b/src/mips64/assembler-mips64.h @@ -493,19 +493,16 @@ class Assembler : public AssemblerBase { ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); // On MIPS there is no Constant Pool so we skip that parameter. 
- INLINE(static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool)) { + INLINE(static Address target_address_at(Address pc, Address constant_pool)) { return target_address_at(pc); } - INLINE(static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED)) { + INLINE(static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { set_target_address_at(pc, target, icache_flush_mode); } INLINE(static Address target_address_at(Address pc, Code* code)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } INLINE(static void set_target_address_at(Address pc, @@ -513,7 +510,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } @@ -1117,6 +1114,8 @@ class Assembler : public AssemblerBase { // inline tables, e.g., jump-tables. void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dq(data); } void dd(Label* label); // Emits the address of the code stub's first instruction. @@ -1201,11 +1200,12 @@ class Assembler : public AssemblerBase { void CheckTrampolinePool(); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. 
- void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } protected: // Relocation for a type-recording IC has the AST id added to it. This diff --git a/src/mips64/deoptimizer-mips64.cc b/src/mips64/deoptimizer-mips64.cc index 703203a892..cf5700f334 100644 --- a/src/mips64/deoptimizer-mips64.cc +++ b/src/mips64/deoptimizer-mips64.cc @@ -403,7 +403,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. UNREACHABLE(); } diff --git a/src/mips64/frames-mips64.cc b/src/mips64/frames-mips64.cc index e34be5ebfb..2f0436184e 100644 --- a/src/mips64/frames-mips64.cc +++ b/src/mips64/frames-mips64.cc @@ -32,12 +32,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/src/objects-debug.cc b/src/objects-debug.cc index 6c13a5e732..635bd6140e 100644 --- a/src/objects-debug.cc +++ b/src/objects-debug.cc @@ -67,9 +67,6 @@ void HeapObject::HeapObjectVerify() { case FIXED_DOUBLE_ARRAY_TYPE: FixedDoubleArray::cast(this)->FixedDoubleArrayVerify(); break; - case CONSTANT_POOL_ARRAY_TYPE: - ConstantPoolArray::cast(this)->ConstantPoolArrayVerify(); - break; case BYTE_ARRAY_TYPE: ByteArray::cast(this)->ByteArrayVerify(); break; @@ -404,20 +401,6 @@ void FixedDoubleArray::FixedDoubleArrayVerify() { } -void ConstantPoolArray::ConstantPoolArrayVerify() { - CHECK(IsConstantPoolArray()); - ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR); - while (!code_iter.is_finished()) { - Address code_entry = 
get_code_ptr_entry(code_iter.next_index()); - VerifyPointer(Code::GetCodeFromTargetAddress(code_entry)); - } - ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR); - while (!heap_iter.is_finished()) { - VerifyObjectField(OffsetOfElementAt(heap_iter.next_index())); - } -} - - void JSGeneratorObject::JSGeneratorObjectVerify() { // In an expression like "new g()", there can be a point where a generator // object is allocated but its fields are all undefined, as it hasn't yet been diff --git a/src/objects-inl.h b/src/objects-inl.h index 3d46e8141c..6b18ed2c3e 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -142,8 +142,8 @@ int PropertyDetails::field_width_in_words() const { bool Object::IsFixedArrayBase() const { - return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() || - IsFixedTypedArrayBase() || IsExternalArray(); + return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase() || + IsExternalArray(); } @@ -719,7 +719,6 @@ TYPE_CHECKER(Map, MAP_TYPE) TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE) TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE) TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE) -TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE) bool Object::IsJSWeakCollection() const { @@ -1543,8 +1542,6 @@ bool HeapObject::MayContainRawValues() { // map-word). return ((type & kIsIndirectStringMask) != kIsIndirectStringTag); } - // The ConstantPoolArray contains heap pointers, but also raw values. 
- if (type == CONSTANT_POOL_ARRAY_TYPE) return true; return (type <= LAST_DATA_TYPE); } @@ -2451,387 +2448,6 @@ void ArrayList::Clear(int index, Object* undefined) { } -void ConstantPoolArray::NumberOfEntries::increment(Type type) { - DCHECK(type < NUMBER_OF_TYPES); - element_counts_[type]++; -} - - -int ConstantPoolArray::NumberOfEntries::equals( - const ConstantPoolArray::NumberOfEntries& other) const { - for (int i = 0; i < NUMBER_OF_TYPES; i++) { - if (element_counts_[i] != other.element_counts_[i]) return false; - } - return true; -} - - -bool ConstantPoolArray::NumberOfEntries::is_empty() const { - return total_count() == 0; -} - - -int ConstantPoolArray::NumberOfEntries::count_of(Type type) const { - DCHECK(type < NUMBER_OF_TYPES); - return element_counts_[type]; -} - - -int ConstantPoolArray::NumberOfEntries::base_of(Type type) const { - int base = 0; - DCHECK(type < NUMBER_OF_TYPES); - for (int i = 0; i < type; i++) { - base += element_counts_[i]; - } - return base; -} - - -int ConstantPoolArray::NumberOfEntries::total_count() const { - int count = 0; - for (int i = 0; i < NUMBER_OF_TYPES; i++) { - count += element_counts_[i]; - } - return count; -} - - -int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const { - for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) { - if (element_counts_[i] < min || element_counts_[i] > max) { - return false; - } - } - return true; -} - - -int ConstantPoolArray::Iterator::next_index() { - DCHECK(!is_finished()); - int ret = next_index_++; - update_section(); - return ret; -} - - -bool ConstantPoolArray::Iterator::is_finished() { - return next_index_ > array_->last_index(type_, final_section_); -} - - -void ConstantPoolArray::Iterator::update_section() { - if (next_index_ > array_->last_index(type_, current_section_) && - current_section_ != final_section_) { - DCHECK(final_section_ == EXTENDED_SECTION); - current_section_ = EXTENDED_SECTION; - next_index_ = array_->first_index(type_, EXTENDED_SECTION); 
- } -} - - -bool ConstantPoolArray::is_extended_layout() { - uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); - return IsExtendedField::decode(small_layout_1); -} - - -ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() { - return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION; -} - - -int ConstantPoolArray::first_extended_section_index() { - DCHECK(is_extended_layout()); - uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); - return TotalCountField::decode(small_layout_2); -} - - -int ConstantPoolArray::get_extended_section_header_offset() { - return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size); -} - - -ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() { - uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); - return WeakObjectStateField::decode(small_layout_2); -} - - -void ConstantPoolArray::set_weak_object_state( - ConstantPoolArray::WeakObjectState state) { - uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); - small_layout_2 = WeakObjectStateField::update(small_layout_2, state); - WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2); -} - - -int ConstantPoolArray::first_index(Type type, LayoutSection section) { - int index = 0; - if (section == EXTENDED_SECTION) { - DCHECK(is_extended_layout()); - index += first_extended_section_index(); - } - - for (Type type_iter = FIRST_TYPE; type_iter < type; - type_iter = next_type(type_iter)) { - index += number_of_entries(type_iter, section); - } - - return index; -} - - -int ConstantPoolArray::last_index(Type type, LayoutSection section) { - return first_index(type, section) + number_of_entries(type, section) - 1; -} - - -int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) { - if (section == SMALL_SECTION) { - uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); - uint32_t small_layout_2 = READ_UINT32_FIELD(this, 
kSmallLayout2Offset); - switch (type) { - case INT64: - return Int64CountField::decode(small_layout_1); - case CODE_PTR: - return CodePtrCountField::decode(small_layout_1); - case HEAP_PTR: - return HeapPtrCountField::decode(small_layout_1); - case INT32: - return Int32CountField::decode(small_layout_2); - default: - UNREACHABLE(); - return 0; - } - } else { - DCHECK(section == EXTENDED_SECTION && is_extended_layout()); - int offset = get_extended_section_header_offset(); - switch (type) { - case INT64: - offset += kExtendedInt64CountOffset; - break; - case CODE_PTR: - offset += kExtendedCodePtrCountOffset; - break; - case HEAP_PTR: - offset += kExtendedHeapPtrCountOffset; - break; - case INT32: - offset += kExtendedInt32CountOffset; - break; - default: - UNREACHABLE(); - } - return READ_INT_FIELD(this, offset); - } -} - - -bool ConstantPoolArray::offset_is_type(int offset, Type type) { - return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) && - offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) || - (is_extended_layout() && - offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) && - offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION))); -} - - -ConstantPoolArray::Type ConstantPoolArray::get_type(int index) { - LayoutSection section; - if (is_extended_layout() && index >= first_extended_section_index()) { - section = EXTENDED_SECTION; - } else { - section = SMALL_SECTION; - } - - Type type = FIRST_TYPE; - while (index > last_index(type, section)) { - type = next_type(type); - } - DCHECK(type <= LAST_TYPE); - return type; -} - - -int64_t ConstantPoolArray::get_int64_entry(int index) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == INT64); - return READ_INT64_FIELD(this, OffsetOfElementAt(index)); -} - - -double ConstantPoolArray::get_int64_entry_as_double(int index) { - STATIC_ASSERT(kDoubleSize == kInt64Size); - DCHECK(map() == GetHeap()->constant_pool_array_map()); - 
DCHECK(get_type(index) == INT64); - return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index)); -} - - -Address ConstantPoolArray::get_code_ptr_entry(int index) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == CODE_PTR); - return reinterpret_cast
(READ_FIELD(this, OffsetOfElementAt(index))); -} - - -Object* ConstantPoolArray::get_heap_ptr_entry(int index) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == HEAP_PTR); - return READ_FIELD(this, OffsetOfElementAt(index)); -} - - -int32_t ConstantPoolArray::get_int32_entry(int index) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == INT32); - return READ_INT32_FIELD(this, OffsetOfElementAt(index)); -} - - -void ConstantPoolArray::set(int index, int64_t value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == INT64); - WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value); -} - - -void ConstantPoolArray::set(int index, double value) { - STATIC_ASSERT(kDoubleSize == kInt64Size); - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == INT64); - WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value); -} - - -void ConstantPoolArray::set(int index, Address value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == CODE_PTR); - WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast(value)); -} - - -void ConstantPoolArray::set(int index, Object* value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(!GetHeap()->InNewSpace(value)); - DCHECK(get_type(index) == HEAP_PTR); - WRITE_FIELD(this, OffsetOfElementAt(index), value); - WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value); -} - - -void ConstantPoolArray::set(int index, int32_t value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(get_type(index) == INT32); - WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value); -} - - -void ConstantPoolArray::set_at_offset(int offset, int32_t value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(offset_is_type(offset, INT32)); - WRITE_INT32_FIELD(this, offset, value); -} - - -void ConstantPoolArray::set_at_offset(int offset, 
int64_t value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(offset_is_type(offset, INT64)); - WRITE_INT64_FIELD(this, offset, value); -} - - -void ConstantPoolArray::set_at_offset(int offset, double value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(offset_is_type(offset, INT64)); - WRITE_DOUBLE_FIELD(this, offset, value); -} - - -void ConstantPoolArray::set_at_offset(int offset, Address value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(offset_is_type(offset, CODE_PTR)); - WRITE_FIELD(this, offset, reinterpret_cast(value)); - WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast(value)); -} - - -void ConstantPoolArray::set_at_offset(int offset, Object* value) { - DCHECK(map() == GetHeap()->constant_pool_array_map()); - DCHECK(!GetHeap()->InNewSpace(value)); - DCHECK(offset_is_type(offset, HEAP_PTR)); - WRITE_FIELD(this, offset, value); - WRITE_BARRIER(GetHeap(), this, offset, value); -} - - -void ConstantPoolArray::Init(const NumberOfEntries& small) { - uint32_t small_layout_1 = - Int64CountField::encode(small.count_of(INT64)) | - CodePtrCountField::encode(small.count_of(CODE_PTR)) | - HeapPtrCountField::encode(small.count_of(HEAP_PTR)) | - IsExtendedField::encode(false); - uint32_t small_layout_2 = - Int32CountField::encode(small.count_of(INT32)) | - TotalCountField::encode(small.total_count()) | - WeakObjectStateField::encode(NO_WEAK_OBJECTS); - WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1); - WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2); - if (kHeaderSize != kFirstEntryOffset) { - DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size); - WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding. - } -} - - -void ConstantPoolArray::InitExtended(const NumberOfEntries& small, - const NumberOfEntries& extended) { - // Initialize small layout fields first. - Init(small); - - // Set is_extended_layout field. 
- uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset); - small_layout_1 = IsExtendedField::update(small_layout_1, true); - WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1); - - // Initialize the extended layout fields. - int extended_header_offset = get_extended_section_header_offset(); - WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset, - extended.count_of(INT64)); - WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset, - extended.count_of(CODE_PTR)); - WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset, - extended.count_of(HEAP_PTR)); - WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset, - extended.count_of(INT32)); -} - - -int ConstantPoolArray::size() { - NumberOfEntries small(this, SMALL_SECTION); - if (!is_extended_layout()) { - return SizeFor(small); - } else { - NumberOfEntries extended(this, EXTENDED_SECTION); - return SizeForExtended(small, extended); - } -} - - -int ConstantPoolArray::length() { - uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset); - int length = TotalCountField::decode(small_layout_2); - if (is_extended_layout()) { - length += number_of_entries(INT64, EXTENDED_SECTION) + - number_of_entries(CODE_PTR, EXTENDED_SECTION) + - number_of_entries(HEAP_PTR, EXTENDED_SECTION) + - number_of_entries(INT32, EXTENDED_SECTION); - } - return length; -} - - WriteBarrierMode HeapObject::GetWriteBarrierMode( const DisallowHeapAllocation& promise) { Heap* heap = GetHeap(); @@ -2843,8 +2459,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode( AllocationAlignment HeapObject::RequiredAlignment() { #ifdef V8_HOST_ARCH_32_BIT - if ((IsFixedFloat64Array() || IsFixedDoubleArray() || - IsConstantPoolArray()) && + if ((IsFixedFloat64Array() || IsFixedDoubleArray()) && FixedArrayBase::cast(this)->length() != 0) { return kDoubleAligned; } @@ -3369,7 +2984,6 @@ CAST_ACCESSOR(Code) CAST_ACCESSOR(CodeCacheHashTable) 
CAST_ACCESSOR(CompilationCacheTable) CAST_ACCESSOR(ConsString) -CAST_ACCESSOR(ConstantPoolArray) CAST_ACCESSOR(DeoptimizationInputData) CAST_ACCESSOR(DeoptimizationOutputData) CAST_ACCESSOR(DependentCode) @@ -4560,9 +4174,6 @@ int HeapObject::SizeFromMap(Map* map) { return FixedDoubleArray::SizeFor( reinterpret_cast(this)->length()); } - if (instance_type == CONSTANT_POOL_ARRAY_TYPE) { - return reinterpret_cast(this)->size(); - } if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE && instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) { return reinterpret_cast( @@ -5165,15 +4776,15 @@ bool Code::is_debug_stub() { } -ConstantPoolArray* Code::constant_pool() { - return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset)); -} - - -void Code::set_constant_pool(Object* value) { - DCHECK(value->IsConstantPoolArray()); - WRITE_FIELD(this, kConstantPoolOffset, value); - WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value); +Address Code::constant_pool() { + Address constant_pool = NULL; + if (FLAG_enable_embedded_constant_pool) { + int offset = constant_pool_offset(); + if (offset < instruction_size()) { + constant_pool = FIELD_ADDR(this, kHeaderSize + offset); + } + } + return constant_pool; } @@ -6351,6 +5962,7 @@ SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset) INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset) INT_ACCESSORS(Code, prologue_offset, kPrologueOffset) +INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset) ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset) ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset) ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset) @@ -6362,7 +5974,6 @@ void Code::WipeOutHeader() { WRITE_FIELD(this, kRelocationInfoOffset, NULL); WRITE_FIELD(this, kHandlerTableOffset, NULL); WRITE_FIELD(this, kDeoptimizationDataOffset, NULL); - WRITE_FIELD(this, kConstantPoolOffset, NULL); // Do not wipe out major/minor keys on a code stub or IC if 
(!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) { WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL); diff --git a/src/objects-printer.cc b/src/objects-printer.cc index c1f20f16a9..35eea77fdf 100644 --- a/src/objects-printer.cc +++ b/src/objects-printer.cc @@ -66,9 +66,6 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT case FIXED_DOUBLE_ARRAY_TYPE: FixedDoubleArray::cast(this)->FixedDoubleArrayPrint(os); break; - case CONSTANT_POOL_ARRAY_TYPE: - ConstantPoolArray::cast(this)->ConstantPoolArrayPrint(os); - break; case FIXED_ARRAY_TYPE: FixedArray::cast(this)->FixedArrayPrint(os); break; @@ -507,43 +504,6 @@ void FixedDoubleArray::FixedDoubleArrayPrint(std::ostream& os) { // NOLINT } -void ConstantPoolArray::ConstantPoolArrayPrint(std::ostream& os) { // NOLINT - HeapObject::PrintHeader(os, "ConstantPoolArray"); - os << " - length: " << length(); - for (int i = 0; i <= last_index(INT32, SMALL_SECTION); i++) { - if (i < last_index(INT64, SMALL_SECTION)) { - os << "\n [" << i << "]: double: " << get_int64_entry_as_double(i); - } else if (i <= last_index(CODE_PTR, SMALL_SECTION)) { - os << "\n [" << i << "]: code target pointer: " - << reinterpret_cast(get_code_ptr_entry(i)); - } else if (i <= last_index(HEAP_PTR, SMALL_SECTION)) { - os << "\n [" << i << "]: heap pointer: " - << reinterpret_cast(get_heap_ptr_entry(i)); - } else if (i <= last_index(INT32, SMALL_SECTION)) { - os << "\n [" << i << "]: int32: " << get_int32_entry(i); - } - } - if (is_extended_layout()) { - os << "\n Extended section:"; - for (int i = first_extended_section_index(); - i <= last_index(INT32, EXTENDED_SECTION); i++) { - if (i < last_index(INT64, EXTENDED_SECTION)) { - os << "\n [" << i << "]: double: " << get_int64_entry_as_double(i); - } else if (i <= last_index(CODE_PTR, EXTENDED_SECTION)) { - os << "\n [" << i << "]: code target pointer: " - << reinterpret_cast(get_code_ptr_entry(i)); - } else if (i <= last_index(HEAP_PTR, EXTENDED_SECTION)) { - os << "\n [" << i << "]: 
heap pointer: " - << reinterpret_cast(get_heap_ptr_entry(i)); - } else if (i <= last_index(INT32, EXTENDED_SECTION)) { - os << "\n [" << i << "]: int32: " << get_int32_entry(i); - } - } - } - os << "\n"; -} - - void JSValue::JSValuePrint(std::ostream& os) { // NOLINT HeapObject::PrintHeader(os, "ValueObject"); value()->Print(os); diff --git a/src/objects.cc b/src/objects.cc index 0ebbe2ce99..5365347130 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -1488,9 +1488,6 @@ void HeapObject::IterateBody(InstanceType type, int object_size, case FIXED_ARRAY_TYPE: FixedArray::BodyDescriptor::IterateBody(this, object_size, v); break; - case CONSTANT_POOL_ARRAY_TYPE: - reinterpret_cast(this)->ConstantPoolIterateBody(v); - break; case FIXED_DOUBLE_ARRAY_TYPE: break; case JS_OBJECT_TYPE: @@ -9504,49 +9501,6 @@ bool Map::EquivalentToForNormalization(Map* other, } -void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) { - // Unfortunately the serializer relies on pointers within an object being - // visited in-order, so we have to iterate both the code and heap pointers in - // the small section before doing so in the extended section. - for (int s = 0; s <= final_section(); ++s) { - LayoutSection section = static_cast(s); - ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR, - section); - while (!code_iter.is_finished()) { - v->VisitCodeEntry(reinterpret_cast
( - RawFieldOfElementAt(code_iter.next_index()))); - } - - ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR, - section); - while (!heap_iter.is_finished()) { - v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index())); - } - } -} - - -void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) { - Type type[] = { CODE_PTR, HEAP_PTR }; - Address default_value[] = { - isolate->builtins()->builtin(Builtins::kIllegal)->entry(), - reinterpret_cast
(isolate->heap()->undefined_value()) }; - - for (int i = 0; i < 2; ++i) { - for (int s = 0; s <= final_section(); ++s) { - LayoutSection section = static_cast(s); - if (number_of_entries(type[i], section) > 0) { - int offset = OffsetOfElementAt(first_index(type[i], section)); - MemsetPointer( - reinterpret_cast(HeapObject::RawField(this, offset)), - default_value[i], - number_of_entries(type[i], section)); - } - } - } -} - - void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { // Iterate over all fields in the body but take care in dealing with // the code entry. @@ -11669,17 +11623,34 @@ void Code::Disassemble(const char* name, std::ostream& os) { // NOLINT os << "Instructions (size = " << instruction_size() << ")\n"; { Isolate* isolate = GetIsolate(); - int decode_size = is_crankshafted() - ? static_cast(safepoint_table_offset()) - : instruction_size(); - // If there might be a back edge table, stop before reaching it. - if (kind() == Code::FUNCTION) { - decode_size = - Min(decode_size, static_cast(back_edge_table_offset())); - } + int size = instruction_size(); + int safepoint_offset = + is_crankshafted() ? static_cast(safepoint_table_offset()) : size; + int back_edge_offset = (kind() == Code::FUNCTION) + ? static_cast(back_edge_table_offset()) + : size; + int constant_pool_offset = FLAG_enable_embedded_constant_pool + ? 
this->constant_pool_offset() + : size; + + // Stop before reaching any embedded tables + int code_size = Min(safepoint_offset, back_edge_offset); + code_size = Min(code_size, constant_pool_offset); byte* begin = instruction_start(); - byte* end = begin + decode_size; + byte* end = begin + code_size; Disassembler::Decode(isolate, &os, begin, end, this); + + if (constant_pool_offset < size) { + int constant_pool_size = size - constant_pool_offset; + DCHECK((constant_pool_size & kPointerAlignmentMask) == 0); + os << "\nConstant Pool (size = " << constant_pool_size << ")\n"; + Vector buf = Vector::New(50); + intptr_t* ptr = reinterpret_cast(begin + constant_pool_offset); + for (int i = 0; i < constant_pool_size; i += kPointerSize, ptr++) { + SNPrintF(buf, "%4d %08" V8PRIxPTR, i, *ptr); + os << static_cast(ptr) << " " << buf.start() << "\n"; + } + } } os << "\n"; @@ -11758,17 +11729,6 @@ void Code::Disassemble(const char* name, std::ostream& os) { // NOLINT it.rinfo()->Print(GetIsolate(), os); } os << "\n"; - -#ifdef OBJECT_PRINT - if (FLAG_enable_ool_constant_pool) { - ConstantPoolArray* pool = constant_pool(); - if (pool->length()) { - os << "Constant Pool\n"; - pool->Print(os); - os << "\n"; - } - } -#endif } #endif // ENABLE_DISASSEMBLER diff --git a/src/objects.h b/src/objects.h index 13ad66ccc2..f77c2407c7 100644 --- a/src/objects.h +++ b/src/objects.h @@ -447,7 +447,6 @@ const int kStubMinorKeyBits = kSmiValueSize - kStubMajorKeyBits - 1; \ V(FIXED_ARRAY_TYPE) \ V(FIXED_DOUBLE_ARRAY_TYPE) \ - V(CONSTANT_POOL_ARRAY_TYPE) \ V(SHARED_FUNCTION_INFO_TYPE) \ V(WEAK_CELL_TYPE) \ \ @@ -740,7 +739,6 @@ enum InstanceType { DEBUG_INFO_TYPE, BREAK_POINT_INFO_TYPE, FIXED_ARRAY_TYPE, - CONSTANT_POOL_ARRAY_TYPE, SHARED_FUNCTION_INFO_TYPE, CELL_TYPE, WEAK_CELL_TYPE, @@ -975,7 +973,6 @@ template inline bool Is(Object* obj); V(FixedDoubleArray) \ V(WeakFixedArray) \ V(ArrayList) \ - V(ConstantPoolArray) \ V(Context) \ V(ScriptContextTable) \ V(NativeContext) \ @@ -2682,312 +2679,6 
@@ class ArrayList : public FixedArray { }; -// ConstantPoolArray describes a fixed-sized array containing constant pool -// entries. -// -// A ConstantPoolArray can be structured in two different ways depending upon -// whether it is extended or small. The is_extended_layout() method can be used -// to discover which layout the constant pool has. -// -// The format of a small constant pool is: -// [kSmallLayout1Offset] : Small section layout bitmap 1 -// [kSmallLayout2Offset] : Small section layout bitmap 2 -// [first_index(INT64, SMALL_SECTION)] : 64 bit entries -// ... : ... -// [first_index(CODE_PTR, SMALL_SECTION)] : code pointer entries -// ... : ... -// [first_index(HEAP_PTR, SMALL_SECTION)] : heap pointer entries -// ... : ... -// [first_index(INT32, SMALL_SECTION)] : 32 bit entries -// ... : ... -// -// If the constant pool has an extended layout, the extended section constant -// pool also contains an extended section, which has the following format at -// location get_extended_section_header_offset(): -// [kExtendedInt64CountOffset] : count of extended 64 bit entries -// [kExtendedCodePtrCountOffset] : count of extended code pointers -// [kExtendedHeapPtrCountOffset] : count of extended heap pointers -// [kExtendedInt32CountOffset] : count of extended 32 bit entries -// [first_index(INT64, EXTENDED_SECTION)] : 64 bit entries -// ... : ... -// [first_index(CODE_PTR, EXTENDED_SECTION)]: code pointer entries -// ... : ... -// [first_index(HEAP_PTR, EXTENDED_SECTION)]: heap pointer entries -// ... : ... -// [first_index(INT32, EXTENDED_SECTION)] : 32 bit entries -// ... : ... -// -class ConstantPoolArray: public HeapObject { - public: - enum WeakObjectState { NO_WEAK_OBJECTS, WEAK_OBJECTS_IN_OPTIMIZED_CODE }; - - enum Type { - INT64 = 0, - CODE_PTR, - HEAP_PTR, - INT32, - // Number of types stored by the ConstantPoolArrays. 
- NUMBER_OF_TYPES, - FIRST_TYPE = INT64, - LAST_TYPE = INT32 - }; - - enum LayoutSection { - SMALL_SECTION = 0, - EXTENDED_SECTION, - NUMBER_OF_LAYOUT_SECTIONS - }; - - class NumberOfEntries BASE_EMBEDDED { - public: - inline NumberOfEntries() { - for (int i = 0; i < NUMBER_OF_TYPES; i++) { - element_counts_[i] = 0; - } - } - - inline NumberOfEntries(int int64_count, int code_ptr_count, - int heap_ptr_count, int int32_count) { - element_counts_[INT64] = int64_count; - element_counts_[CODE_PTR] = code_ptr_count; - element_counts_[HEAP_PTR] = heap_ptr_count; - element_counts_[INT32] = int32_count; - } - - inline NumberOfEntries(ConstantPoolArray* array, LayoutSection section) { - element_counts_[INT64] = array->number_of_entries(INT64, section); - element_counts_[CODE_PTR] = array->number_of_entries(CODE_PTR, section); - element_counts_[HEAP_PTR] = array->number_of_entries(HEAP_PTR, section); - element_counts_[INT32] = array->number_of_entries(INT32, section); - } - - inline void increment(Type type); - inline int equals(const NumberOfEntries& other) const; - inline bool is_empty() const; - inline int count_of(Type type) const; - inline int base_of(Type type) const; - inline int total_count() const; - inline int are_in_range(int min, int max) const; - - private: - int element_counts_[NUMBER_OF_TYPES]; - }; - - class Iterator BASE_EMBEDDED { - public: - inline Iterator(ConstantPoolArray* array, Type type) - : array_(array), - type_(type), - final_section_(array->final_section()), - current_section_(SMALL_SECTION), - next_index_(array->first_index(type, SMALL_SECTION)) { - update_section(); - } - - inline Iterator(ConstantPoolArray* array, Type type, LayoutSection section) - : array_(array), - type_(type), - final_section_(section), - current_section_(section), - next_index_(array->first_index(type, section)) { - update_section(); - } - - inline int next_index(); - inline bool is_finished(); - - private: - inline void update_section(); - ConstantPoolArray* array_; - 
const Type type_; - const LayoutSection final_section_; - - LayoutSection current_section_; - int next_index_; - }; - - // Getters for the first index, the last index and the count of entries of - // a given type for a given layout section. - inline int first_index(Type type, LayoutSection layout_section); - inline int last_index(Type type, LayoutSection layout_section); - inline int number_of_entries(Type type, LayoutSection layout_section); - - // Returns the type of the entry at the given index. - inline Type get_type(int index); - inline bool offset_is_type(int offset, Type type); - - // Setter and getter for pool elements. - inline Address get_code_ptr_entry(int index); - inline Object* get_heap_ptr_entry(int index); - inline int64_t get_int64_entry(int index); - inline int32_t get_int32_entry(int index); - inline double get_int64_entry_as_double(int index); - - inline void set(int index, Address value); - inline void set(int index, Object* value); - inline void set(int index, int64_t value); - inline void set(int index, double value); - inline void set(int index, int32_t value); - - // Setters which take a raw offset rather than an index (for code generation). - inline void set_at_offset(int offset, int32_t value); - inline void set_at_offset(int offset, int64_t value); - inline void set_at_offset(int offset, double value); - inline void set_at_offset(int offset, Address value); - inline void set_at_offset(int offset, Object* value); - - // Setter and getter for weak objects state - inline void set_weak_object_state(WeakObjectState state); - inline WeakObjectState get_weak_object_state(); - - // Returns true if the constant pool has an extended layout, false if it has - // only the small layout. - inline bool is_extended_layout(); - - // Returns the last LayoutSection in this constant pool array. - inline LayoutSection final_section(); - - // Set up initial state for a small layout constant pool array. 
- inline void Init(const NumberOfEntries& small); - - // Set up initial state for an extended layout constant pool array. - inline void InitExtended(const NumberOfEntries& small, - const NumberOfEntries& extended); - - // Clears the pointer entries with GC safe values. - void ClearPtrEntries(Isolate* isolate); - - // returns the total number of entries in the constant pool array. - inline int length(); - - // Garbage collection support. - inline int size(); - - - inline static int MaxInt64Offset(int number_of_int64) { - return kFirstEntryOffset + (number_of_int64 * kInt64Size); - } - - inline static int SizeFor(const NumberOfEntries& small) { - int size = kFirstEntryOffset + - (small.count_of(INT64) * kInt64Size) + - (small.count_of(CODE_PTR) * kPointerSize) + - (small.count_of(HEAP_PTR) * kPointerSize) + - (small.count_of(INT32) * kInt32Size); - return RoundUp(size, kPointerSize); - } - - inline static int SizeForExtended(const NumberOfEntries& small, - const NumberOfEntries& extended) { - int size = SizeFor(small); - size = RoundUp(size, kInt64Size); // Align extended header to 64 bits. - size += kExtendedFirstOffset + - (extended.count_of(INT64) * kInt64Size) + - (extended.count_of(CODE_PTR) * kPointerSize) + - (extended.count_of(HEAP_PTR) * kPointerSize) + - (extended.count_of(INT32) * kInt32Size); - return RoundUp(size, kPointerSize); - } - - inline static int entry_size(Type type) { - switch (type) { - case INT32: - return kInt32Size; - case INT64: - return kInt64Size; - case CODE_PTR: - case HEAP_PTR: - return kPointerSize; - default: - UNREACHABLE(); - return 0; - } - } - - // Code Generation support. 
- inline int OffsetOfElementAt(int index) { - int offset; - LayoutSection section; - if (is_extended_layout() && index >= first_extended_section_index()) { - section = EXTENDED_SECTION; - offset = get_extended_section_header_offset() + kExtendedFirstOffset; - } else { - section = SMALL_SECTION; - offset = kFirstEntryOffset; - } - - // Add offsets for the preceding type sections. - DCHECK(index <= last_index(LAST_TYPE, section)); - for (Type type = FIRST_TYPE; index > last_index(type, section); - type = next_type(type)) { - offset += entry_size(type) * number_of_entries(type, section); - } - - // Add offset for the index in it's type. - Type type = get_type(index); - offset += entry_size(type) * (index - first_index(type, section)); - return offset; - } - - DECLARE_CAST(ConstantPoolArray) - - // Garbage collection support. - Object** RawFieldOfElementAt(int index) { - return HeapObject::RawField(this, OffsetOfElementAt(index)); - } - - // Small Layout description. - static const int kSmallLayout1Offset = HeapObject::kHeaderSize; - static const int kSmallLayout2Offset = kSmallLayout1Offset + kInt32Size; - static const int kHeaderSize = kSmallLayout2Offset + kInt32Size; - static const int kFirstEntryOffset = ROUND_UP(kHeaderSize, kInt64Size); - - static const int kSmallLayoutCountBits = 10; - static const int kMaxSmallEntriesPerType = (1 << kSmallLayoutCountBits) - 1; - - // Fields in kSmallLayout1Offset. - class Int64CountField: public BitField {}; - class CodePtrCountField: public BitField {}; - class HeapPtrCountField: public BitField {}; - class IsExtendedField: public BitField {}; - - // Fields in kSmallLayout2Offset. - class Int32CountField: public BitField {}; - class TotalCountField: public BitField {}; - class WeakObjectStateField: public BitField {}; - - // Extended layout description, which starts at - // get_extended_section_header_offset(). 
- static const int kExtendedInt64CountOffset = 0; - static const int kExtendedCodePtrCountOffset = - kExtendedInt64CountOffset + kInt32Size; - static const int kExtendedHeapPtrCountOffset = - kExtendedCodePtrCountOffset + kInt32Size; - static const int kExtendedInt32CountOffset = - kExtendedHeapPtrCountOffset + kInt32Size; - static const int kExtendedFirstOffset = - kExtendedInt32CountOffset + kInt32Size; - - // Dispatched behavior. - void ConstantPoolIterateBody(ObjectVisitor* v); - - DECLARE_PRINTER(ConstantPoolArray) - DECLARE_VERIFIER(ConstantPoolArray) - - private: - inline int first_extended_section_index(); - inline int get_extended_section_header_offset(); - - inline static Type next_type(Type type) { - DCHECK(type >= FIRST_TYPE && type < NUMBER_OF_TYPES); - int type_int = static_cast(type); - return static_cast(++type_int); - } - - DISALLOW_IMPLICIT_CONSTRUCTORS(ConstantPoolArray); -}; - - // DescriptorArrays are fixed arrays used to hold instance descriptors. // The format of the these objects is: // [0]: Number of descriptors @@ -5314,6 +5005,11 @@ class Code: public HeapObject { inline int prologue_offset() const; inline void set_prologue_offset(int offset); + // [constant_pool offset]: Offset of the constant pool. + // Valid for FLAG_enable_embedded_constant_pool only + inline int constant_pool_offset() const; + inline void set_constant_pool_offset(int offset); + // Unchecked accessors to be used during GC. inline ByteArray* unchecked_relocation_info(); @@ -5447,8 +5143,7 @@ class Code: public HeapObject { inline void set_marked_for_deoptimization(bool flag); // [constant_pool]: The constant pool for this function. - inline ConstantPoolArray* constant_pool(); - inline void set_constant_pool(Object* constant_pool); + inline Address constant_pool(); // Get the safepoint entry for the given pc. SafepointEntry GetSafepointEntry(Address pc); @@ -5649,6 +5344,9 @@ class Code: public HeapObject { // nesting that is deeper than 5 levels into account. 
static const int kMaxLoopNestingMarker = 6; + static const int kConstantPoolSize = + FLAG_enable_embedded_constant_pool ? kIntSize : 0; + // Layout description. static const int kRelocationInfoOffset = HeapObject::kHeaderSize; static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize; @@ -5668,15 +5366,13 @@ class Code: public HeapObject { // Note: We might be able to squeeze this into the flags above. static const int kPrologueOffset = kKindSpecificFlags2Offset + kIntSize; static const int kConstantPoolOffset = kPrologueOffset + kIntSize; - - static const int kHeaderPaddingStart = kConstantPoolOffset + kPointerSize; + static const int kHeaderPaddingStart = + kConstantPoolOffset + kConstantPoolSize; // Add padding to align the instruction start following right after // the Code object header. static const int kHeaderSize = (kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask; - // Ensure that the slot for the constant pool pointer is aligned. - STATIC_ASSERT((kConstantPoolOffset & kPointerAlignmentMask) == 0); // Byte offsets within kKindSpecificFlags1Offset. static const int kFullCodeFlags = kKindSpecificFlags1Offset; diff --git a/src/ppc/assembler-ppc-inl.h b/src/ppc/assembler-ppc-inl.h index d95c7ec596..14b5503a17 100644 --- a/src/ppc/assembler-ppc-inl.h +++ b/src/ppc/assembler-ppc-inl.h @@ -94,6 +94,13 @@ Address RelocInfo::target_address_address() { DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE); + if (FLAG_enable_embedded_constant_pool && + Assembler::IsConstantPoolLoadStart(pc_)) { + // We return the PC for ool constant pool since this function is used by the + // serializer and expects the address to reside within the code object. + return reinterpret_cast
(pc_); + } + // Read the address of the word containing the target_address in an // instruction stream. // The only architecture-independent user of this function is the serializer. @@ -108,6 +115,14 @@ Address RelocInfo::target_address_address() { Address RelocInfo::constant_pool_entry_address() { + if (FLAG_enable_embedded_constant_pool) { + Address constant_pool = host_->constant_pool(); + DCHECK(constant_pool); + ConstantPoolEntry::Access access; + if (Assembler::IsConstantPoolLoadStart(pc_, &access)) + return Assembler::target_constant_pool_address_at( + pc_, constant_pool, access, ConstantPoolEntry::INTPTR); + } UNREACHABLE(); return NULL; } @@ -143,12 +158,28 @@ Address Assembler::target_address_from_return_address(Address pc) { // mtlr ip // blrl // @ return address - return pc - (kMovInstructions + 2) * kInstrSize; + int len; + ConstantPoolEntry::Access access; + if (FLAG_enable_embedded_constant_pool && + IsConstantPoolLoadEnd(pc - 3 * kInstrSize, &access)) { + len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1; + } else { + len = kMovInstructionsNoConstantPool; + } + return pc - (len + 2) * kInstrSize; } Address Assembler::return_address_from_call_start(Address pc) { - return pc + (kMovInstructions + 2) * kInstrSize; + int len; + ConstantPoolEntry::Access access; + if (FLAG_enable_embedded_constant_pool && + IsConstantPoolLoadStart(pc, &access)) { + len = (access == ConstantPoolEntry::OVERFLOWED) ? 2 : 1; + } else { + len = kMovInstructionsNoConstantPool; + } + return pc + (len + 2) * kInstrSize; } @@ -226,8 +257,10 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode, } -static const int kNoCodeAgeInstructions = 6; -static const int kCodeAgingInstructions = Assembler::kMovInstructions + 3; +static const int kNoCodeAgeInstructions = + FLAG_enable_embedded_constant_pool ? 
7 : 6; +static const int kCodeAgingInstructions = + Assembler::kMovInstructionsNoConstantPool + 3; static const int kNoCodeAgeSequenceInstructions = ((kNoCodeAgeInstructions >= kCodeAgingInstructions) ? kNoCodeAgeInstructions @@ -448,8 +481,14 @@ bool Operand::is_reg() const { return rm_.is_valid(); } // Fetch the 32bit value from the FIXED_SEQUENCE lis/ori -Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { + if (FLAG_enable_embedded_constant_pool && constant_pool) { + ConstantPoolEntry::Access access; + if (IsConstantPoolLoadStart(pc, &access)) + return Memory::Address_at(target_constant_pool_address_at( + pc, constant_pool, access, ConstantPoolEntry::INTPTR)); + } + Instr instr1 = instr_at(pc); Instr instr2 = instr_at(pc + kInstrSize); // Interpret 2 instructions generated by lis/ori @@ -475,6 +514,127 @@ Address Assembler::target_address_at(Address pc, } +#if V8_TARGET_ARCH_PPC64 +const int kLoadIntptrOpcode = LD; +#else +const int kLoadIntptrOpcode = LWZ; +#endif + +// Constant pool load sequence detection: +// 1) REGULAR access: +// load , kConstantPoolRegister + +// +// 2) OVERFLOWED access: +// addis , kConstantPoolRegister, +// load , + +bool Assembler::IsConstantPoolLoadStart(Address pc, + ConstantPoolEntry::Access* access) { + Instr instr = instr_at(pc); + int opcode = instr & kOpcodeMask; + if (!GetRA(instr).is(kConstantPoolRegister)) return false; + bool overflowed = (opcode == ADDIS); +#ifdef DEBUG + if (overflowed) { + opcode = instr_at(pc + kInstrSize) & kOpcodeMask; + } + DCHECK(opcode == kLoadIntptrOpcode || opcode == LFD); +#endif + if (access) { + *access = (overflowed ? 
ConstantPoolEntry::OVERFLOWED + : ConstantPoolEntry::REGULAR); + } + return true; +} + + +bool Assembler::IsConstantPoolLoadEnd(Address pc, + ConstantPoolEntry::Access* access) { + Instr instr = instr_at(pc); + int opcode = instr & kOpcodeMask; + bool overflowed = false; + if (!(opcode == kLoadIntptrOpcode || opcode == LFD)) return false; + if (!GetRA(instr).is(kConstantPoolRegister)) { + instr = instr_at(pc - kInstrSize); + opcode = instr & kOpcodeMask; + if ((opcode != ADDIS) || !GetRA(instr).is(kConstantPoolRegister)) { + return false; + } + overflowed = true; + } + if (access) { + *access = (overflowed ? ConstantPoolEntry::OVERFLOWED + : ConstantPoolEntry::REGULAR); + } + return true; +} + + +int Assembler::GetConstantPoolOffset(Address pc, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + bool overflowed = (access == ConstantPoolEntry::OVERFLOWED); +#ifdef DEBUG + ConstantPoolEntry::Access access_check = + static_cast(-1); + DCHECK(IsConstantPoolLoadStart(pc, &access_check)); + DCHECK(access_check == access); +#endif + int offset; + if (overflowed) { + offset = (instr_at(pc) & kImm16Mask) << 16; + offset += SIGN_EXT_IMM16(instr_at(pc + kInstrSize) & kImm16Mask); + DCHECK(!is_int16(offset)); + } else { + offset = SIGN_EXT_IMM16((instr_at(pc) & kImm16Mask)); + } + return offset; +} + + +void Assembler::PatchConstantPoolAccessInstruction( + int pc_offset, int offset, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + Address pc = buffer_ + pc_offset; + bool overflowed = (access == ConstantPoolEntry::OVERFLOWED); +#ifdef DEBUG + ConstantPoolEntry::Access access_check = + static_cast(-1); + DCHECK(IsConstantPoolLoadStart(pc, &access_check)); + DCHECK(access_check == access); + DCHECK(overflowed != is_int16(offset)); +#endif + if (overflowed) { + int hi_word = static_cast(offset >> 16); + int lo_word = static_cast(offset & 0xffff); + if (lo_word & 0x8000) hi_word++; + + Instr instr1 = instr_at(pc); + Instr instr2 = 
instr_at(pc + kInstrSize); + instr1 &= ~kImm16Mask; + instr1 |= (hi_word & kImm16Mask); + instr2 &= ~kImm16Mask; + instr2 |= (lo_word & kImm16Mask); + instr_at_put(pc, instr1); + instr_at_put(pc + kInstrSize, instr2); + } else { + Instr instr = instr_at(pc); + instr &= ~kImm16Mask; + instr |= (offset & kImm16Mask); + instr_at_put(pc, instr); + } +} + + +Address Assembler::target_constant_pool_address_at( + Address pc, Address constant_pool, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + Address addr = constant_pool; + DCHECK(addr); + addr += GetConstantPoolOffset(pc, access, type); + return addr; +} + + // This sets the branch destination (which gets loaded at the call address). // This is for calls and branches within generated code. The serializer // has already deserialized the mov instructions etc. @@ -497,10 +657,18 @@ void Assembler::deserialization_set_target_internal_reference_at( // This code assumes the FIXED_SEQUENCE of lis/ori -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { + if (FLAG_enable_embedded_constant_pool && constant_pool) { + ConstantPoolEntry::Access access; + if (IsConstantPoolLoadStart(pc, &access)) { + Memory::Address_at(target_constant_pool_address_at( + pc, constant_pool, access, ConstantPoolEntry::INTPTR)) = target; + return; + } + } + Instr instr1 = instr_at(pc); Instr instr2 = instr_at(pc + kInstrSize); // Interpret 2 instructions generated by lis/ori diff --git a/src/ppc/assembler-ppc.cc b/src/ppc/assembler-ppc.cc index f5929ec58c..cce578e9f6 100644 --- a/src/ppc/assembler-ppc.cc +++ b/src/ppc/assembler-ppc.cc @@ -148,13 +148,17 @@ const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | bool RelocInfo::IsCodedSpecially() { // The deserializer needs to know whether a pointer is specially // coded. 
Being specially coded on PPC means that it is a lis/ori - // instruction sequence, and these are always the case inside code - // objects. + // instruction sequence or is a constant pool entry, and these are + // always the case inside code objects. return true; } bool RelocInfo::IsInConstantPool() { + if (FLAG_enable_embedded_constant_pool) { + Address constant_pool = host_->constant_pool(); + return (constant_pool && Assembler::IsConstantPoolLoadStart(pc_)); + } return false; } @@ -201,11 +205,13 @@ MemOperand::MemOperand(Register ra, Register rb) { Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) : AssemblerBase(isolate, buffer, buffer_size), recorded_ast_id_(TypeFeedbackId::None()), + constant_pool_builder_(kLoadPtrMaxReachBits, kLoadDoubleMaxReachBits), positions_recorder_(this) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); no_trampoline_pool_before_ = 0; trampoline_pool_blocked_nesting_ = 0; + constant_pool_entry_sharing_blocked_nesting_ = 0; // We leave space (kMaxBlockTrampolineSectionSize) // for BlockTrampolinePoolScope buffer. next_buffer_check_ = @@ -221,6 +227,9 @@ Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) void Assembler::GetCode(CodeDesc* desc) { + // Emit constant pool if necessary. + int constant_pool_offset = EmitConstantPool(); + EmitRelocations(); // Set up code descriptor. @@ -228,6 +237,8 @@ void Assembler::GetCode(CodeDesc* desc) { desc->buffer_size = buffer_size_; desc->instr_size = pc_offset(); desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); + desc->constant_pool_size = + (constant_pool_offset ? 
desc->instr_size - constant_pool_offset : 0); desc->origin = this; } @@ -238,7 +249,12 @@ void Assembler::Align(int m) { #else DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); #endif - while ((pc_offset() & (m - 1)) != 0) { + // First ensure instruction alignment + while (pc_offset() & (kInstrSize - 1)) { + db(0); + } + // Then pad to requested alignment with nops + while (pc_offset() & (m - 1)) { nop(); } } @@ -471,7 +487,8 @@ void Assembler::target_at_put(int pos, int target_pos) { // Load the address of the label in a register. Register dst = Register::from_code(instr_at(pos + kInstrSize)); CodePatcher patcher(reinterpret_cast(buffer_ + pos), - kMovInstructions, CodePatcher::DONT_FLUSH); + kMovInstructionsNoConstantPool, + CodePatcher::DONT_FLUSH); // Keep internal references relative until EmitRelocations. 
- patcher.masm()->emit_ptr(target_pos); + patcher.masm()->dp(target_pos); break; } default: @@ -1492,13 +1509,56 @@ void Assembler::function_descriptor() { Label instructions; DCHECK(pc_offset() == 0); emit_label_addr(&instructions); - emit_ptr(0); - emit_ptr(0); + dp(0); + dp(0); bind(&instructions); #endif } +int Assembler::instructions_required_for_mov(Register dst, + const Operand& src) const { + bool canOptimize = + !(src.must_output_reloc_info(this) || is_trampoline_pool_blocked()); + if (use_constant_pool_for_mov(dst, src, canOptimize)) { + if (ConstantPoolAccessIsInOverflow()) { + return kMovInstructionsConstantPool + 1; + } + return kMovInstructionsConstantPool; + } + DCHECK(!canOptimize); + return kMovInstructionsNoConstantPool; +} + + +bool Assembler::use_constant_pool_for_mov(Register dst, const Operand& src, + bool canOptimize) const { + if (!FLAG_enable_embedded_constant_pool || !is_constant_pool_available()) { + // If there is no constant pool available, we must use a mov + // immediate sequence. + return false; + } + + intptr_t value = src.immediate(); +#if V8_TARGET_ARCH_PPC64 + bool allowOverflow = !((canOptimize && is_int32(value)) || dst.is(r0)); +#else + bool allowOverflow = !(canOptimize || dst.is(r0)); +#endif + if (canOptimize && is_int16(value)) { + // Prefer a single-instruction load-immediate. + return false; + } + if (!allowOverflow && ConstantPoolAccessIsInOverflow()) { + // Prefer non-relocatable two-instruction bitwise-mov32 over + // overflow sequence. 
+ return false; + } + + return true; +} + + void Assembler::EnsureSpaceFor(int space_needed) { if (buffer_space() <= (kGap + space_needed)) { GrowBuffer(space_needed); @@ -1531,6 +1591,30 @@ void Assembler::mov(Register dst, const Operand& src) { canOptimize = !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value))); + if (use_constant_pool_for_mov(dst, src, canOptimize)) { + DCHECK(is_constant_pool_available()); + if (relocatable) { + RecordRelocInfo(src.rmode_); + } + ConstantPoolEntry::Access access = ConstantPoolAddEntry(src.rmode_, value); +#if V8_TARGET_ARCH_PPC64 + if (access == ConstantPoolEntry::OVERFLOWED) { + addis(dst, kConstantPoolRegister, Operand::Zero()); + ld(dst, MemOperand(dst, 0)); + } else { + ld(dst, MemOperand(kConstantPoolRegister, 0)); + } +#else + if (access == ConstantPoolEntry::OVERFLOWED) { + addis(dst, kConstantPoolRegister, Operand::Zero()); + lwz(dst, MemOperand(dst, 0)); + } else { + lwz(dst, MemOperand(kConstantPoolRegister, 0)); + } +#endif + return; + } + if (canOptimize) { if (is_int16(value)) { li(dst, Operand(value)); @@ -1696,8 +1780,8 @@ void Assembler::mov_label_addr(Register dst, Label* label) { BlockTrampolinePoolScope block_trampoline_pool(this); emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); emit(dst.code()); - DCHECK(kMovInstructions >= 2); - for (int i = 0; i < kMovInstructions - 2; i++) nop(); + DCHECK(kMovInstructionsNoConstantPool >= 2); + for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop(); } } @@ -1708,7 +1792,7 @@ void Assembler::emit_label_addr(Label* label) { int position = link(label); if (label->is_bound()) { // Keep internal references relative until EmitRelocations. - emit_ptr(position); + dp(position); } else { // Encode internal reference to unbound label. 
We use a dummy opcode // such that it won't collide with any opcode that might appear in the @@ -1839,6 +1923,7 @@ void Assembler::isync() { emit(EXT1 | ISYNC); } void Assembler::lfd(const DoubleRegister frt, const MemOperand& src) { int offset = src.offset(); Register ra = src.ra(); + DCHECK(!ra.is(r0)); DCHECK(is_int16(offset)); int imm16 = offset & kImm16Mask; // could be x_form instruction with some casting magic @@ -1849,6 +1934,7 @@ void Assembler::lfd(const DoubleRegister frt, const MemOperand& src) { void Assembler::lfdu(const DoubleRegister frt, const MemOperand& src) { int offset = src.offset(); Register ra = src.ra(); + DCHECK(!ra.is(r0)); DCHECK(is_int16(offset)); int imm16 = offset & kImm16Mask; // could be x_form instruction with some casting magic @@ -2248,51 +2334,33 @@ void Assembler::dd(uint32_t data) { } -void Assembler::emit_ptr(intptr_t data) { +void Assembler::dq(uint64_t value) { CheckBuffer(); - *reinterpret_cast(pc_) = data; - pc_ += sizeof(intptr_t); + *reinterpret_cast(pc_) = value; + pc_ += sizeof(uint64_t); } -void Assembler::emit_double(double value) { +void Assembler::dp(uintptr_t data) { CheckBuffer(); - *reinterpret_cast(pc_) = value; - pc_ += sizeof(double); + *reinterpret_cast(pc_) = data; + pc_ += sizeof(uintptr_t); } void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { + if (RelocInfo::IsNone(rmode) || + // Don't record external references unless the heap will be serialized. + (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && + !emit_debug_code())) { + return; + } + if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { + data = RecordedAstId().ToInt(); + ClearRecordedAstId(); + } DeferredRelocInfo rinfo(pc_offset(), rmode, data); - RecordRelocInfo(rinfo); -} - - -void Assembler::RecordRelocInfo(const DeferredRelocInfo& rinfo) { - if (rinfo.rmode() >= RelocInfo::JS_RETURN && - rinfo.rmode() <= RelocInfo::DEBUG_BREAK_SLOT) { - // Adjust code for new modes. 
- DCHECK(RelocInfo::IsDebugBreakSlot(rinfo.rmode()) || - RelocInfo::IsJSReturn(rinfo.rmode()) || - RelocInfo::IsComment(rinfo.rmode()) || - RelocInfo::IsPosition(rinfo.rmode())); - } - if (!RelocInfo::IsNone(rinfo.rmode())) { - // Don't record external references unless the heap will be serialized. - if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { - if (!serializer_enabled() && !emit_debug_code()) { - return; - } - } - if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { - DeferredRelocInfo reloc_info_with_ast_id(rinfo.position(), rinfo.rmode(), - RecordedAstId().ToInt()); - ClearRecordedAstId(); - relocations_.push_back(reloc_info_with_ast_id); - } else { - relocations_.push_back(rinfo); - } - } + relocations_.push_back(rinfo); } @@ -2378,15 +2446,6 @@ void Assembler::CheckTrampolinePool() { } -Handle Assembler::NewConstantPool(Isolate* isolate) { - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - DCHECK(!FLAG_enable_ool_constant_pool); -} } // namespace internal } // namespace v8 diff --git a/src/ppc/assembler-ppc.h b/src/ppc/assembler-ppc.h index bcc2d8f6b6..fb56852bb5 100644 --- a/src/ppc/assembler-ppc.h +++ b/src/ppc/assembler-ppc.h @@ -108,7 +108,8 @@ struct Register { static const int kAllocatableLowRangeBegin = 3; static const int kAllocatableLowRangeEnd = 10; static const int kAllocatableHighRangeBegin = 14; - static const int kAllocatableHighRangeEnd = 28; + static const int kAllocatableHighRangeEnd = + FLAG_enable_embedded_constant_pool ? 
27 : 28; static const int kAllocatableContext = 30; static const int kNumAllocatableLow = @@ -177,6 +178,10 @@ struct Register { "r28", "cp", }; + if (FLAG_enable_embedded_constant_pool && + (index == kMaxNumAllocatableRegisters - 2)) { + return names[index + 1]; + } return names[index]; } @@ -184,7 +189,7 @@ struct Register { 1 << 3 | 1 << 4 | 1 << 5 | 1 << 6 | 1 << 7 | 1 << 8 | 1 << 9 | 1 << 10 | 1 << 14 | 1 << 15 | 1 << 16 | 1 << 17 | 1 << 18 | 1 << 19 | 1 << 20 | 1 << 21 | 1 << 22 | 1 << 23 | 1 << 24 | 1 << 25 | 1 << 26 | 1 << 27 | - 1 << 28 | 1 << 30; + (FLAG_enable_embedded_constant_pool ? 0 : 1 << 28) | 1 << 30; static Register from_code(int code) { Register r = {code}; @@ -242,7 +247,7 @@ const int kRegister_r24_Code = 24; const int kRegister_r25_Code = 25; const int kRegister_r26_Code = 26; const int kRegister_r27_Code = 27; -const int kRegister_r28_Code = 28; +const int kRegister_r28_Code = 28; // constant pool pointer const int kRegister_r29_Code = 29; // roots array pointer const int kRegister_r30_Code = 30; // context pointer const int kRegister_fp_Code = 31; // frame pointer @@ -286,6 +291,7 @@ const Register fp = {kRegister_fp_Code}; // Give alias names to registers const Register cp = {kRegister_r30_Code}; // JavaScript context pointer const Register kRootRegister = {kRegister_r29_Code}; // Roots array pointer. +const Register kConstantPoolRegister = {kRegister_r28_Code}; // Constant pool // Double word FP register. struct DoubleRegister { @@ -595,20 +601,36 @@ class Assembler : public AssemblerBase { // The high 8 bits are set to zero. 
void label_at_put(Label* L, int at_offset); + INLINE(static bool IsConstantPoolLoadStart( + Address pc, ConstantPoolEntry::Access* access = nullptr)); + INLINE(static bool IsConstantPoolLoadEnd( + Address pc, ConstantPoolEntry::Access* access = nullptr)); + INLINE(static int GetConstantPoolOffset(Address pc, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type)); + INLINE(void PatchConstantPoolAccessInstruction( + int pc_offset, int offset, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type)); + + // Return the address in the constant pool of the code target address used by + // the branch/call instruction at pc, or the object in a mov. + INLINE(static Address target_constant_pool_address_at( + Address pc, Address constant_pool, ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type)); + // Read/Modify the code target address in the branch/call instruction at pc. - INLINE(static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool)); + INLINE(static Address target_address_at(Address pc, Address constant_pool)); INLINE(static void set_target_address_at( - Address pc, ConstantPoolArray* constant_pool, Address target, + Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)); INLINE(static Address target_address_at(Address pc, Code* code)) { - ConstantPoolArray* constant_pool = NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } INLINE(static void set_target_address_at( Address pc, Code* code, Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) { - ConstantPoolArray* constant_pool = NULL; + Address constant_pool = code ? code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } @@ -646,12 +668,21 @@ class Assembler : public AssemblerBase { // Number of instructions to load an address via a mov sequence. 
#if V8_TARGET_ARCH_PPC64 - static const int kMovInstructions = 5; - static const int kTaggedLoadInstructions = 2; + static const int kMovInstructionsConstantPool = 1; + static const int kMovInstructionsNoConstantPool = 5; +#if defined(V8_PPC_TAGGING_OPT) + static const int kTaggedLoadInstructions = 1; #else - static const int kMovInstructions = 2; + static const int kTaggedLoadInstructions = 2; +#endif +#else + static const int kMovInstructionsConstantPool = 1; + static const int kMovInstructionsNoConstantPool = 2; static const int kTaggedLoadInstructions = 1; #endif + static const int kMovInstructions = FLAG_enable_embedded_constant_pool + ? kMovInstructionsConstantPool + : kMovInstructionsNoConstantPool; // Distance between the instruction referring to the address of the call // target and the return address. @@ -682,13 +713,15 @@ class Assembler : public AssemblerBase { // This is the length of the BreakLocation::SetDebugBreakAtReturn() // code patch FIXED_SEQUENCE - static const int kJSReturnSequenceInstructions = kMovInstructions + 3; + static const int kJSReturnSequenceInstructions = + kMovInstructionsNoConstantPool + 3; static const int kJSReturnSequenceLength = kJSReturnSequenceInstructions * kInstrSize; // This is the length of the code sequence from SetDebugBreakAtSlot() // FIXED_SEQUENCE - static const int kDebugBreakSlotInstructions = kMovInstructions + 2; + static const int kDebugBreakSlotInstructions = + kMovInstructionsNoConstantPool + 2; static const int kDebugBreakSlotLength = kDebugBreakSlotInstructions * kInstrSize; @@ -1201,6 +1234,23 @@ class Assembler : public AssemblerBase { DISALLOW_IMPLICIT_CONSTRUCTORS(BlockTrampolinePoolScope); }; + // Class for scoping disabling constant pool entry merging + class BlockConstantPoolEntrySharingScope { + public: + explicit BlockConstantPoolEntrySharingScope(Assembler* assem) + : assem_(assem) { + assem_->StartBlockConstantPoolEntrySharing(); + } + ~BlockConstantPoolEntrySharingScope() { + 
assem_->EndBlockConstantPoolEntrySharing(); + } + + private: + Assembler* assem_; + + DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstantPoolEntrySharingScope); + }; + // Debugging // Mark address of the ExitJSFrame code. @@ -1237,8 +1287,8 @@ class Assembler : public AssemblerBase { // for inline tables, e.g., jump-tables. void db(uint8_t data); void dd(uint32_t data); - void emit_ptr(intptr_t data); - void emit_double(double data); + void dq(uint64_t data); + void dp(uintptr_t data); PositionsRecorder* positions_recorder() { return &positions_recorder_; } @@ -1284,6 +1334,19 @@ class Assembler : public AssemblerBase { void BlockTrampolinePoolFor(int instructions); void CheckTrampolinePool(); + // For mov. Return the number of actual instructions required to + // load the operand into a register. This can be anywhere from + // one (constant pool small section) to five instructions (full + // 64-bit sequence). + // + // The value returned is only valid as long as no entries are added to the + // constant pool between this call and the actual instruction being emitted. + int instructions_required_for_mov(Register dst, const Operand& src) const; + + // Decide between using the constant pool vs. a mov immediate sequence. + bool use_constant_pool_for_mov(Register dst, const Operand& src, + bool canOptimize) const; + // The code currently calls CheckBuffer() too often. This has the side // effect of randomly growing the buffer in the middle of multi-instruction // sequences. @@ -1291,11 +1354,16 @@ class Assembler : public AssemblerBase { // This function allows outside callers to check and grow the buffer void EnsureSpaceFor(int space_needed); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); + int EmitConstantPool() { return constant_pool_builder_.Emit(this); } - // Generate the constant pool for the generated code. 
- void PopulateConstantPool(ConstantPoolArray* constant_pool); + bool ConstantPoolAccessIsInOverflow() const { + return constant_pool_builder_.NextAccess(ConstantPoolEntry::INTPTR) == + ConstantPoolEntry::OVERFLOWED; + } + + Label* ConstantPoolPosition() { + return constant_pool_builder_.EmittedPosition(); + } void EmitRelocations(); @@ -1315,7 +1383,16 @@ class Assembler : public AssemblerBase { // Record reloc info for current pc_ void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0); - void RecordRelocInfo(const DeferredRelocInfo& rinfo); + ConstantPoolEntry::Access ConstantPoolAddEntry(RelocInfo::Mode rmode, + intptr_t value) { + bool sharing_ok = RelocInfo::IsNone(rmode) || + !(serializer_enabled() || rmode < RelocInfo::CELL || + is_constant_pool_entry_sharing_blocked()); + return constant_pool_builder_.AddEntry(pc_offset(), value, sharing_ok); + } + ConstantPoolEntry::Access ConstantPoolAddEntry(double value) { + return constant_pool_builder_.AddEntry(pc_offset(), value); + } // Block the emission of the trampoline pool before pc_offset. void BlockTrampolinePoolBefore(int pc_offset) { @@ -1329,6 +1406,16 @@ class Assembler : public AssemblerBase { return trampoline_pool_blocked_nesting_ > 0; } + void StartBlockConstantPoolEntrySharing() { + constant_pool_entry_sharing_blocked_nesting_++; + } + void EndBlockConstantPoolEntrySharing() { + constant_pool_entry_sharing_blocked_nesting_--; + } + bool is_constant_pool_entry_sharing_blocked() const { + return constant_pool_entry_sharing_blocked_nesting_ > 0; + } + bool has_exception() const { return internal_trampoline_exception_; } bool is_trampoline_emitted() const { return trampoline_emitted_; } @@ -1350,6 +1437,9 @@ class Assembler : public AssemblerBase { int trampoline_pool_blocked_nesting_; // Block emission if this is not zero. int no_trampoline_pool_before_; // Block emission before this pc offset. + // Do not share constant pool entries. 
+ int constant_pool_entry_sharing_blocked_nesting_; + // Relocation info generation // Each relocation is encoded as a variable size value static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; @@ -1359,6 +1449,8 @@ class Assembler : public AssemblerBase { // The bound position, before this we cannot do instruction elimination. int last_bound_pos_; + ConstantPoolBuilder constant_pool_builder_; + // Code emission inline void CheckBuffer(); void GrowBuffer(int needed = 0); diff --git a/src/ppc/builtins-ppc.cc b/src/ppc/builtins-ppc.cc index 6e44bc6465..5935896205 100644 --- a/src/ppc/builtins-ppc.cc +++ b/src/ppc/builtins-ppc.cc @@ -233,7 +233,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { __ push(function); // Preserve the function. __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ push(r3); __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); } @@ -253,7 +253,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { __ bind(&gc_required); __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7); { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ push(argument); __ CallRuntime(Runtime::kNewStringWrapper, 1); } @@ -263,7 +263,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { static void CallRuntimePassFunction(MacroAssembler* masm, Runtime::FunctionId function_id) { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Push a copy of the function onto the stack. // Push function as parameter to the runtime call. __ Push(r4, r4); @@ -354,7 +354,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Enter a construct frame. 
{ - FrameScope scope(masm, StackFrame::CONSTRUCT); + FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); if (create_memento) { __ AssertUndefinedOrAllocationSite(r5, r7); @@ -753,7 +753,7 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) { CHECK(!FLAG_pretenuring_call_new); { - FrameScope scope(masm, StackFrame::CONSTRUCT); + FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); // Smi-tagged arguments count. __ mr(r7, r3); @@ -962,7 +962,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) { static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Push a copy of the function onto the stack. // Push function as parameter to the runtime call. __ Push(r4, r4); @@ -1080,7 +1080,7 @@ void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, SaveFPRegsMode save_doubles) { { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Preserve registers across notification, this is important for compiled // stubs that tail call the runtime on deopts passing their parameters in @@ -1109,7 +1109,7 @@ void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, Deoptimizer::BailoutType type) { { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Pass the function and deoptimization type to the runtime system. __ LoadSmiLiteral(r3, Smi::FromInt(static_cast(type))); __ push(r3); @@ -1157,7 +1157,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { // Lookup the function in the JavaScript frame. 
__ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Pass function as argument. __ push(r3); __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); @@ -1176,8 +1176,13 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); { + ConstantPoolUnavailableScope constant_pool_unavailable(masm); __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start + if (FLAG_enable_embedded_constant_pool) { + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); + } + // Load the OSR entrypoint offset from the deoptimization data. // = [#header_size + #osr_pc_offset] __ LoadP(r4, FieldMemOperand( @@ -1202,7 +1207,7 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { __ cmpl(sp, ip); __ bge(&ok); { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ CallRuntime(Runtime::kStackGuard, 0); } __ Jump(masm->isolate()->builtins()->OnStackReplacement(), @@ -1293,7 +1298,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { { // Enter an internal frame in order to preserve argument count. 
- FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ SmiTag(r3); __ Push(r3, r5); __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); @@ -1460,7 +1465,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kStackSize = kFormalParameters + 1; { - FrameScope frame_scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; const int kReceiverOffset = kArgumentsOffset + kPointerSize; const int kFunctionOffset = kReceiverOffset + kPointerSize; @@ -1589,7 +1594,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) { const int kStackSize = kFormalParameters + 1; { - FrameScope frame_scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; const int kArgumentsOffset = kNewTargetOffset + kPointerSize; const int kFunctionOffset = kArgumentsOffset + kPointerSize; @@ -1689,7 +1694,11 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); __ mflr(r0); __ push(r0); - __ Push(fp, r7, r4, r3); + if (FLAG_enable_embedded_constant_pool) { + __ Push(fp, kConstantPoolRegister, r7, r4, r3); + } else { + __ Push(fp, r7, r4, r3); + } __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); } diff --git a/src/ppc/code-stubs-ppc.cc b/src/ppc/code-stubs-ppc.cc index 0e4b725a14..6e686e3346 100644 --- a/src/ppc/code-stubs-ppc.cc +++ b/src/ppc/code-stubs-ppc.cc @@ -111,7 +111,7 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm, int param_count = descriptor.GetEnvironmentParameterCount(); { // Call the runtime system in a fresh internal frame. 
- FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); DCHECK(param_count == 0 || r3.is(descriptor.GetEnvironmentParameterRegister(param_count - 1))); // Push arguments @@ -1182,11 +1182,15 @@ void CEntryStub::Generate(MacroAssembler* masm) { __ bind(&skip); // Compute the handler entry address and jump to it. + ConstantPoolUnavailableScope constant_pool_unavailable(masm); __ mov(r4, Operand(pending_handler_code_address)); __ LoadP(r4, MemOperand(r4)); __ mov(r5, Operand(pending_handler_offset_address)); __ LoadP(r5, MemOperand(r5)); __ addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start + if (FLAG_enable_embedded_constant_pool) { + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r4); + } __ add(ip, r4, r5); __ Jump(ip); } @@ -1228,6 +1232,10 @@ void JSEntryStub::Generate(MacroAssembler* masm) { // r7: argv __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. __ push(r0); + if (FLAG_enable_embedded_constant_pool) { + __ li(kConstantPoolRegister, Operand::Zero()); + __ push(kConstantPoolRegister); + } int marker = type(); __ LoadSmiLiteral(r0, Smi::FromInt(marker)); __ push(r0); @@ -1542,7 +1550,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); } else { { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ Push(r3, r4); __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); } @@ -2553,7 +2561,7 @@ static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { // r5 : Feedback vector // r6 : slot in feedback vector (Smi) // r4 : the function to call - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Number-of-arguments register must be smi-tagged to call out. 
__ SmiTag(r3); @@ -2717,7 +2725,7 @@ static void EmitSlowCase(MacroAssembler* masm, int argc, Label* non_function) { static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { // Wrap the receiver and patch it back onto the stack. { - FrameScope frame_scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); __ Push(r4, r6); __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); __ pop(r4); @@ -3035,7 +3043,7 @@ void CallICStub::Generate(MacroAssembler* masm) { // r6 - slot // r4 - function { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); CreateWeakCellStub create_stub(masm->isolate()); __ Push(r4); __ CallStub(&create_stub); @@ -3063,7 +3071,7 @@ void CallICStub::Generate(MacroAssembler* masm) { void CallICStub::GenerateMiss(MacroAssembler* masm) { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Push the function and feedback info. 
__ Push(r4, r5, r6); @@ -4032,7 +4040,7 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) { ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate()); - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); __ Push(r4, r3); __ Push(r4, r3); __ LoadSmiLiteral(r0, Smi::FromInt(op())); diff --git a/src/ppc/constants-ppc.h b/src/ppc/constants-ppc.h index 58d4430019..e86079aa08 100644 --- a/src/ppc/constants-ppc.h +++ b/src/ppc/constants-ppc.h @@ -17,6 +17,11 @@ const int kNumFPRegisters = kNumFPDoubleRegisters; const int kNoRegister = -1; +// Used in embedded constant pool builder - max reach in bits for +// various load instructions (one less due to unsigned) +const int kLoadPtrMaxReachBits = 15; +const int kLoadDoubleMaxReachBits = 15; + // sign-extend the least significant 16-bits of value #define SIGN_EXT_IMM16(imm) ((static_cast(imm) << 16) >> 16) diff --git a/src/ppc/debug-ppc.cc b/src/ppc/debug-ppc.cc index 4dd4220a5b..a438a04e2e 100644 --- a/src/ppc/debug-ppc.cc +++ b/src/ppc/debug-ppc.cc @@ -74,7 +74,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, RegList object_regs, RegList non_object_regs) { { - FrameScope scope(masm, StackFrame::INTERNAL); + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); // Load padding words on stack. __ LoadSmiLiteral(ip, Smi::FromInt(LiveEdit::kFramePaddingValue)); diff --git a/src/ppc/deoptimizer-ppc.cc b/src/ppc/deoptimizer-ppc.cc index fff67bd280..0db074d694 100644 --- a/src/ppc/deoptimizer-ppc.cc +++ b/src/ppc/deoptimizer-ppc.cc @@ -356,8 +356,8 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. 
- UNREACHABLE(); + DCHECK(FLAG_enable_embedded_constant_pool); + SetFrameSlot(offset, value); } diff --git a/src/ppc/frames-ppc.cc b/src/ppc/frames-ppc.cc index a09bbeb731..1e54c46963 100644 --- a/src/ppc/frames-ppc.cc +++ b/src/ppc/frames-ppc.cc @@ -21,22 +21,16 @@ namespace internal { Register JavaScriptFrame::fp_register() { return v8::internal::fp; } Register JavaScriptFrame::context_register() { return cp; } Register JavaScriptFrame::constant_pool_pointer_register() { - UNREACHABLE(); - return no_reg; + DCHECK(FLAG_enable_embedded_constant_pool); + return kConstantPoolRegister; } Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; } Register StubFailureTrampolineFrame::context_register() { return cp; } Register StubFailureTrampolineFrame::constant_pool_pointer_register() { - UNREACHABLE(); - return no_reg; -} - - -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); + DCHECK(FLAG_enable_embedded_constant_pool); + return kConstantPoolRegister; } } // namespace internal } // namespace v8 diff --git a/src/ppc/frames-ppc.h b/src/ppc/frames-ppc.h index 40a68b3a37..3316086dff 100644 --- a/src/ppc/frames-ppc.h +++ b/src/ppc/frames-ppc.h @@ -116,8 +116,11 @@ class EntryFrameConstants : public AllStatic { class ExitFrameConstants : public AllStatic { public: - static const int kFrameSize = 2 * kPointerSize; - static const int kConstantPoolOffset = 0; // Not used. + static const int kFrameSize = + FLAG_enable_embedded_constant_pool ? 3 * kPointerSize : 2 * kPointerSize; + + static const int kConstantPoolOffset = + FLAG_enable_embedded_constant_pool ? 
-3 * kPointerSize : 0; static const int kCodeOffset = -2 * kPointerSize; static const int kSPOffset = -1 * kPointerSize; diff --git a/src/ppc/full-codegen-ppc.cc b/src/ppc/full-codegen-ppc.cc index fc65863deb..d9c0478ec2 100644 --- a/src/ppc/full-codegen-ppc.cc +++ b/src/ppc/full-codegen-ppc.cc @@ -400,6 +400,10 @@ void FullCodeGenerator::Generate() { __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); } EmitReturnSequence(); + + if (HasStackOverflow()) { + masm_->AbortConstantPoolBuilding(); + } } @@ -440,6 +444,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, EmitProfilingCounterDecrement(weight); { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); + Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_); // BackEdgeTable::PatchAt manipulates this sequence. __ cmpi(r6, Operand::Zero()); __ bc_short(ge, &ok); @@ -512,7 +517,7 @@ void FullCodeGenerator::EmitReturnSequence() { // With 64bit we may need nop() instructions to ensure we have // enough space to SetDebugBreakAtReturn() if (is_int16(sp_delta)) { - masm_->nop(); + if (!FLAG_enable_embedded_constant_pool) masm_->nop(); masm_->nop(); } #endif @@ -2317,6 +2322,10 @@ void FullCodeGenerator::EmitGeneratorResume( __ bne(&slow_resume, cr0); __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset)); { + ConstantPoolUnavailableScope constant_pool_unavailable(masm_); + if (FLAG_enable_embedded_constant_pool) { + __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip); + } __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset)); __ SmiUntag(r5); __ add(ip, ip, r5); diff --git a/src/ppc/lithium-codegen-ppc.cc b/src/ppc/lithium-codegen-ppc.cc index 11ea4bd049..6db655e9c1 100644 --- a/src/ppc/lithium-codegen-ppc.cc +++ b/src/ppc/lithium-codegen-ppc.cc @@ -50,8 +50,12 @@ bool LCodeGen::GenerateCode() { // the frame (that is done in GeneratePrologue). 
FrameScope frame_scope(masm_, StackFrame::NONE); - return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() && - GenerateJumpTable() && GenerateSafepointTable(); + bool rc = GeneratePrologue() && GenerateBody() && GenerateDeferredCode() && + GenerateJumpTable() && GenerateSafepointTable(); + if (FLAG_enable_embedded_constant_pool && !rc) { + masm()->AbortConstantPoolBuilding(); + } + return rc; } diff --git a/src/ppc/macro-assembler-ppc.cc b/src/ppc/macro-assembler-ppc.cc index 40b111649b..d0960cc90a 100644 --- a/src/ppc/macro-assembler-ppc.cc +++ b/src/ppc/macro-assembler-ppc.cc @@ -103,14 +103,15 @@ void MacroAssembler::CallJSEntry(Register target) { int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode, Condition cond) { - return (2 + kMovInstructions) * kInstrSize; + Operand mov_operand = Operand(reinterpret_cast(target), rmode); + return (2 + instructions_required_for_mov(ip, mov_operand)) * kInstrSize; } int MacroAssembler::CallSizeNotPredictableCodeSize(Address target, RelocInfo::Mode rmode, Condition cond) { - return (2 + kMovInstructions) * kInstrSize; + return (2 + kMovInstructionsNoConstantPool) * kInstrSize; } @@ -513,19 +514,35 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. 
void MacroAssembler::PushFixedFrame(Register marker_reg) { mflr(r0); - if (marker_reg.is_valid()) { - Push(r0, fp, cp, marker_reg); + if (FLAG_enable_embedded_constant_pool) { + if (marker_reg.is_valid()) { + Push(r0, fp, kConstantPoolRegister, cp, marker_reg); + } else { + Push(r0, fp, kConstantPoolRegister, cp); + } } else { - Push(r0, fp, cp); + if (marker_reg.is_valid()) { + Push(r0, fp, cp, marker_reg); + } else { + Push(r0, fp, cp); + } } } void MacroAssembler::PopFixedFrame(Register marker_reg) { - if (marker_reg.is_valid()) { - Pop(r0, fp, cp, marker_reg); + if (FLAG_enable_embedded_constant_pool) { + if (marker_reg.is_valid()) { + Pop(r0, fp, kConstantPoolRegister, cp, marker_reg); + } else { + Pop(r0, fp, kConstantPoolRegister, cp); + } } else { - Pop(r0, fp, cp); + if (marker_reg.is_valid()) { + Pop(r0, fp, cp, marker_reg); + } else { + Pop(r0, fp, cp); + } } mtlr(r0); } @@ -651,11 +668,37 @@ void MacroAssembler::ConvertDoubleToInt64(const DoubleRegister double_input, } +void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress( + Register code_target_address) { + lwz(kConstantPoolRegister, + MemOperand(code_target_address, + Code::kConstantPoolOffset - Code::kHeaderSize)); + add(kConstantPoolRegister, kConstantPoolRegister, code_target_address); +} + + +void MacroAssembler::LoadConstantPoolPointerRegister(Register base, + int code_start_delta) { + add_label_offset(kConstantPoolRegister, base, ConstantPoolPosition(), + code_start_delta); +} + + +void MacroAssembler::LoadConstantPoolPointerRegister() { + mov_label_addr(kConstantPoolRegister, ConstantPoolPosition()); +} + + void MacroAssembler::StubPrologue(int prologue_offset) { LoadSmiLiteral(r11, Smi::FromInt(StackFrame::STUB)); PushFixedFrame(r11); // Adjust FP to point to saved FP. 
addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); + if (FLAG_enable_embedded_constant_pool) { + // ip contains prologue address + LoadConstantPoolPointerRegister(ip, -prologue_offset); + set_constant_pool_available(true); + } } @@ -688,13 +731,26 @@ void MacroAssembler::Prologue(bool code_pre_aging, int prologue_offset) { } } } + if (FLAG_enable_embedded_constant_pool) { + // ip contains prologue address + LoadConstantPoolPointerRegister(ip, -prologue_offset); + set_constant_pool_available(true); + } } void MacroAssembler::EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) { - LoadSmiLiteral(ip, Smi::FromInt(type)); - PushFixedFrame(ip); + if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) { + PushFixedFrame(); + // This path should not rely on ip containing code entry. + LoadConstantPoolPointerRegister(); + LoadSmiLiteral(ip, Smi::FromInt(type)); + push(ip); + } else { + LoadSmiLiteral(ip, Smi::FromInt(type)); + PushFixedFrame(ip); + } // Adjust FP to point to saved FP. addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); @@ -704,6 +760,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type, int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { + ConstantPoolUnavailableScope constant_pool_unavailable(this); // r3: preserved // r4: preserved // r5: preserved @@ -713,6 +770,13 @@ int MacroAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { int frame_ends; LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); + if (FLAG_enable_embedded_constant_pool) { + const int exitOffset = ExitFrameConstants::kConstantPoolOffset; + const int standardOffset = StandardFrameConstants::kConstantPoolOffset; + const int offset = + ((type == StackFrame::EXIT) ? 
exitOffset : standardOffset); + LoadP(kConstantPoolRegister, MemOperand(fp, offset)); + } mtlr(r0); frame_ends = pc_offset(); Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0); @@ -759,6 +823,10 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { li(r8, Operand::Zero()); StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset)); } + if (FLAG_enable_embedded_constant_pool) { + StoreP(kConstantPoolRegister, + MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); + } mov(r8, Operand(CodeObject())); StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); @@ -828,6 +896,7 @@ int MacroAssembler::ActivationFrameAlignment() { void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count, bool restore_context, bool argument_count_is_length) { + ConstantPoolUnavailableScope constant_pool_unavailable(this); // Optionally restore all double registers. if (save_doubles) { // Calculate the stack location of the saved doubles and restore them. @@ -3177,6 +3246,21 @@ void MacroAssembler::SetRelocatedValue(Register location, Register scratch, Register new_value) { lwz(scratch, MemOperand(location)); + if (FLAG_enable_embedded_constant_pool) { + if (emit_debug_code()) { + // Check that the instruction sequence is a load from the constant pool + ExtractBitMask(scratch, scratch, 0x1f * B16); + cmpi(scratch, Operand(kConstantPoolRegister.code())); + Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); + // Scratch was clobbered. Restore it. + lwz(scratch, MemOperand(location)); + } + // Get the address of the constant and patch it. + andi(scratch, scratch, Operand(kImm16Mask)); + StorePX(new_value, MemOperand(kConstantPoolRegister, scratch)); + return; + } + // This code assumes a FIXED_SEQUENCE for lis/ori // At this point scratch is a lis instruction. 
@@ -3260,6 +3344,20 @@ void MacroAssembler::GetRelocatedValue(Register location, Register result, Register scratch) { lwz(result, MemOperand(location)); + if (FLAG_enable_embedded_constant_pool) { + if (emit_debug_code()) { + // Check that the instruction sequence is a load from the constant pool + ExtractBitMask(result, result, 0x1f * B16); + cmpi(result, Operand(kConstantPoolRegister.code())); + Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool); + lwz(result, MemOperand(location)); + } + // Get the address of the constant and retrieve it. + andi(result, result, Operand(kImm16Mask)); + LoadPX(result, MemOperand(kConstantPoolRegister, result)); + return; + } + // This code assumes a FIXED_SEQUENCE for lis/ori if (emit_debug_code()) { And(result, result, Operand(kOpcodeMask | (0x1f * B16))); @@ -3697,6 +3795,18 @@ void MacroAssembler::LoadSmiLiteral(Register dst, Smi* smi) { void MacroAssembler::LoadDoubleLiteral(DoubleRegister result, double value, Register scratch) { + if (FLAG_enable_embedded_constant_pool && is_constant_pool_available() && + !(scratch.is(r0) && ConstantPoolAccessIsInOverflow())) { + ConstantPoolEntry::Access access = ConstantPoolAddEntry(value); + if (access == ConstantPoolEntry::OVERFLOWED) { + addis(scratch, kConstantPoolRegister, Operand::Zero()); + lfd(result, MemOperand(scratch, 0)); + } else { + lfd(result, MemOperand(kConstantPoolRegister, 0)); + } + return; + } + // avoid gcc strict aliasing error using union cast union { double dval; diff --git a/src/ppc/macro-assembler-ppc.h b/src/ppc/macro-assembler-ppc.h index 2e415d6b6b..491167b227 100644 --- a/src/ppc/macro-assembler-ppc.h +++ b/src/ppc/macro-assembler-ppc.h @@ -102,7 +102,9 @@ class MacroAssembler : public Assembler { MacroAssembler(Isolate* isolate, void* buffer, int size); - // Returns the size of a call in instructions. + // Returns the size of a call in instructions. 
Note, the value returned is + // only valid as long as no entries are added to the constant pool between + // checking the call size and emitting the actual call. static int CallSize(Register target); int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al); static int CallSizeNotPredictableCodeSize(Address target, @@ -1360,7 +1362,7 @@ class MacroAssembler : public Assembler { // --------------------------------------------------------------------------- // Patching helpers. - // Retrieve/patch the relocated value (lis/ori pair). + // Retrieve/patch the relocated value (lis/ori pair or constant pool load). void GetRelocatedValue(Register location, Register result, Register scratch); void SetRelocatedValue(Register location, Register scratch, Register new_value); @@ -1449,6 +1451,19 @@ class MacroAssembler : public Assembler { void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0, Register scratch1, Label* found); + // Loads the constant pool pointer (kConstantPoolRegister). 
+ void LoadConstantPoolPointerRegisterFromCodeTargetAddress( + Register code_target_address); + void LoadConstantPoolPointerRegister(); + void LoadConstantPoolPointerRegister(Register base, int code_entry_delta = 0); + + void AbortConstantPoolBuilding() { +#ifdef DEBUG + // Avoid DCHECK(!is_linked()) failure in ~Label() + bind(ConstantPoolPosition()); +#endif + } + private: static const int kSmiShift = kSmiTagSize + kSmiShiftSize; diff --git a/src/runtime/runtime-generator.cc b/src/runtime/runtime-generator.cc index a241ce1077..ed86c4dd74 100644 --- a/src/runtime/runtime-generator.cc +++ b/src/runtime/runtime-generator.cc @@ -111,7 +111,7 @@ RUNTIME_FUNCTION(Runtime_ResumeJSGeneratorObject) { int offset = generator_object->continuation(); DCHECK(offset > 0); frame->set_pc(pc + offset); - if (FLAG_enable_ool_constant_pool) { + if (FLAG_enable_embedded_constant_pool) { frame->set_constant_pool( generator_object->function()->code()->constant_pool()); } diff --git a/src/snapshot/serialize.cc b/src/snapshot/serialize.cc index 08f2300b8e..4be1da2c9a 100644 --- a/src/snapshot/serialize.cc +++ b/src/snapshot/serialize.cc @@ -1006,7 +1006,7 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space, ALL_SPACES(kBackref, kPlain, kStartOfObject) ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject) #if defined(V8_TARGET_ARCH_MIPS) || defined(V8_TARGET_ARCH_MIPS64) || \ - defined(V8_TARGET_ARCH_PPC) || V8_OOL_CONSTANT_POOL + defined(V8_TARGET_ARCH_PPC) // Deserialize a new object from pointer found in code and write // a pointer to it to the current object. Required only for MIPS, PPC or // ARM with ool constant pool, and omitted on the other architectures @@ -1030,8 +1030,8 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space, // current object. 
CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0) CASE_BODY(kRootArray, kPlain, kStartOfObject, 0) -#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL || \ - defined(V8_TARGET_ARCH_MIPS64) || defined(V8_TARGET_ARCH_PPC) +#if defined(V8_TARGET_ARCH_MIPS) || defined(V8_TARGET_ARCH_MIPS64) || \ + defined(V8_TARGET_ARCH_PPC) // Find an object in the roots array and write a pointer to it to in code. CASE_STATEMENT(kRootArray, kFromCode, kStartOfObject, 0) CASE_BODY(kRootArray, kFromCode, kStartOfObject, 0) @@ -1950,9 +1950,6 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start, void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) { - // Out-of-line constant pool entries will be visited by the ConstantPoolArray. - if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return; - int skip = OutputRawData(rinfo->target_address_address(), kCanReturnSkipInsteadOfSkipping); HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain; @@ -2024,9 +2021,6 @@ void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) { void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) { - // Out-of-line constant pool entries will be visited by the ConstantPoolArray. - if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return; - int skip = OutputRawData(rinfo->target_address_address(), kCanReturnSkipInsteadOfSkipping); Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address()); @@ -2044,9 +2038,6 @@ void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) { void Serializer::ObjectSerializer::VisitCell(RelocInfo* rinfo) { - // Out-of-line constant pool entries will be visited by the ConstantPoolArray. 
- if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return; - int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping); Cell* object = Cell::cast(rinfo->target_cell()); serializer_->SerializeObject(object, kPlain, kInnerPointer, skip); @@ -2094,9 +2085,7 @@ Address Serializer::ObjectSerializer::PrepareCode() { RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED); for (RelocIterator it(code, mode_mask); !it.done(); it.next()) { RelocInfo* rinfo = it.rinfo(); - if (!(FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool())) { - rinfo->WipeOut(); - } + rinfo->WipeOut(); } // We need to wipe out the header fields *after* wiping out the // relocations, because some of these fields are needed for the latter. diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h index 1a20109612..90deaba4fe 100644 --- a/src/x64/assembler-x64-inl.h +++ b/src/x64/assembler-x64-inl.h @@ -267,14 +267,12 @@ void Assembler::emit_vex_prefix(Register reg, Register vreg, const Operand& rm, } -Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { return Memory::int32_at(pc) + pc + 4; } -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { Memory::int32_at(pc) = static_cast(target - pc - 4); diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc index d67a92b06b..cb93ab878b 100644 --- a/src/x64/assembler-x64.cc +++ b/src/x64/assembler-x64.cc @@ -3883,6 +3883,12 @@ void Assembler::dd(uint32_t data) { } +void Assembler::dq(uint64_t data) { + EnsureSpace ensure_space(this); + emitq(data); +} + + void Assembler::dq(Label* label) { EnsureSpace ensure_space(this); if (label->is_bound()) { @@ -3921,20 +3927,6 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) 
{ } -Handle Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY | 1 << RelocInfo::INTERNAL_REFERENCE | diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h index 27f04419a3..cb52541b56 100644 --- a/src/x64/assembler-x64.h +++ b/src/x64/assembler-x64.h @@ -535,15 +535,12 @@ class Assembler : public AssemblerBase { // the absolute address of the target. // These functions convert between absolute Addresses of Code objects and // the relative displacements stored in the code. - static inline Address target_address_at(Address pc, - ConstantPoolArray* constant_pool); - static inline void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED) ; + static inline Address target_address_at(Address pc, Address constant_pool); + static inline void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); static inline Address target_address_at(Address pc, Code* code) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } static inline void set_target_address_at(Address pc, @@ -551,7 +548,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? 
code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target, icache_flush_mode); } @@ -1628,16 +1625,19 @@ class Assembler : public AssemblerBase { // Use --trace-deopt to enable. void RecordDeoptReason(const int reason, const SourcePosition position); - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. - void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } // Writes a single word of data in the code stream. // Used for inline tables, e.g., jump-tables. void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dq(data); } void dq(Label* label); PositionsRecorder* positions_recorder() { return &positions_recorder_; } diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc index cfb4fab37f..cd3324c7e4 100644 --- a/src/x64/deoptimizer-x64.cc +++ b/src/x64/deoptimizer-x64.cc @@ -346,7 +346,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. 
UNREACHABLE(); } diff --git a/src/x64/frames-x64.cc b/src/x64/frames-x64.cc index 8687861418..11db5b9ed6 100644 --- a/src/x64/frames-x64.cc +++ b/src/x64/frames-x64.cc @@ -31,12 +31,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/src/x87/assembler-x87-inl.h b/src/x87/assembler-x87-inl.h index 4f5768e684..62beab8ed3 100644 --- a/src/x87/assembler-x87-inl.h +++ b/src/x87/assembler-x87-inl.h @@ -412,6 +412,12 @@ void Assembler::emit(uint32_t x) { } +void Assembler::emit_q(uint64_t x) { + *reinterpret_cast(pc_) = x; + pc_ += sizeof(uint64_t); +} + + void Assembler::emit(Handle handle) { AllowDeferredHandleDereference heap_object_check; // Verify all Objects referred by code are NOT in new space. @@ -476,14 +482,12 @@ void Assembler::emit_w(const Immediate& x) { } -Address Assembler::target_address_at(Address pc, - ConstantPoolArray* constant_pool) { +Address Assembler::target_address_at(Address pc, Address constant_pool) { return pc + sizeof(int32_t) + *reinterpret_cast(pc); } -void Assembler::set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, +void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { int32_t* p = reinterpret_cast(pc); diff --git a/src/x87/assembler-x87.cc b/src/x87/assembler-x87.cc index 44cb8eeb77..1f572be423 100644 --- a/src/x87/assembler-x87.cc +++ b/src/x87/assembler-x87.cc @@ -2028,6 +2028,12 @@ void Assembler::dd(uint32_t data) { } +void Assembler::dq(uint64_t data) { + EnsureSpace ensure_space(this); + emit_q(data); +} + + void Assembler::dd(Label* label) { EnsureSpace ensure_space(this); RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); @@ -2047,20 +2053,6 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { } -Handle 
Assembler::NewConstantPool(Isolate* isolate) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return isolate->factory()->empty_constant_pool_array(); -} - - -void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { - // No out-of-line constant pool support. - DCHECK(!FLAG_enable_ool_constant_pool); - return; -} - - #ifdef GENERATED_CODE_COVERAGE static FILE* coverage_log = NULL; diff --git a/src/x87/assembler-x87.h b/src/x87/assembler-x87.h index 67af72e0c2..ea05ab975f 100644 --- a/src/x87/assembler-x87.h +++ b/src/x87/assembler-x87.h @@ -495,15 +495,12 @@ class Assembler : public AssemblerBase { void GetCode(CodeDesc* desc); // Read/Modify the code target in the branch/call instruction at pc. - inline static Address target_address_at(Address pc, - ConstantPoolArray* constant_pool); - inline static void set_target_address_at(Address pc, - ConstantPoolArray* constant_pool, - Address target, - ICacheFlushMode icache_flush_mode = - FLUSH_ICACHE_IF_NEEDED); + inline static Address target_address_at(Address pc, Address constant_pool); + inline static void set_target_address_at( + Address pc, Address constant_pool, Address target, + ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); static inline Address target_address_at(Address pc, Code* code) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; return target_address_at(pc, constant_pool); } static inline void set_target_address_at(Address pc, @@ -511,7 +508,7 @@ class Assembler : public AssemblerBase { Address target, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED) { - ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; + Address constant_pool = code ? code->constant_pool() : NULL; set_target_address_at(pc, constant_pool, target); } @@ -952,6 +949,8 @@ class Assembler : public AssemblerBase { // inline tables, e.g., jump-tables. 
void db(uint8_t data); void dd(uint32_t data); + void dq(uint64_t data); + void dp(uintptr_t data) { dd(data); } void dd(Label* label); // Check if there is less than kGap bytes available in the buffer. @@ -978,11 +977,12 @@ class Assembler : public AssemblerBase { byte byte_at(int pos) { return buffer_[pos]; } void set_byte_at(int pos, byte value) { buffer_[pos] = value; } - // Allocate a constant pool of the correct size for the generated code. - Handle NewConstantPool(Isolate* isolate); - - // Generate the constant pool for the generated code. - void PopulateConstantPool(ConstantPoolArray* constant_pool); + void PatchConstantPoolAccessInstruction(int pc_offset, int offset, + ConstantPoolEntry::Access access, + ConstantPoolEntry::Type type) { + // No embedded constant pool support. + UNREACHABLE(); + } protected: byte* addr_at(int pos) { return buffer_ + pos; } @@ -1008,6 +1008,7 @@ class Assembler : public AssemblerBase { TypeFeedbackId id = TypeFeedbackId::None()); inline void emit(const Immediate& x); inline void emit_w(const Immediate& x); + inline void emit_q(uint64_t x); // Emit the code-object-relative offset of the label's position inline void emit_code_relative_offset(Label* label); diff --git a/src/x87/deoptimizer-x87.cc b/src/x87/deoptimizer-x87.cc index 11ff5373d4..533ce1abe6 100644 --- a/src/x87/deoptimizer-x87.cc +++ b/src/x87/deoptimizer-x87.cc @@ -462,7 +462,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - // No out-of-line constant pool support. + // No embedded constant pool support. 
UNREACHABLE(); } diff --git a/src/x87/frames-x87.cc b/src/x87/frames-x87.cc index 0de05884bb..557794f3a2 100644 --- a/src/x87/frames-x87.cc +++ b/src/x87/frames-x87.cc @@ -31,12 +31,6 @@ Register StubFailureTrampolineFrame::constant_pool_pointer_register() { } -Object*& ExitFrame::constant_pool_slot() const { - UNREACHABLE(); - return Memory::Object_at(NULL); -} - - } // namespace internal } // namespace v8 diff --git a/test/cctest/test-compiler.cc b/test/cctest/test-compiler.cc index e62517a25f..4de9aaef20 100644 --- a/test/cctest/test-compiler.cc +++ b/test/cctest/test-compiler.cc @@ -531,6 +531,9 @@ static void CheckCodeForUnsafeLiteral(Handle f) { int decode_size = Min(f->code()->instruction_size(), static_cast(f->code()->back_edge_table_offset())); + if (FLAG_enable_embedded_constant_pool) { + decode_size = Min(decode_size, f->code()->constant_pool_offset()); + } Address end = pc + decode_size; v8::internal::EmbeddedVector decode_buffer; diff --git a/test/cctest/test-constantpool.cc b/test/cctest/test-constantpool.cc index 4592074742..ce3abb0edb 100644 --- a/test/cctest/test-constantpool.cc +++ b/test/cctest/test-constantpool.cc @@ -1,337 +1,247 @@ -// Copyright 2013 the V8 project authors. All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following -// disclaimer in the documentation and/or other materials provided -// with the distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived -// from this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2015 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. -// Test constant pool array code. +// Test embedded constant pool builder code. 
#include "src/v8.h" -#include "src/factory.h" -#include "src/objects.h" +#include "src/assembler.h" #include "test/cctest/cctest.h" using namespace v8::internal; -static ConstantPoolArray::Type kTypes[] = { ConstantPoolArray::INT64, - ConstantPoolArray::CODE_PTR, - ConstantPoolArray::HEAP_PTR, - ConstantPoolArray::INT32 }; -static ConstantPoolArray::LayoutSection kSmall = - ConstantPoolArray::SMALL_SECTION; -static ConstantPoolArray::LayoutSection kExtended = - ConstantPoolArray::EXTENDED_SECTION; +const ConstantPoolEntry::Type kPtrType = ConstantPoolEntry::INTPTR; +const ConstantPoolEntry::Type kDblType = ConstantPoolEntry::DOUBLE; +const ConstantPoolEntry::Access kRegAccess = ConstantPoolEntry::REGULAR; +const ConstantPoolEntry::Access kOvflAccess = ConstantPoolEntry::OVERFLOWED; -Code* DummyCode(LocalContext* context) { - CompileRun("function foo() {};"); - i::Handle fun = v8::Utils::OpenHandle( - *v8::Local::Cast( - (*context)->Global()->Get(v8_str("foo")))); - return fun->code(); +const int kReachBits = 6; // Use reach of 64-bytes to test overflow. 
+const int kReach = 1 << kReachBits; + + +TEST(ConstantPoolPointers) { + ConstantPoolBuilder builder(kReachBits, kReachBits); + const int kRegularCount = kReach / kPointerSize; + ConstantPoolEntry::Access access; + int pos = 0; + intptr_t value = 0; + bool sharing_ok = true; + + CHECK(builder.IsEmpty()); + while (builder.NextAccess(kPtrType) == kRegAccess) { + access = builder.AddEntry(pos++, value++, sharing_ok); + CHECK_EQ(access, kRegAccess); + } + CHECK(!builder.IsEmpty()); + CHECK_EQ(pos, kRegularCount); + + access = builder.AddEntry(pos, value, sharing_ok); + CHECK_EQ(access, kOvflAccess); } -TEST(ConstantPoolSmall) { - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); +TEST(ConstantPoolDoubles) { + ConstantPoolBuilder builder(kReachBits, kReachBits); + const int kRegularCount = kReach / kDoubleSize; + ConstantPoolEntry::Access access; + int pos = 0; + double value = 0.0; - // Check construction. - ConstantPoolArray::NumberOfEntries small(3, 1, 2, 1); - Handle array = factory->NewConstantPoolArray(small); - - int expected_counts[] = { 3, 1, 2, 1 }; - int expected_first_idx[] = { 0, 3, 4, 6 }; - int expected_last_idx[] = { 2, 3, 5, 6 }; - for (int i = 0; i < 4; i++) { - CHECK_EQ(expected_counts[i], array->number_of_entries(kTypes[i], kSmall)); - CHECK_EQ(expected_first_idx[i], array->first_index(kTypes[i], kSmall)); - CHECK_EQ(expected_last_idx[i], array->last_index(kTypes[i], kSmall)); + CHECK(builder.IsEmpty()); + while (builder.NextAccess(kDblType) == kRegAccess) { + access = builder.AddEntry(pos++, value); + value += 0.5; + CHECK_EQ(access, kRegAccess); } - CHECK(!array->is_extended_layout()); + CHECK(!builder.IsEmpty()); + CHECK_EQ(pos, kRegularCount); - // Check getters and setters. 
- int64_t big_number = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0); - Handle object = factory->NewHeapNumber(4.0, IMMUTABLE, TENURED); - Code* code = DummyCode(&context); - array->set(0, big_number); - array->set(1, 0.5); - array->set(2, 3e-24); - array->set(3, code->entry()); - array->set(4, code); - array->set(5, *object); - array->set(6, 50); - CHECK_EQ(big_number, array->get_int64_entry(0)); - CHECK_EQ(0.5, array->get_int64_entry_as_double(1)); - CHECK_EQ(3e-24, array->get_int64_entry_as_double(2)); - CHECK_EQ(code->entry(), array->get_code_ptr_entry(3)); - CHECK_EQ(code, array->get_heap_ptr_entry(4)); - CHECK_EQ(*object, array->get_heap_ptr_entry(5)); - CHECK_EQ(50, array->get_int32_entry(6)); + access = builder.AddEntry(pos, value); + CHECK_EQ(access, kOvflAccess); } -TEST(ConstantPoolExtended) { - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); +TEST(ConstantPoolMixedTypes) { + ConstantPoolBuilder builder(kReachBits, kReachBits); + const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) + + ((kPointerSize < kDoubleSize) ? 1 : 0)); + ConstantPoolEntry::Type type = kPtrType; + ConstantPoolEntry::Access access; + int pos = 0; + intptr_t ptrValue = 0; + double dblValue = 0.0; + bool sharing_ok = true; - // Check construction. - ConstantPoolArray::NumberOfEntries small(1, 2, 3, 4); - ConstantPoolArray::NumberOfEntries extended(5, 6, 7, 8); - Handle array = - factory->NewExtendedConstantPoolArray(small, extended); - - // Check small section. - int small_counts[] = { 1, 2, 3, 4 }; - int small_first_idx[] = { 0, 1, 3, 6 }; - int small_last_idx[] = { 0, 2, 5, 9 }; - for (int i = 0; i < 4; i++) { - CHECK_EQ(small_counts[i], array->number_of_entries(kTypes[i], kSmall)); - CHECK_EQ(small_first_idx[i], array->first_index(kTypes[i], kSmall)); - CHECK_EQ(small_last_idx[i], array->last_index(kTypes[i], kSmall)); - } - - // Check extended layout. 
- CHECK(array->is_extended_layout()); - int extended_counts[] = { 5, 6, 7, 8 }; - int extended_first_idx[] = { 10, 15, 21, 28 }; - int extended_last_idx[] = { 14, 20, 27, 35 }; - for (int i = 0; i < 4; i++) { - CHECK_EQ(extended_counts[i], - array->number_of_entries(kTypes[i], kExtended)); - CHECK_EQ(extended_first_idx[i], array->first_index(kTypes[i], kExtended)); - CHECK_EQ(extended_last_idx[i], array->last_index(kTypes[i], kExtended)); - } - - // Check small and large section's don't overlap. - int64_t small_section_int64 = V8_2PART_UINT64_C(0x56781234, DEF09ABC); - Code* small_section_code_ptr = DummyCode(&context); - Handle small_section_heap_ptr = - factory->NewHeapNumber(4.0, IMMUTABLE, TENURED); - int32_t small_section_int32 = 0xab12cd45; - - int64_t extended_section_int64 = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0); - Code* extended_section_code_ptr = DummyCode(&context); - Handle extended_section_heap_ptr = - factory->NewHeapNumber(5.0, IMMUTABLE, TENURED); - int32_t extended_section_int32 = 0xef67ab89; - - for (int i = array->first_index(ConstantPoolArray::INT64, kSmall); - i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) { - if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) { - array->set(i, small_section_int64); - } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) { - array->set(i, small_section_code_ptr->entry()); - } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) { - array->set(i, *small_section_heap_ptr); + CHECK(builder.IsEmpty()); + while (builder.NextAccess(type) == kRegAccess) { + if (type == kPtrType) { + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + type = kDblType; } else { - CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall)); - array->set(i, small_section_int32); + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + type = kPtrType; } + CHECK_EQ(access, kRegAccess); } - for (int i = array->first_index(ConstantPoolArray::INT64, 
kExtended); - i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) { - if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) { - array->set(i, extended_section_int64); - } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) { - array->set(i, extended_section_code_ptr->entry()); - } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) { - array->set(i, *extended_section_heap_ptr); + CHECK(!builder.IsEmpty()); + CHECK_EQ(pos, kRegularCount); + + access = builder.AddEntry(pos++, ptrValue, sharing_ok); + CHECK_EQ(access, kOvflAccess); + access = builder.AddEntry(pos, dblValue); + CHECK_EQ(access, kOvflAccess); +} + + +TEST(ConstantPoolMixedReach) { + const int ptrReachBits = kReachBits + 2; + const int ptrReach = 1 << ptrReachBits; + const int dblReachBits = kReachBits; + const int dblReach = kReach; + const int dblRegularCount = + Min(dblReach / kDoubleSize, ptrReach / (kDoubleSize + kPointerSize)); + const int ptrRegularCount = + ((ptrReach - (dblRegularCount * (kDoubleSize + kPointerSize))) / + kPointerSize) + + dblRegularCount; + ConstantPoolBuilder builder(ptrReachBits, dblReachBits); + ConstantPoolEntry::Access access; + int pos = 0; + intptr_t ptrValue = 0; + double dblValue = 0.0; + bool sharing_ok = true; + int ptrCount = 0; + int dblCount = 0; + + CHECK(builder.IsEmpty()); + while (builder.NextAccess(kDblType) == kRegAccess) { + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + dblCount++; + CHECK_EQ(access, kRegAccess); + + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + ptrCount++; + CHECK_EQ(access, kRegAccess); + } + CHECK(!builder.IsEmpty()); + CHECK_EQ(dblCount, dblRegularCount); + + while (ptrCount < ptrRegularCount) { + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + CHECK_EQ(access, kOvflAccess); + + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + ptrCount++; + CHECK_EQ(access, kRegAccess); + } + 
CHECK_EQ(builder.NextAccess(kPtrType), kOvflAccess); + + access = builder.AddEntry(pos++, ptrValue, sharing_ok); + CHECK_EQ(access, kOvflAccess); + access = builder.AddEntry(pos, dblValue); + CHECK_EQ(access, kOvflAccess); +} + + +TEST(ConstantPoolSharing) { + ConstantPoolBuilder builder(kReachBits, kReachBits); + const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) + + ((kPointerSize < kDoubleSize) ? 1 : 0)); + ConstantPoolEntry::Access access; + + CHECK(builder.IsEmpty()); + + ConstantPoolEntry::Type type = kPtrType; + int pos = 0; + intptr_t ptrValue = 0; + double dblValue = 0.0; + bool sharing_ok = true; + while (builder.NextAccess(type) == kRegAccess) { + if (type == kPtrType) { + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + type = kDblType; } else { - CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended)); - array->set(i, extended_section_int32); + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + type = kPtrType; + } + CHECK_EQ(access, kRegAccess); + } + CHECK(!builder.IsEmpty()); + CHECK_EQ(pos, kRegularCount); + + type = kPtrType; + ptrValue = 0; + dblValue = 0.0; + while (pos < kRegularCount * 2) { + if (type == kPtrType) { + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + type = kDblType; + } else { + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + type = kPtrType; + } + CHECK_EQ(access, kRegAccess); + } + + access = builder.AddEntry(pos++, ptrValue, sharing_ok); + CHECK_EQ(access, kOvflAccess); + access = builder.AddEntry(pos, dblValue); + CHECK_EQ(access, kOvflAccess); +} + + +TEST(ConstantPoolNoSharing) { + ConstantPoolBuilder builder(kReachBits, kReachBits); + const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) + + ((kPointerSize < kDoubleSize) ? 
1 : 0)); + ConstantPoolEntry::Access access; + + CHECK(builder.IsEmpty()); + + ConstantPoolEntry::Type type = kPtrType; + int pos = 0; + intptr_t ptrValue = 0; + double dblValue = 0.0; + bool sharing_ok = false; + while (builder.NextAccess(type) == kRegAccess) { + if (type == kPtrType) { + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + type = kDblType; + } else { + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + type = kPtrType; + } + CHECK_EQ(access, kRegAccess); + } + CHECK(!builder.IsEmpty()); + CHECK_EQ(pos, kRegularCount); + + type = kPtrType; + ptrValue = 0; + dblValue = 0.0; + sharing_ok = true; + while (pos < kRegularCount * 2) { + if (type == kPtrType) { + access = builder.AddEntry(pos++, ptrValue++, sharing_ok); + type = kDblType; + CHECK_EQ(access, kOvflAccess); + } else { + access = builder.AddEntry(pos++, dblValue); + dblValue += 0.5; + type = kPtrType; + CHECK_EQ(access, kRegAccess); } } - for (int i = array->first_index(ConstantPoolArray::INT64, kSmall); - i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) { - if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) { - CHECK_EQ(small_section_int64, array->get_int64_entry(i)); - } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) { - CHECK_EQ(small_section_code_ptr->entry(), array->get_code_ptr_entry(i)); - } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) { - CHECK_EQ(*small_section_heap_ptr, array->get_heap_ptr_entry(i)); - } else { - CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall)); - CHECK_EQ(small_section_int32, array->get_int32_entry(i)); - } - } - for (int i = array->first_index(ConstantPoolArray::INT64, kExtended); - i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) { - if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) { - CHECK_EQ(extended_section_int64, array->get_int64_entry(i)); - } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) { 
- CHECK_EQ(extended_section_code_ptr->entry(), - array->get_code_ptr_entry(i)); - } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) { - CHECK_EQ(*extended_section_heap_ptr, array->get_heap_ptr_entry(i)); - } else { - CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended)); - CHECK_EQ(extended_section_int32, array->get_int32_entry(i)); - } - } -} - - -static void CheckIterator(Handle array, - ConstantPoolArray::Type type, - int expected_indexes[], - int count) { - int i = 0; - ConstantPoolArray::Iterator iter(*array, type); - while (!iter.is_finished()) { - CHECK_EQ(expected_indexes[i++], iter.next_index()); - } - CHECK_EQ(count, i); -} - - -TEST(ConstantPoolIteratorSmall) { - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - - ConstantPoolArray::NumberOfEntries small(1, 5, 2, 0); - Handle array = factory->NewConstantPoolArray(small); - - int expected_int64_indexs[] = { 0 }; - CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 1); - int expected_code_indexs[] = { 1, 2, 3, 4, 5 }; - CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 5); - int expected_heap_indexs[] = { 6, 7 }; - CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 2); - int expected_int32_indexs[1]; - CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 0); -} - - -TEST(ConstantPoolIteratorExtended) { - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - - ConstantPoolArray::NumberOfEntries small(1, 0, 0, 4); - ConstantPoolArray::NumberOfEntries extended(5, 0, 3, 0); - Handle array = - factory->NewExtendedConstantPoolArray(small, extended); - - int expected_int64_indexs[] = { 0, 5, 6, 7, 8, 9 }; - CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 6); - int 
expected_code_indexs[1]; - CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 0); - int expected_heap_indexs[] = { 10, 11, 12 }; - CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 3); - int expected_int32_indexs[] = { 1, 2, 3, 4 }; - CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 4); -} - - -TEST(ConstantPoolPreciseGC) { - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Heap* heap = isolate->heap(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - - ConstantPoolArray::NumberOfEntries small(1, 0, 0, 1); - Handle array = factory->NewConstantPoolArray(small); - - // Check that the store buffer knows which entries are pointers and which are - // not. To do this, make non-pointer entries which look like new space - // pointers but are actually invalid and ensure the GC doesn't try to move - // them. - Handle object = factory->NewHeapNumber(4.0); - Object* raw_ptr = *object; - // If interpreted as a pointer, this should be right inside the heap number - // which will cause a crash when trying to lookup the 'map' pointer. - intptr_t invalid_ptr = reinterpret_cast(raw_ptr) + kInt32Size; - int32_t invalid_ptr_int32 = static_cast(invalid_ptr); - int64_t invalid_ptr_int64 = static_cast(invalid_ptr); - array->set(0, invalid_ptr_int64); - array->set(1, invalid_ptr_int32); - - // Ensure we perform a scan on scavenge for the constant pool's page. - MemoryChunk::FromAddress(array->address())->set_scan_on_scavenge(true); - heap->CollectGarbage(NEW_SPACE); - - // Check the object was moved by GC. - CHECK_NE(*object, raw_ptr); - - // Check the non-pointer entries weren't changed. 
- CHECK_EQ(invalid_ptr_int64, array->get_int64_entry(0)); - CHECK_EQ(invalid_ptr_int32, array->get_int32_entry(1)); -} - - -TEST(ConstantPoolCompacting) { - if (i::FLAG_never_compact) return; - i::FLAG_always_compact = true; - LocalContext context; - Isolate* isolate = CcTest::i_isolate(); - Heap* heap = isolate->heap(); - Factory* factory = isolate->factory(); - v8::HandleScope scope(context->GetIsolate()); - - ConstantPoolArray::NumberOfEntries small(0, 0, 1, 0); - ConstantPoolArray::NumberOfEntries extended(0, 0, 1, 0); - Handle array = - factory->NewExtendedConstantPoolArray(small, extended); - - // Start a second old-space page so that the heap pointer added to the - // constant pool array ends up on the an evacuation candidate page. - Page* first_page = heap->old_space()->anchor()->next_page(); - { - HandleScope scope(isolate); - int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB; - Handle temp = - factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED); - CHECK(heap->InOldSpace(temp->address())); - Handle heap_ptr = - factory->NewHeapNumber(5.0, IMMUTABLE, TENURED); - CHECK(heap->InOldSpace(heap_ptr->address())); - CHECK(!first_page->Contains(heap_ptr->address())); - array->set(0, *heap_ptr); - array->set(1, *heap_ptr); - } - - // Check heap pointers are correctly updated on GC. - Object* old_ptr = array->get_heap_ptr_entry(0); - Handle object(old_ptr, isolate); - CHECK_EQ(old_ptr, *object); - CHECK_EQ(old_ptr, array->get_heap_ptr_entry(1)); - - // Force compacting garbage collection. 
- CHECK(FLAG_always_compact); - heap->CollectAllGarbage(); - - CHECK_NE(old_ptr, *object); - CHECK_EQ(*object, array->get_heap_ptr_entry(0)); - CHECK_EQ(*object, array->get_heap_ptr_entry(1)); + access = builder.AddEntry(pos++, ptrValue, sharing_ok); + CHECK_EQ(access, kOvflAccess); + access = builder.AddEntry(pos, dblValue); + CHECK_EQ(access, kOvflAccess); } diff --git a/test/cctest/test-reloc-info.cc b/test/cctest/test-reloc-info.cc index bbb0116b83..829fd24f4d 100644 --- a/test/cctest/test-reloc-info.cc +++ b/test/cctest/test-reloc-info.cc @@ -66,8 +66,8 @@ TEST(Positions) { writer.Finish(); relocation_info_size = static_cast(buffer_end - writer.pos()); - CodeDesc desc = { buffer.get(), buffer_size, code_size, - relocation_info_size, NULL }; + CodeDesc desc = {buffer.get(), buffer_size, code_size, relocation_info_size, + 0, NULL}; // Read only (non-statement) positions. {