[assembler] Move ConstantPools out of src/assembler.h

Drive-by: Eliminate unnecessary includes of src/assembler.h.

Bug: v8:8238
Change-Id: Ia0408b993b8b1c21a76c947f406f96b63fe41994
Reviewed-on: https://chromium-review.googlesource.com/c/1288810
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56817}
Authored by Sigurd Schneider on 2018-10-19 12:17:59 +02:00, committed by Commit Bot
parent cf38caeab1
commit 1dc1d1ae4e
35 changed files with 422 additions and 412 deletions
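
For illustration, a minimal hypothetical consumer (not part of this commit): after the split, code that needs the constant-pool types includes the new src/constant-pool.h directly instead of picking it up transitively through src/assembler.h. The helper name below is made up; the ConstantPoolEntry constructor is the one declared in the new header.

// Hypothetical consumer, for illustration only.
#include "src/constant-pool.h"

namespace v8 {
namespace internal {

// ConstantPoolEntry is available on every architecture; ConstantPoolBuilder
// stays PPC-only behind V8_TARGET_ARCH_PPC (see the new header further down).
ConstantPoolEntry MakeSharedIntPtrEntry(int pc_offset, intptr_t value) {
  return ConstantPoolEntry(pc_offset, value, /* sharing_ok= */ true);
}

}  // namespace internal
}  // namespace v8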

@@ -1908,6 +1908,8 @@ v8_source_set("v8_base") {
"src/compiler/wasm-compiler.h",
"src/compiler/zone-stats.cc",
"src/compiler/zone-stats.h",
"src/constant-pool.cc",
"src/constant-pool.h",
"src/contexts-inl.h",
"src/contexts.cc",
"src/contexts.h",

@@ -46,6 +46,7 @@
#include "src/arm/constants-arm.h"
#include "src/assembler.h"
#include "src/boxed-float.h"
#include "src/constant-pool.h"
#include "src/double.h"
namespace v8 {
@@ -1521,13 +1522,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
}
}
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
// Move a 32-bit immediate into a register, potentially via the constant pool.
void Move32BitImmediate(Register rd, const Operand& x, Condition cond = al);

@@ -1901,6 +1901,10 @@ void TurboAssembler::Abort(AbortReason reason) {
// will not return here
}
void MacroAssembler::LoadGlobalProxy(Register dst) {
LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
}
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
ldr(dst, NativeContextMemOperand());
ldr(dst, ContextMemOperand(dst, index));

@@ -8,6 +8,7 @@
#include "src/arm/assembler-arm.h"
#include "src/assembler.h"
#include "src/bailout-reason.h"
#include "src/contexts.h"
#include "src/globals.h"
#include "src/turbo-assembler.h"
@@ -656,9 +657,7 @@ class MacroAssembler : public TurboAssembler {
bool argument_count_is_length = false);
// Load the global proxy from the current context.
void LoadGlobalProxy(Register dst) {
LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
}
void LoadGlobalProxy(Register dst);
void LoadNativeContextSlot(int index, Register dst);

@@ -14,10 +14,10 @@
#include "src/arm64/instructions-arm64.h"
#include "src/assembler.h"
#include "src/base/optional.h"
#include "src/constant-pool.h"
#include "src/globals.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
@@ -3169,13 +3169,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// Check if is time to emit a constant pool.
void CheckConstPool(bool force_emit, bool require_jump);
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
// Returns true if we should emit a veneer as soon as possible for a branch
// which can at most reach to specified pc.
bool ShouldEmitVeneer(int max_reachable_pc,

@@ -7,13 +7,14 @@
#include "src/arm64/constants-arm64.h"
#include "src/arm64/utils-arm64.h"
#include "src/assembler.h"
#include "src/globals.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
struct AssemblerOptions;
// ISA constants. --------------------------------------------------------------
typedef uint32_t Instr;

@@ -2531,6 +2531,10 @@ void MacroAssembler::LeaveExitFrame(bool restore_doubles,
Pop(fp, lr);
}
void MacroAssembler::LoadGlobalProxy(Register dst) {
LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
}
void MacroAssembler::LoadWeakValue(Register out, Register in,
Label* target_if_cleared) {
CompareAndBranch(in, Operand(kClearedWeakHeapObject), eq, target_if_cleared);

@@ -1897,9 +1897,7 @@ class MacroAssembler : public TurboAssembler {
const Register& scratch2);
// Load the global proxy from the current context.
void LoadGlobalProxy(Register dst) {
LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
}
void LoadGlobalProxy(Register dst);
// ---------------------------------------------------------------------------
// In-place weak references.

@@ -164,202 +164,6 @@ unsigned CpuFeatures::supported_ = 0;
unsigned CpuFeatures::icache_line_size_ = 0;
unsigned CpuFeatures::dcache_line_size_ = 0;
ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
int double_reach_bits) {
info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
}
ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
ConstantPoolEntry::Type type) const {
const PerTypeEntryInfo& info = info_[type];
if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;
int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
int dbl_offset = dbl_count * kDoubleSize;
int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
int ptr_offset = ptr_count * kPointerSize + dbl_offset;
if (type == ConstantPoolEntry::DOUBLE) {
// Double overflow detection must take into account the reach for both types
int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
(ptr_count > 0 &&
!is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
return ConstantPoolEntry::OVERFLOWED;
}
} else {
DCHECK(type == ConstantPoolEntry::INTPTR);
if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
return ConstantPoolEntry::OVERFLOWED;
}
}
return ConstantPoolEntry::REGULAR;
}
ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
DCHECK(!emitted_label_.is_bound());
PerTypeEntryInfo& info = info_[type];
const int entry_size = ConstantPoolEntry::size(type);
bool merged = false;
if (entry.sharing_ok()) {
// Try to merge entries
std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
int end = static_cast<int>(info.shared_entries.size());
for (int i = 0; i < end; i++, it++) {
if ((entry_size == kPointerSize) ? entry.value() == it->value()
: entry.value64() == it->value64()) {
// Merge with found entry.
entry.set_merged_index(i);
merged = true;
break;
}
}
}
// By definition, merged entries have regular access.
DCHECK(!merged || entry.merged_index() < info.regular_count);
ConstantPoolEntry::Access access =
(merged ? ConstantPoolEntry::REGULAR : NextAccess(type));
// Enforce an upper bound on search time by limiting the search to
// unique sharable entries which fit in the regular section.
if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
info.shared_entries.push_back(entry);
} else {
info.entries.push_back(entry);
}
// We're done if we found a match or have already triggered the
// overflow state.
if (merged || info.overflow()) return access;
if (access == ConstantPoolEntry::REGULAR) {
info.regular_count++;
} else {
info.overflow_start = static_cast<int>(info.entries.size()) - 1;
}
return access;
}
void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
ConstantPoolEntry::Type type) {
PerTypeEntryInfo& info = info_[type];
std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
const int entry_size = ConstantPoolEntry::size(type);
int base = emitted_label_.pos();
DCHECK_GT(base, 0);
int shared_end = static_cast<int>(shared_entries.size());
std::vector<ConstantPoolEntry>::iterator shared_it = shared_entries.begin();
for (int i = 0; i < shared_end; i++, shared_it++) {
int offset = assm->pc_offset() - base;
shared_it->set_offset(offset); // Save offset for merged entries.
if (entry_size == kPointerSize) {
assm->dp(shared_it->value());
} else {
assm->dq(shared_it->value64());
}
DCHECK(is_uintn(offset, info.regular_reach_bits));
// Patch load sequence with correct offset.
assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,
ConstantPoolEntry::REGULAR, type);
}
}
void ConstantPoolBuilder::EmitGroup(Assembler* assm,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
PerTypeEntryInfo& info = info_[type];
const bool overflow = info.overflow();
std::vector<ConstantPoolEntry>& entries = info.entries;
std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
const int entry_size = ConstantPoolEntry::size(type);
int base = emitted_label_.pos();
DCHECK_GT(base, 0);
int begin;
int end;
if (access == ConstantPoolEntry::REGULAR) {
// Emit any shared entries first
EmitSharedEntries(assm, type);
}
if (access == ConstantPoolEntry::REGULAR) {
begin = 0;
end = overflow ? info.overflow_start : static_cast<int>(entries.size());
} else {
DCHECK(access == ConstantPoolEntry::OVERFLOWED);
if (!overflow) return;
begin = info.overflow_start;
end = static_cast<int>(entries.size());
}
std::vector<ConstantPoolEntry>::iterator it = entries.begin();
if (begin > 0) std::advance(it, begin);
for (int i = begin; i < end; i++, it++) {
// Update constant pool if necessary and get the entry's offset.
int offset;
ConstantPoolEntry::Access entry_access;
if (!it->is_merged()) {
// Emit new entry
offset = assm->pc_offset() - base;
entry_access = access;
if (entry_size == kPointerSize) {
assm->dp(it->value());
} else {
assm->dq(it->value64());
}
} else {
// Retrieve offset from shared entry.
offset = shared_entries[it->merged_index()].offset();
entry_access = ConstantPoolEntry::REGULAR;
}
DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
is_uintn(offset, info.regular_reach_bits));
// Patch load sequence with correct offset.
assm->PatchConstantPoolAccessInstruction(it->position(), offset,
entry_access, type);
}
}
// Emit and return position of pool. Zero implies no constant pool.
int ConstantPoolBuilder::Emit(Assembler* assm) {
bool emitted = emitted_label_.is_bound();
bool empty = IsEmpty();
if (!emitted) {
// Mark start of constant pool. Align if necessary.
if (!empty) assm->DataAlign(kDoubleSize);
assm->bind(&emitted_label_);
if (!empty) {
// Emit in groups based on access and type.
// Emit doubles first for alignment purposes.
EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
assm->DataAlign(kDoubleSize);
EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
ConstantPoolEntry::DOUBLE);
}
if (info_[ConstantPoolEntry::INTPTR].overflow()) {
EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
ConstantPoolEntry::INTPTR);
}
}
}
return !empty ? emitted_label_.pos() : 0;
}
HeapObjectRequest::HeapObjectRequest(double heap_number, int offset)
: kind_(kHeapNumber), offset_(offset) {
value_.heap_number = heap_number;
@@ -438,5 +242,9 @@ void AssemblerBase::UpdateCodeTarget(intptr_t code_target_index,
code_targets_[code_target_index] = code;
}
void AssemblerBase::ReserveCodeTargetSpace(size_t num_of_code_targets) {
code_targets_.reserve(num_of_code_targets);
}
} // namespace internal
} // namespace v8

@@ -36,18 +36,11 @@
#define V8_ASSEMBLER_H_
#include <forward_list>
#include <iosfwd>
#include <map>
#include "src/allocation.h"
#include "src/code-reference.h"
#include "src/contexts.h"
#include "src/deoptimize-reason.h"
#include "src/double.h"
#include "src/external-reference.h"
#include "src/flags.h"
#include "src/globals.h"
#include "src/label.h"
#include "src/handles.h"
#include "src/objects.h"
#include "src/register-configuration.h"
#include "src/reglist.h"
@@ -259,9 +252,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
// Update to the code target at {code_target_index} to {target}.
void UpdateCodeTarget(intptr_t code_target_index, Handle<Code> target);
// Reserves space in the code target vector.
void ReserveCodeTargetSpace(size_t num_of_code_targets) {
code_targets_.reserve(num_of_code_targets);
}
void ReserveCodeTargetSpace(size_t num_of_code_targets);
// The buffer into which code and relocation info are generated. It could
// either be owned by the assembler or be provided externally.
@@ -449,141 +440,6 @@ double power_double_int(double x, int y);
double power_double_double(double x, double y);
// -----------------------------------------------------------------------------
// Constant pool support
class ConstantPoolEntry {
public:
ConstantPoolEntry() = default;
ConstantPoolEntry(int position, intptr_t value, bool sharing_ok,
RelocInfo::Mode rmode = RelocInfo::NONE)
: position_(position),
merged_index_(sharing_ok ? SHARING_ALLOWED : SHARING_PROHIBITED),
value_(value),
rmode_(rmode) {}
ConstantPoolEntry(int position, Double value,
RelocInfo::Mode rmode = RelocInfo::NONE)
: position_(position),
merged_index_(SHARING_ALLOWED),
value64_(value.AsUint64()),
rmode_(rmode) {}
int position() const { return position_; }
bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; }
bool is_merged() const { return merged_index_ >= 0; }
int merged_index() const {
DCHECK(is_merged());
return merged_index_;
}
void set_merged_index(int index) {
DCHECK(sharing_ok());
merged_index_ = index;
DCHECK(is_merged());
}
int offset() const {
DCHECK_GE(merged_index_, 0);
return merged_index_;
}
void set_offset(int offset) {
DCHECK_GE(offset, 0);
merged_index_ = offset;
}
intptr_t value() const { return value_; }
uint64_t value64() const { return value64_; }
RelocInfo::Mode rmode() const { return rmode_; }
enum Type { INTPTR, DOUBLE, NUMBER_OF_TYPES };
static int size(Type type) {
return (type == INTPTR) ? kPointerSize : kDoubleSize;
}
enum Access { REGULAR, OVERFLOWED };
private:
int position_;
int merged_index_;
union {
intptr_t value_;
uint64_t value64_;
};
// TODO(leszeks): The way we use this, it could probably be packed into
// merged_index_ if size is a concern.
RelocInfo::Mode rmode_;
enum { SHARING_PROHIBITED = -2, SHARING_ALLOWED = -1 };
};
// -----------------------------------------------------------------------------
// Embedded constant pool support
class ConstantPoolBuilder {
public:
ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits);
// Add pointer-sized constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, intptr_t value,
bool sharing_ok) {
ConstantPoolEntry entry(position, value, sharing_ok);
return AddEntry(entry, ConstantPoolEntry::INTPTR);
}
// Add double constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, Double value) {
ConstantPoolEntry entry(position, value);
return AddEntry(entry, ConstantPoolEntry::DOUBLE);
}
// Add double constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, double value) {
return AddEntry(position, Double(value));
}
// Previews the access type required for the next new entry to be added.
ConstantPoolEntry::Access NextAccess(ConstantPoolEntry::Type type) const;
bool IsEmpty() {
return info_[ConstantPoolEntry::INTPTR].entries.empty() &&
info_[ConstantPoolEntry::INTPTR].shared_entries.empty() &&
info_[ConstantPoolEntry::DOUBLE].entries.empty() &&
info_[ConstantPoolEntry::DOUBLE].shared_entries.empty();
}
// Emit the constant pool. Invoke only after all entries have been
// added and all instructions have been emitted.
// Returns position of the emitted pool (zero implies no constant pool).
int Emit(Assembler* assm);
// Returns the label associated with the start of the constant pool.
// Linking to this label in the function prologue may provide an
// efficient means of constant pool pointer register initialization
// on some architectures.
inline Label* EmittedPosition() { return &emitted_label_; }
private:
ConstantPoolEntry::Access AddEntry(ConstantPoolEntry& entry,
ConstantPoolEntry::Type type);
void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type);
void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type);
struct PerTypeEntryInfo {
PerTypeEntryInfo() : regular_count(0), overflow_start(-1) {}
bool overflow() const {
return (overflow_start >= 0 &&
overflow_start < static_cast<int>(entries.size()));
}
int regular_reach_bits;
int regular_count;
int overflow_start;
std::vector<ConstantPoolEntry> entries;
std::vector<ConstantPoolEntry> shared_entries;
};
Label emitted_label_; // Records pc_offset of emitted pool
PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES];
};
// Base type for CPU Registers.
//
// 1) We would prefer to use an enum for registers, but enum values are

@@ -4,8 +4,8 @@
#include "src/compiler/common-node-cache.h"
#include "src/assembler.h"
#include "src/compiler/node.h"
#include "src/external-reference.h"
namespace v8 {
namespace internal {

src/constant-pool.cc (new file)

@@ -0,0 +1,212 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/constant-pool.h"
#include "src/assembler-arch-inl.h"
namespace v8 {
namespace internal {
#if defined(V8_TARGET_ARCH_PPC)
ConstantPoolBuilder::ConstantPoolBuilder(int ptr_reach_bits,
int double_reach_bits) {
info_[ConstantPoolEntry::INTPTR].entries.reserve(64);
info_[ConstantPoolEntry::INTPTR].regular_reach_bits = ptr_reach_bits;
info_[ConstantPoolEntry::DOUBLE].regular_reach_bits = double_reach_bits;
}
ConstantPoolEntry::Access ConstantPoolBuilder::NextAccess(
ConstantPoolEntry::Type type) const {
const PerTypeEntryInfo& info = info_[type];
if (info.overflow()) return ConstantPoolEntry::OVERFLOWED;
int dbl_count = info_[ConstantPoolEntry::DOUBLE].regular_count;
int dbl_offset = dbl_count * kDoubleSize;
int ptr_count = info_[ConstantPoolEntry::INTPTR].regular_count;
int ptr_offset = ptr_count * kPointerSize + dbl_offset;
if (type == ConstantPoolEntry::DOUBLE) {
// Double overflow detection must take into account the reach for both types
int ptr_reach_bits = info_[ConstantPoolEntry::INTPTR].regular_reach_bits;
if (!is_uintn(dbl_offset, info.regular_reach_bits) ||
(ptr_count > 0 &&
!is_uintn(ptr_offset + kDoubleSize - kPointerSize, ptr_reach_bits))) {
return ConstantPoolEntry::OVERFLOWED;
}
} else {
DCHECK(type == ConstantPoolEntry::INTPTR);
if (!is_uintn(ptr_offset, info.regular_reach_bits)) {
return ConstantPoolEntry::OVERFLOWED;
}
}
return ConstantPoolEntry::REGULAR;
}
ConstantPoolEntry::Access ConstantPoolBuilder::AddEntry(
ConstantPoolEntry& entry, ConstantPoolEntry::Type type) {
DCHECK(!emitted_label_.is_bound());
PerTypeEntryInfo& info = info_[type];
const int entry_size = ConstantPoolEntry::size(type);
bool merged = false;
if (entry.sharing_ok()) {
// Try to merge entries
std::vector<ConstantPoolEntry>::iterator it = info.shared_entries.begin();
int end = static_cast<int>(info.shared_entries.size());
for (int i = 0; i < end; i++, it++) {
if ((entry_size == kPointerSize) ? entry.value() == it->value()
: entry.value64() == it->value64()) {
// Merge with found entry.
entry.set_merged_index(i);
merged = true;
break;
}
}
}
// By definition, merged entries have regular access.
DCHECK(!merged || entry.merged_index() < info.regular_count);
ConstantPoolEntry::Access access =
(merged ? ConstantPoolEntry::REGULAR : NextAccess(type));
// Enforce an upper bound on search time by limiting the search to
// unique sharable entries which fit in the regular section.
if (entry.sharing_ok() && !merged && access == ConstantPoolEntry::REGULAR) {
info.shared_entries.push_back(entry);
} else {
info.entries.push_back(entry);
}
// We're done if we found a match or have already triggered the
// overflow state.
if (merged || info.overflow()) return access;
if (access == ConstantPoolEntry::REGULAR) {
info.regular_count++;
} else {
info.overflow_start = static_cast<int>(info.entries.size()) - 1;
}
return access;
}
void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,
ConstantPoolEntry::Type type) {
PerTypeEntryInfo& info = info_[type];
std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
const int entry_size = ConstantPoolEntry::size(type);
int base = emitted_label_.pos();
DCHECK_GT(base, 0);
int shared_end = static_cast<int>(shared_entries.size());
std::vector<ConstantPoolEntry>::iterator shared_it = shared_entries.begin();
for (int i = 0; i < shared_end; i++, shared_it++) {
int offset = assm->pc_offset() - base;
shared_it->set_offset(offset); // Save offset for merged entries.
if (entry_size == kPointerSize) {
assm->dp(shared_it->value());
} else {
assm->dq(shared_it->value64());
}
DCHECK(is_uintn(offset, info.regular_reach_bits));
// Patch load sequence with correct offset.
assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,
ConstantPoolEntry::REGULAR, type);
}
}
void ConstantPoolBuilder::EmitGroup(Assembler* assm,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
PerTypeEntryInfo& info = info_[type];
const bool overflow = info.overflow();
std::vector<ConstantPoolEntry>& entries = info.entries;
std::vector<ConstantPoolEntry>& shared_entries = info.shared_entries;
const int entry_size = ConstantPoolEntry::size(type);
int base = emitted_label_.pos();
DCHECK_GT(base, 0);
int begin;
int end;
if (access == ConstantPoolEntry::REGULAR) {
// Emit any shared entries first
EmitSharedEntries(assm, type);
}
if (access == ConstantPoolEntry::REGULAR) {
begin = 0;
end = overflow ? info.overflow_start : static_cast<int>(entries.size());
} else {
DCHECK(access == ConstantPoolEntry::OVERFLOWED);
if (!overflow) return;
begin = info.overflow_start;
end = static_cast<int>(entries.size());
}
std::vector<ConstantPoolEntry>::iterator it = entries.begin();
if (begin > 0) std::advance(it, begin);
for (int i = begin; i < end; i++, it++) {
// Update constant pool if necessary and get the entry's offset.
int offset;
ConstantPoolEntry::Access entry_access;
if (!it->is_merged()) {
// Emit new entry
offset = assm->pc_offset() - base;
entry_access = access;
if (entry_size == kPointerSize) {
assm->dp(it->value());
} else {
assm->dq(it->value64());
}
} else {
// Retrieve offset from shared entry.
offset = shared_entries[it->merged_index()].offset();
entry_access = ConstantPoolEntry::REGULAR;
}
DCHECK(entry_access == ConstantPoolEntry::OVERFLOWED ||
is_uintn(offset, info.regular_reach_bits));
// Patch load sequence with correct offset.
assm->PatchConstantPoolAccessInstruction(it->position(), offset,
entry_access, type);
}
}
// Emit and return position of pool. Zero implies no constant pool.
int ConstantPoolBuilder::Emit(Assembler* assm) {
bool emitted = emitted_label_.is_bound();
bool empty = IsEmpty();
if (!emitted) {
// Mark start of constant pool. Align if necessary.
if (!empty) assm->DataAlign(kDoubleSize);
assm->bind(&emitted_label_);
if (!empty) {
// Emit in groups based on access and type.
// Emit doubles first for alignment purposes.
EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::DOUBLE);
EmitGroup(assm, ConstantPoolEntry::REGULAR, ConstantPoolEntry::INTPTR);
if (info_[ConstantPoolEntry::DOUBLE].overflow()) {
assm->DataAlign(kDoubleSize);
EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
ConstantPoolEntry::DOUBLE);
}
if (info_[ConstantPoolEntry::INTPTR].overflow()) {
EmitGroup(assm, ConstantPoolEntry::OVERFLOWED,
ConstantPoolEntry::INTPTR);
}
}
}
return !empty ? emitted_label_.pos() : 0;
}
#endif // defined(V8_TARGET_ARCH_PPC)
} // namespace internal
} // namespace v8

src/constant-pool.h (new file)

@@ -0,0 +1,159 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_CONSTANT_POOL_H_
#define V8_CONSTANT_POOL_H_
#include <map>
#include "src/double.h"
#include "src/globals.h"
#include "src/label.h"
#include "src/reloc-info.h"
namespace v8 {
namespace internal {
// -----------------------------------------------------------------------------
// Constant pool support
class ConstantPoolEntry {
public:
ConstantPoolEntry() = default;
ConstantPoolEntry(int position, intptr_t value, bool sharing_ok,
RelocInfo::Mode rmode = RelocInfo::NONE)
: position_(position),
merged_index_(sharing_ok ? SHARING_ALLOWED : SHARING_PROHIBITED),
value_(value),
rmode_(rmode) {}
ConstantPoolEntry(int position, Double value,
RelocInfo::Mode rmode = RelocInfo::NONE)
: position_(position),
merged_index_(SHARING_ALLOWED),
value64_(value.AsUint64()),
rmode_(rmode) {}
int position() const { return position_; }
bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; }
bool is_merged() const { return merged_index_ >= 0; }
int merged_index() const {
DCHECK(is_merged());
return merged_index_;
}
void set_merged_index(int index) {
DCHECK(sharing_ok());
merged_index_ = index;
DCHECK(is_merged());
}
int offset() const {
DCHECK_GE(merged_index_, 0);
return merged_index_;
}
void set_offset(int offset) {
DCHECK_GE(offset, 0);
merged_index_ = offset;
}
intptr_t value() const { return value_; }
uint64_t value64() const { return value64_; }
RelocInfo::Mode rmode() const { return rmode_; }
enum Type { INTPTR, DOUBLE, NUMBER_OF_TYPES };
static int size(Type type) {
return (type == INTPTR) ? kPointerSize : kDoubleSize;
}
enum Access { REGULAR, OVERFLOWED };
private:
int position_;
int merged_index_;
union {
intptr_t value_;
uint64_t value64_;
};
// TODO(leszeks): The way we use this, it could probably be packed into
// merged_index_ if size is a concern.
RelocInfo::Mode rmode_;
enum { SHARING_PROHIBITED = -2, SHARING_ALLOWED = -1 };
};
#if defined(V8_TARGET_ARCH_PPC)
// -----------------------------------------------------------------------------
// Embedded constant pool support
class ConstantPoolBuilder {
public:
ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits);
// Add pointer-sized constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, intptr_t value,
bool sharing_ok) {
ConstantPoolEntry entry(position, value, sharing_ok);
return AddEntry(entry, ConstantPoolEntry::INTPTR);
}
// Add double constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, Double value) {
ConstantPoolEntry entry(position, value);
return AddEntry(entry, ConstantPoolEntry::DOUBLE);
}
// Add double constant to the embedded constant pool
ConstantPoolEntry::Access AddEntry(int position, double value) {
return AddEntry(position, Double(value));
}
// Previews the access type required for the next new entry to be added.
ConstantPoolEntry::Access NextAccess(ConstantPoolEntry::Type type) const;
bool IsEmpty() {
return info_[ConstantPoolEntry::INTPTR].entries.empty() &&
info_[ConstantPoolEntry::INTPTR].shared_entries.empty() &&
info_[ConstantPoolEntry::DOUBLE].entries.empty() &&
info_[ConstantPoolEntry::DOUBLE].shared_entries.empty();
}
// Emit the constant pool. Invoke only after all entries have been
// added and all instructions have been emitted.
// Returns position of the emitted pool (zero implies no constant pool).
int Emit(Assembler* assm);
// Returns the label associated with the start of the constant pool.
// Linking to this label in the function prologue may provide an
// efficient means of constant pool pointer register initialization
// on some architectures.
inline Label* EmittedPosition() { return &emitted_label_; }
private:
ConstantPoolEntry::Access AddEntry(ConstantPoolEntry& entry,
ConstantPoolEntry::Type type);
void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type);
void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type);
struct PerTypeEntryInfo {
PerTypeEntryInfo() : regular_count(0), overflow_start(-1) {}
bool overflow() const {
return (overflow_start >= 0 &&
overflow_start < static_cast<int>(entries.size()));
}
int regular_reach_bits;
int regular_count;
int overflow_start;
std::vector<ConstantPoolEntry> entries;
std::vector<ConstantPoolEntry> shared_entries;
};
Label emitted_label_; // Records pc_offset of emitted pool
PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES];
};
#endif // defined(V8_TARGET_ARCH_PPC)
} // namespace internal
} // namespace v8
#endif // V8_CONSTANT_POOL_H_
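
A rough usage sketch of the PPC-only ConstantPoolBuilder declared above. The helper function and the reach-bit values are hypothetical; AddEntry, Emit, and pc_offset come from the header above and from AssemblerBase.

#include "src/assembler.h"
#include "src/constant-pool.h"

namespace v8 {
namespace internal {

#if defined(V8_TARGET_ARCH_PPC)
// Hypothetical helper: record one pointer-sized and one double constant at
// the current pc offsets, then emit the pool once code generation is done.
int EmitExampleConstantPool(Assembler* assm, intptr_t ptr_value) {
  ConstantPoolBuilder builder(/* ptr_reach_bits= */ 16,
                              /* double_reach_bits= */ 16);
  // Each entry records the pc offset of the load that is later patched via
  // PatchConstantPoolAccessInstruction to address its pool slot.
  builder.AddEntry(assm->pc_offset(), ptr_value, /* sharing_ok= */ true);
  builder.AddEntry(assm->pc_offset(), 1.0);
  // Emit() binds EmittedPosition() and returns the pool's position; zero
  // means the pool was empty.
  return builder.Emit(assm);
}
#endif  // defined(V8_TARGET_ARCH_PPC)

}  // namespace internal
}  // namespace v8

As the comment on EmittedPosition() notes, a function prologue can link against that label to initialize a constant-pool pointer register.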

@@ -43,6 +43,7 @@
#include "src/ia32/constants-ia32.h"
#include "src/ia32/sse-instr.h"
#include "src/isolate.h"
#include "src/label.h"
#include "src/utils.h"
namespace v8 {
@@ -1779,13 +1780,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
// Temporary helper data structures while adding kRootRegister support to ia32
// builtins. The SupportsRootRegisterScope is intended to mark each builtin
// and helper that fully supports the root register, i.e. that does not

@@ -9,7 +9,7 @@
#include "src/assembler-inl.h"
#include "src/debug/debug.h"
#include "src/macro-assembler.h"
#include "src/handles-inl.h"
#include "src/prototype.h"
namespace v8 {

@@ -11,7 +11,6 @@
#include "src/heap/factory.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/macro-assembler.h"
#include "src/message-template.h"
#include "src/objects/map.h"
#include "src/objects/maybe-object.h"

@@ -5,7 +5,6 @@
#ifndef V8_IC_STUB_CACHE_H_
#define V8_IC_STUB_CACHE_H_
#include "src/macro-assembler.h"
#include "src/objects/name.h"
namespace v8 {

@@ -41,6 +41,8 @@
#include <set>
#include "src/assembler.h"
#include "src/external-reference.h"
#include "src/label.h"
#include "src/mips/constants-mips.h"
namespace v8 {
@@ -1847,13 +1849,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
void CheckTrampolinePool();
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
bool IsPrevInstrCompactBranch() { return prev_instr_compact_branch_; }
static bool IsCompactBranchSupported() {
return IsMipsArchVariant(kMips32r6);

@@ -6,6 +6,7 @@
#define V8_MIPS_MACRO_ASSEMBLER_MIPS_H_
#include "src/assembler.h"
#include "src/contexts.h"
#include "src/globals.h"
#include "src/mips/assembler-mips.h"
#include "src/turbo-assembler.h"

@@ -40,6 +40,9 @@
#include <set>
#include "src/assembler.h"
#include "src/contexts.h"
#include "src/external-reference.h"
#include "src/label.h"
#include "src/mips64/constants-mips64.h"
namespace v8 {
@@ -1905,13 +1908,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
void CheckTrampolinePool();
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
bool IsPrevInstrCompactBranch() { return prev_instr_compact_branch_; }
static bool IsCompactBranchSupported() { return kArchVariant == kMips64r6; }

@@ -44,7 +44,10 @@
#include <vector>
#include "src/assembler.h"
#include "src/constant-pool.h"
#include "src/double.h"
#include "src/external-reference.h"
#include "src/label.h"
#include "src/ppc/constants-ppc.h"
#if V8_HOST_ARCH_PPC && \

@@ -7,6 +7,7 @@
#include "src/assembler.h"
#include "src/bailout-reason.h"
#include "src/contexts.h"
#include "src/double.h"
#include "src/globals.h"
#include "src/ppc/assembler-ppc.h"

@@ -6,7 +6,6 @@
#define V8_REGEXP_JSREGEXP_H_
#include "src/allocation.h"
#include "src/assembler.h"
#include "src/isolate.h"
#include "src/objects/js-regexp.h"
#include "src/regexp/regexp-ast.h"

@@ -7,6 +7,8 @@
#ifdef V8_INTERPRETED_REGEXP
#include "src/regexp/regexp-macro-assembler-irregexp.h"
#include "src/ast/ast.h"
#include "src/regexp/bytecodes-irregexp.h"

@@ -5,7 +5,7 @@
#ifndef V8_REGEXP_REGEXP_MACRO_ASSEMBLER_H_
#define V8_REGEXP_REGEXP_MACRO_ASSEMBLER_H_
#include "src/assembler.h"
#include "src/label.h"
#include "src/regexp/regexp-ast.h"
namespace v8 {

@@ -5,6 +5,7 @@
#include "src/reloc-info.h"
#include "src/assembler-arch-inl.h"
#include "src/code-reference.h"
#include "src/code-stubs.h"
#include "src/deoptimize-reason.h"
#include "src/deoptimizer.h"

@@ -1504,13 +1504,6 @@ inline void ss_a_format(Opcode op, int f1, int f2, int f3, int f4, int f5) {
void dq(uint64_t data);
void dp(uintptr_t data);
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
// Read/patch instructions
SixByteInstr instr_at(int pos) {
return Instruction::InstructionBits(buffer_ + pos);

@@ -7,6 +7,7 @@
#include "src/assembler.h"
#include "src/bailout-reason.h"
#include "src/contexts.h"
#include "src/globals.h"
#include "src/s390/assembler-s390.h"
#include "src/turbo-assembler.h"

@@ -4,7 +4,6 @@
#include "src/simulator-base.h"
#include "src/assembler.h"
#include "src/isolate.h"
#include "src/simulator.h"

@@ -38,10 +38,11 @@
#define V8_X64_ASSEMBLER_X64_H_
#include <deque>
#include <forward_list>
#include <map>
#include <vector>
#include "src/assembler.h"
#include "src/label.h"
#include "src/x64/constants-x64.h"
#include "src/x64/sse-instr.h"
@@ -1923,12 +1924,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
void RecordDeoptReason(DeoptimizeReason reason, SourcePosition position,
int id);
void PatchConstantPoolAccessInstruction(int pc_offset, int offset,
ConstantPoolEntry::Access access,
ConstantPoolEntry::Type type) {
// No embedded constant pool support.
UNREACHABLE();
}
// Writes a single word of data in the code stream.
// Used for inline tables, e.g., jump-tables.

@@ -4,12 +4,11 @@
#if V8_TARGET_ARCH_X64
#include "src/assembler.h"
#include "src/x64/frame-constants-x64.h"
#include "src/frame-constants.h"
#include "src/x64/assembler-x64-inl.h"
#include "src/x64/assembler-x64.h"
#include "src/x64/frame-constants-x64.h"
namespace v8 {
namespace internal {

@@ -7,6 +7,7 @@
#include "src/bailout-reason.h"
#include "src/base/flags.h"
#include "src/contexts.h"
#include "src/globals.h"
#include "src/turbo-assembler.h"
#include "src/x64/assembler-x64.h"

@@ -6,12 +6,14 @@
#include "src/v8.h"
#include "src/assembler.h"
#include "src/constant-pool.h"
#include "test/cctest/cctest.h"
namespace v8 {
namespace internal {
#if defined(V8_TARGET_ARCH_PPC)
const ConstantPoolEntry::Type kPtrType = ConstantPoolEntry::INTPTR;
const ConstantPoolEntry::Type kDblType = ConstantPoolEntry::DOUBLE;
const ConstantPoolEntry::Access kRegAccess = ConstantPoolEntry::REGULAR;
@@ -247,5 +249,7 @@ TEST(ConstantPoolNoSharing) {
CHECK_EQ(access, kOvflAccess);
}
#endif // defined(V8_TARGET_ARCH_PPC)
} // namespace internal
} // namespace v8

@@ -18,7 +18,6 @@
#include "src/heap/spaces.h"
#include "src/ic/ic.h"
#include "src/layout-descriptor.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks.h"
#include "src/property.h"

@@ -6,7 +6,6 @@
#include <vector>
#include "src/assembler.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-properties.h"