Wrap back edge table in an iterator.

R=titzer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/22424002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16079 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
yangguo@chromium.org 2013-08-06 14:38:30 +00:00
parent 2144dc2d0f
commit fa2381eb75
4 changed files with 98 additions and 59 deletions

View File

@ -2426,25 +2426,19 @@ void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
Code* replacement_code) {
// Iterate over the back edge table and patch every interrupt
// call to an unconditional call to the replacement code.
ASSERT(unoptimized_code->kind() == Code::FUNCTION);
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
Address back_edge_cursor = unoptimized_code->instruction_start() +
unoptimized_code->back_edge_table_offset();
uint32_t table_length = Memory::uint32_at(back_edge_cursor);
back_edge_cursor += kIntSize;
for (uint32_t i = 0; i < table_length; ++i) {
uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
if (static_cast<int>(loop_depth) == loop_nesting_level) {
// Loop back edge has the loop depth that we want to patch.
uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
Address pc_after = unoptimized_code->instruction_start() + pc_offset;
for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
!back_edges.Done();
back_edges.Next()) {
if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
PatchInterruptCodeAt(unoptimized_code,
pc_after,
back_edges.pc(),
interrupt_code,
replacement_code);
}
back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
unoptimized_code->set_back_edges_patched_for_osr(true);
#ifdef DEBUG
Deoptimizer::VerifyInterruptCode(
@ -2457,25 +2451,20 @@ void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
Code* interrupt_code,
Code* replacement_code) {
// Iterate over the back edge table and revert the patched interrupt calls.
ASSERT(unoptimized_code->kind() == Code::FUNCTION);
ASSERT(unoptimized_code->back_edges_patched_for_osr());
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
Address back_edge_cursor = unoptimized_code->instruction_start() +
unoptimized_code->back_edge_table_offset();
uint32_t table_length = Memory::uint32_at(back_edge_cursor);
back_edge_cursor += kIntSize;
for (uint32_t i = 0; i < table_length; ++i) {
uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
if (static_cast<int>(loop_depth) <= loop_nesting_level) {
uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
Address pc_after = unoptimized_code->instruction_start() + pc_offset;
for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
!back_edges.Done();
back_edges.Next()) {
if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
RevertInterruptCodeAt(unoptimized_code,
pc_after,
back_edges.pc(),
interrupt_code,
replacement_code);
}
back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
unoptimized_code->set_back_edges_patched_for_osr(false);
unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
#ifdef DEBUG
@ -2491,24 +2480,18 @@ void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
Code* interrupt_code,
Code* replacement_code,
int loop_nesting_level) {
CHECK(unoptimized_code->kind() == Code::FUNCTION);
Address back_edge_cursor = unoptimized_code->instruction_start() +
unoptimized_code->back_edge_table_offset();
uint32_t table_length = Memory::uint32_at(back_edge_cursor);
back_edge_cursor += kIntSize;
for (uint32_t i = 0; i < table_length; ++i) {
uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
!back_edges.Done();
back_edges.Next()) {
uint32_t loop_depth = back_edges.loop_depth();
CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
// Assert that all back edges for shallower loops (and only those)
// have already been patched.
uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
Address pc_after = unoptimized_code->instruction_start() + pc_offset;
CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
InterruptCodeIsPatched(unoptimized_code,
pc_after,
back_edges.pc(),
interrupt_code,
replacement_code));
back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
}
#endif // DEBUG

View File

@ -31,11 +31,14 @@
#include "v8.h"
#include "allocation.h"
#include "assert-scope.h"
#include "ast.h"
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "data-flow.h"
#include "globals.h"
#include "objects.h"
namespace v8 {
namespace internal {
@ -136,7 +139,64 @@ class FullCodeGenerator: public AstVisitor {
#error Unsupported target architecture.
#endif
static const int kBackEdgeEntrySize = 3 * kIntSize;
// Read-only iterator over the back edge table that full codegen emits into
// unoptimized code.  Layout (established by the constructor and the offset
// constants below): a uint32 entry count, followed by `table_length()`
// fixed-size entries of three uint32 words each:
//   [ast id | pc offset | loop depth].
// All accessors require !Done(); Done()/Next() follow the usual
// explicit-iterator protocol used elsewhere in V8.
class BackEdgeTableIterator {
public:
// Positions the cursor on the first entry of `unoptimized`'s back edge
// table.  Only Code::FUNCTION objects carry such a table (asserted).
explicit BackEdgeTableIterator(Code* unoptimized) {
ASSERT(unoptimized->kind() == Code::FUNCTION);
instruction_start_ = unoptimized->instruction_start();
cursor_ = instruction_start_ + unoptimized->back_edge_table_offset();
// The table must lie inside the instruction stream.
ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size());
table_length_ = Memory::uint32_at(cursor_);
// Skip the length word so the cursor points at the first entry.
cursor_ += kTableLengthSize;
end_ = cursor_ + table_length_ * kEntrySize;
}
// True once the cursor has moved past the last entry.
bool Done() { return cursor_ >= end_; }
// Advances to the next entry; must not be called when Done().
void Next() {
ASSERT(!Done());
cursor_ += kEntrySize;
}
// AST id of the back edge's loop, decoded from the first entry word.
BailoutId ast_id() {
ASSERT(!Done());
return BailoutId(static_cast<int>(
Memory::uint32_at(cursor_ + kAstIdOffset)));
}
// Nesting depth of the loop this back edge belongs to (third entry word).
uint32_t loop_depth() {
ASSERT(!Done());
return Memory::uint32_at(cursor_ + kLoopDepthOffset);
}
// Offset of the back edge's interrupt-check site, relative to
// instruction_start (second entry word).
uint32_t pc_offset() {
ASSERT(!Done());
return Memory::uint32_at(cursor_ + kPcOffsetOffset);
}
// Absolute address of the back edge's interrupt-check site.
Address pc() {
ASSERT(!Done());
return instruction_start_ + pc_offset();
}
// Total number of entries in the table (valid regardless of Done()).
uint32_t table_length() { return table_length_; }
private:
// Size of the leading entry-count word.
static const int kTableLengthSize = kIntSize;
// Word offsets within one entry.
static const int kAstIdOffset = 0 * kIntSize;
static const int kPcOffsetOffset = 1 * kIntSize;
static const int kLoopDepthOffset = 2 * kIntSize;
static const int kEntrySize = 3 * kIntSize;
Address cursor_;             // current entry; >= end_ once exhausted
Address end_;                // one past the last entry
Address instruction_start_;  // base for pc_offset() -> pc() translation
uint32_t table_length_;      // entry count read from the table header
// The iterator caches raw addresses into the Code object, so a GC that
// moves code while iterating would invalidate them — forbid allocation
// for the iterator's lifetime.
DisallowHeapAllocation no_gc_while_iterating_over_raw_addresses_;
DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator);
};
private:
class Breakable;

View File

@ -10795,18 +10795,17 @@ void Code::Disassemble(const char* name, FILE* out) {
// If there is no back edge table, the "table start" will be at or after
// (due to alignment) the end of the instruction stream.
if (static_cast<int>(offset) < instruction_size()) {
Address back_edge_cursor = instruction_start() + offset;
uint32_t table_length = Memory::uint32_at(back_edge_cursor);
PrintF(out, "Back edges (size = %u)\n", table_length);
FullCodeGenerator::BackEdgeTableIterator back_edges(this);
PrintF(out, "Back edges (size = %u)\n", back_edges.table_length());
PrintF(out, "ast_id pc_offset loop_depth\n");
for (uint32_t i = 0; i < table_length; ++i) {
uint32_t ast_id = Memory::uint32_at(back_edge_cursor);
uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
uint32_t loop_depth = Memory::uint32_at(back_edge_cursor +
2 * kIntSize);
PrintF(out, "%6u %9u %10u\n", ast_id, pc_offset, loop_depth);
back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
for ( ; !back_edges.Done(); back_edges.Next()) {
PrintF(out, "%6d %9u %10u\n", back_edges.ast_id().ToInt(),
back_edges.pc_offset(),
back_edges.loop_depth());
}
PrintF(out, "\n");
}
#ifdef OBJECT_PRINT

View File

@ -8547,23 +8547,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
// Use linear search of the unoptimized code's back edge table to find
// the AST id matching the PC.
Address start = unoptimized->instruction_start();
unsigned target_pc_offset = static_cast<unsigned>(frame->pc() - start);
Address table_cursor = start + unoptimized->back_edge_table_offset();
uint32_t table_length = Memory::uint32_at(table_cursor);
table_cursor += kIntSize;
uint32_t target_pc_offset = frame->pc() - unoptimized->instruction_start();
uint32_t loop_depth = 0;
for (unsigned i = 0; i < table_length; ++i) {
// Table entries are (AST id, pc offset) pairs.
uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize);
if (pc_offset == target_pc_offset) {
ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor)));
loop_depth = Memory::uint32_at(table_cursor + 2 * kIntSize);
for (FullCodeGenerator::BackEdgeTableIterator back_edges(*unoptimized);
!back_edges.Done();
back_edges.Next()) {
if (back_edges.pc_offset() == target_pc_offset) {
ast_id = back_edges.ast_id();
loop_depth = back_edges.loop_depth();
break;
}
table_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
ASSERT(!ast_id.IsNone());
if (FLAG_trace_osr) {
PrintF("[replacing on-stack at AST id %d, loop depth %d in ",
ast_id.ToInt(), loop_depth);