Fix the full code generator so that it does not use --debug-code when it is
running in mksnapshot or in a VM that was booted from a snapshot.
--debug-code still affects stub and optimized code, and it still works in
the full code generator when running without snapshots.

The deoptimizer generates full-code-generator code and relies on it having
the same layout as last time. This means that the code the full code
generator produces for the snapshot must be the same as the code it
produces later. This change makes the full code generator emit consistent
code between mksnapshot time and run time. It is a bug fix and a step
towards making the snapshot code more robust.

Review URL: https://chromiumcodereview.appspot.com/10834085

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12239 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Parent: cd95464dbc
Commit: 1d0f872ef9
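The invariant behind the fix can be shown with a minimal standalone sketch (hypothetical code, not from V8): any flag that changes the emitted instruction sequence must evaluate identically at snapshot time and at run time, or offsets computed against one build are wrong for the other.

```cpp
#include <cstdio>
#include <initializer_list>
#include <vector>

// Hypothetical emitter, not V8 code: its output layout depends on a debug
// flag, playing the role of generate_debug_code_ in the patch below.
struct Emitter {
  bool emit_debug_checks;
  std::vector<int> code;  // stand-in for emitted instructions

  void EmitLoad() { code.push_back(0x01); }
  void EmitDebugCheck() {
    if (emit_debug_checks) code.push_back(0x02);  // extra instruction
  }
};

int main() {
  Emitter snapshot_time{/*emit_debug_checks=*/true};
  Emitter run_time{/*emit_debug_checks=*/false};
  for (Emitter* e : {&snapshot_time, &run_time}) {
    e->EmitLoad();
    e->EmitDebugCheck();  // layout now depends on the flag
    e->EmitLoad();
  }
  // Prints 3 vs 2: PC offsets recorded against the snapshot code point at
  // different instructions in the regenerated code.
  std::printf("snapshot: %zu, runtime: %zu\n",
              snapshot_time.code.size(), run_time.code.size());
}
```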
@@ -648,6 +648,9 @@ class Assembler : public AssemblerBase {
   // Overrides the default provided by FLAG_debug_code.
   void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
 
+  // Dummy for cross platform compatibility.
+  void set_predictable_code_size(bool value) { }
+
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -785,7 +785,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
   // The variable in the declaration always resides in the current function
   // context.
   ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     // Check that we're not inside a with or catch context.
     __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
     __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
@@ -2147,7 +2147,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   // in harmony mode.
   if (var->IsStackAllocated() || var->IsContextSlot()) {
     MemOperand location = VarOperand(var, r1);
-    if (FLAG_debug_code && op == Token::INIT_LET) {
+    if (generate_debug_code_ && op == Token::INIT_LET) {
       // Check for an uninitialized let binding.
       __ ldr(r2, location);
       __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
@@ -2711,7 +2711,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  if (FLAG_debug_code) __ AbortIfSmi(r0);
+  if (generate_debug_code_) __ AbortIfSmi(r0);
 
   __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
   __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
@@ -3575,7 +3575,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   ASSERT(args->length() == 1);
   VisitForAccumulatorValue(args->at(0));
 
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ AbortIfNotString(r0);
   }
 
@@ -3649,7 +3649,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   // string_length: Accumulated sum of string lengths (smi).
   // element: Current array element.
   // elements_end: Array end.
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ cmp(array_length, Operand(0));
     __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
   }
@@ -699,7 +699,7 @@ static bool InstallFullCode(CompilationInfo* info) {
   shared->set_dont_inline(lit->flags()->Contains(kDontInline));
   shared->set_ast_node_count(lit->ast_node_count());
 
-  if (V8::UseCrankshaft()&&
+  if (V8::UseCrankshaft() &&
      !function.is_null() &&
      !shared->optimization_disabled()) {
    // If we're asked to always optimize, we compile the optimized
@@ -36,6 +36,7 @@
 #include "prettyprinter.h"
 #include "scopes.h"
 #include "scopeinfo.h"
+#include "snapshot.h"
 #include "stub-cache.h"
 
 namespace v8 {
@@ -382,6 +383,20 @@ void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
 }
 
 
+void FullCodeGenerator::Initialize() {
+  // The generation of debug code must match between the snapshot code and the
+  // code that is generated later. This is assumed by the debugger when it is
+  // calculating PC offsets after generating a debug version of code. Therefore
+  // we disable the production of debug code in the full compiler if we are
+  // either generating a snapshot or we booted from a snapshot.
+  generate_debug_code_ = FLAG_debug_code &&
+                         !Serializer::enabled() &&
+                         !Snapshot::HaveASnapshotToStartFrom();
+  masm_->set_emit_debug_code(generate_debug_code_);
+  masm_->set_predictable_code_size(true);
+}
+
+
 void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
   if (type_feedback_cells_.is_empty()) return;
   int length = type_feedback_cells_.length();
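Read as a predicate, the gate in Initialize() above has the following shape. This is a paraphrase with hypothetical parameter names, the real inputs being FLAG_debug_code, Serializer::enabled() and Snapshot::HaveASnapshotToStartFrom():

```cpp
#include <cstdio>

// Paraphrase of the condition in FullCodeGenerator::Initialize(), not the
// actual V8 API: debug code is produced only in a standalone VM, never
// while building a snapshot and never after booting from one.
bool GenerateDebugCode(bool flag_debug_code,
                       bool making_snapshot,
                       bool booted_from_snapshot) {
  return flag_debug_code && !making_snapshot && !booted_from_snapshot;
}

int main() {
  std::printf("%d\n", GenerateDebugCode(true, true, false));   // mksnapshot: 0
  std::printf("%d\n", GenerateDebugCode(true, false, true));   // booted: 0
  std::printf("%d\n", GenerateDebugCode(true, false, false));  // standalone: 1
}
```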
@@ -93,7 +93,11 @@ class FullCodeGenerator: public AstVisitor {
                             ? info->function()->ast_node_count() : 0,
                         info->zone()),
         ic_total_count_(0),
-        zone_(info->zone()) { }
+        zone_(info->zone()) {
+    Initialize();
+  }
+
+  void Initialize();
 
   static bool MakeCode(CompilationInfo* info);
@@ -806,6 +810,7 @@ class FullCodeGenerator: public AstVisitor {
   int ic_total_count_;
   Handle<FixedArray> handler_table_;
   Handle<JSGlobalPropertyCell> profiling_counter_;
+  bool generate_debug_code_;
   Zone* zone_;
 
   friend class NestedStatement;
@@ -587,6 +587,11 @@ class Assembler : public AssemblerBase {
   // Overrides the default provided by FLAG_debug_code.
   void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
 
+  // Avoids using instructions that vary in size in unpredictable ways between
+  // the snapshot and the running VM. This is needed by the full compiler so
+  // that it can recompile code with debug support and fix the PC.
+  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
+
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1111,6 +1116,7 @@ class Assembler : public AssemblerBase {
 
 protected:
   bool emit_debug_code() const { return emit_debug_code_; }
+  bool predictable_code_size() const { return predictable_code_size_; }
 
   void movsd(XMMRegister dst, const Operand& src);
   void movsd(const Operand& dst, XMMRegister src);
@@ -1186,6 +1192,7 @@ class Assembler : public AssemblerBase {
   PositionsRecorder positions_recorder_;
 
   bool emit_debug_code_;
+  bool predictable_code_size_;
 
   friend class PositionsRecorder;
 };
@@ -754,7 +754,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
   // The variable in the declaration always resides in the current function
   // context.
   ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     // Check that we're not inside a with or catch context.
     __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
     __ cmp(ebx, isolate()->factory()->with_context_map());
@@ -2091,7 +2091,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   // in harmony mode.
   if (var->IsStackAllocated() || var->IsContextSlot()) {
     MemOperand location = VarOperand(var, ecx);
-    if (FLAG_debug_code && op == Token::INIT_LET) {
+    if (generate_debug_code_ && op == Token::INIT_LET) {
      // Check for an uninitialized let binding.
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
@@ -2640,7 +2640,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  if (FLAG_debug_code) __ AbortIfSmi(eax);
+  if (generate_debug_code_) __ AbortIfSmi(eax);
 
   // Check whether this map has already been checked to be safe for default
   // valueOf.
@@ -2865,7 +2865,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
 
   __ bind(&exit);
-  if (FLAG_debug_code) __ AbortIfNotSmi(eax);
+  if (generate_debug_code_) __ AbortIfNotSmi(eax);
   context()->Plug(eax);
 }
 
@@ -3485,7 +3485,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
 
   VisitForAccumulatorValue(args->at(0));
 
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ AbortIfNotString(eax);
   }
 
@@ -3510,7 +3510,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   ASSERT(args->length() == 1);
   VisitForAccumulatorValue(args->at(0));
 
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ AbortIfNotString(eax);
   }
 
@@ -3586,7 +3586,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   // Loop condition: while (index < length).
   // Live loop registers: index, array_length, string,
   //                      scratch, string_length, elements.
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ cmp(index, array_length);
     __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
   }
@@ -85,7 +85,7 @@ void MacroAssembler::RememberedSetHelper(
     SaveFPRegsMode save_fp,
     MacroAssembler::RememberedSetFinalAction and_then) {
   Label done;
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok;
     JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
     int3();
@@ -317,7 +317,7 @@ void MacroAssembler::RecordWrite(Register object,
     return;
   }
 
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok;
     cmp(value, Operand(address, 0));
     j(equal, &ok, Label::kNear);
@@ -2793,7 +2793,7 @@ void MacroAssembler::EnsureNotWhite(
   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
   j(not_zero, &done, Label::kNear);
 
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Check for impossible bit pattern.
     Label ok;
     push(mask_scratch);
@@ -2868,7 +2868,7 @@ void MacroAssembler::EnsureNotWhite(
   and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
   add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
       length);
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
     cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
     Check(less_equal, "Live Bytes Count overflow chunk size");
@@ -525,6 +525,9 @@ class Assembler : public AssemblerBase {
   // Overrides the default provided by FLAG_debug_code.
   void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
 
+  // Dummy for cross platform compatibility.
+  void set_predictable_code_size(bool value) { }
+
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -791,7 +791,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
   // The variable in the declaration always resides in the current function
   // context.
   ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     // Check that we're not inside a with or catch context.
     __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
     __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
@@ -2164,7 +2164,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   // in harmony mode.
   if (var->IsStackAllocated() || var->IsContextSlot()) {
     MemOperand location = VarOperand(var, a1);
-    if (FLAG_debug_code && op == Token::INIT_LET) {
+    if (generate_debug_code_ && op == Token::INIT_LET) {
      // Check for an uninitialized let binding.
      __ lw(a2, location);
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
@@ -2734,7 +2734,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  if (FLAG_debug_code) __ AbortIfSmi(v0);
+  if (generate_debug_code_) __ AbortIfSmi(v0);
 
   __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
   __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
@@ -3609,7 +3609,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   ASSERT(args->length() == 1);
   VisitForAccumulatorValue(args->at(0));
 
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ AbortIfNotString(v0);
   }
 
@@ -3685,7 +3685,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   // string_length: Accumulated sum of string lengths (smi).
   // element: Current array element.
   // elements_end: Array end.
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
               array_length, Operand(zero_reg));
   }
@@ -511,6 +511,18 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
       UNCLASSIFIED,
       47,
       "date_cache_stamp");
+  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
+      UNCLASSIFIED,
+      48,
+      "address_of_pending_message_obj");
+  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
+      UNCLASSIFIED,
+      49,
+      "address_of_has_pending_message");
+  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
+      UNCLASSIFIED,
+      50,
+      "pending_message_script");
 }
 
 
@@ -350,7 +350,8 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
     : AssemblerBase(arg_isolate),
       code_targets_(100),
       positions_recorder_(this),
-      emit_debug_code_(FLAG_debug_code) {
+      emit_debug_code_(FLAG_debug_code),
+      predictable_code_size_(false) {
   if (buffer == NULL) {
     // Do our own buffer management.
     if (buffer_size <= kMinimalBufferSize) {
@@ -1234,7 +1235,16 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
     const int long_size = 6;
     int offs = L->pos() - pc_offset();
     ASSERT(offs <= 0);
-    if (is_int8(offs - short_size)) {
+    // Determine whether we can use 1-byte offsets for backwards branches,
+    // which have a max range of 128 bytes.
+
+    // We also need to check the predictable_code_size_ flag here, because
+    // on x64, when the full code generator recompiles code for debugging, some
+    // places need to be padded out to a certain size. The debugger is keeping
+    // track of how often it did this so that it can adjust return addresses on
+    // the stack, but if the size of jump instructions can also change, that's
+    // not enough and the calculated offsets would be incorrect.
+    if (is_int8(offs - short_size) && !predictable_code_size_) {
       // 0111 tttn #8-bit disp.
       emit(0x70 | cc);
       emit((offs - short_size) & 0xFF);
@@ -1291,7 +1301,7 @@ void Assembler::jmp(Label* L, Label::Distance distance) {
   if (L->is_bound()) {
     int offs = L->pos() - pc_offset() - 1;
     ASSERT(offs <= 0);
-    if (is_int8(offs - short_size)) {
+    if (is_int8(offs - short_size) && !predictable_code_size_) {
       // 1110 1011 #8-bit disp.
       emit(0xEB);
       emit((offs - short_size) & 0xFF);
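The size drift the new comment guards against can be made concrete. The following helper is illustrative only, with the 2-byte and 6-byte encodings taken from the comments in the hunks above; it is not code from the patch:

```cpp
#include <cstdio>

// Mirrors the choice in Assembler::j(): a bound backward branch may use the
// short 2-byte form (0x70 | cc, disp8) only when the displacement fits in a
// signed byte AND code size does not have to be predictable; otherwise the
// 6-byte long form is emitted.
int JccSize(int offs, bool predictable_code_size) {
  const int short_size = 2;
  const int long_size = 6;
  int disp = offs - short_size;
  bool fits_int8 = disp >= -128 && disp <= 127;
  return (fits_int8 && !predictable_code_size) ? short_size : long_size;
}

int main() {
  // The same branch is 2 bytes normally but 6 bytes under predictable code
  // size: exactly the drift that would break the debugger's PC bookkeeping.
  std::printf("%d vs %d\n", JccSize(-10, false), JccSize(-10, true));
}
```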
@@ -561,6 +561,11 @@ class Assembler : public AssemblerBase {
   // Overrides the default provided by FLAG_debug_code.
   void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
 
+  // Avoids using instructions that vary in size in unpredictable ways between
+  // the snapshot and the running VM. This is needed by the full compiler so
+  // that it can recompile code with debug support and fix the PC.
+  void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
+
   // GetCode emits any pending (non-emitted) code and fills the descriptor
   // desc. GetCode() is idempotent; it returns the same result if no other
   // Assembler functions are invoked in between GetCode() calls.
@@ -1433,6 +1438,7 @@ class Assembler : public AssemblerBase {
 
 protected:
   bool emit_debug_code() const { return emit_debug_code_; }
+  bool predictable_code_size() const { return predictable_code_size_; }
 
 private:
   byte* addr_at(int pos) { return buffer_ + pos; }
@@ -1637,6 +1643,7 @@ class Assembler : public AssemblerBase {
   PositionsRecorder positions_recorder_;
 
   bool emit_debug_code_;
+  bool predictable_code_size_;
 
   friend class PositionsRecorder;
 };
@@ -759,7 +759,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
   // The variable in the declaration always resides in the current function
   // context.
   ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     // Check that we're not inside a with or catch context.
     __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
     __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
@@ -2072,7 +2072,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
   // in harmony mode.
   if (var->IsStackAllocated() || var->IsContextSlot()) {
     MemOperand location = VarOperand(var, rcx);
-    if (FLAG_debug_code && op == Token::INIT_LET) {
+    if (generate_debug_code_ && op == Token::INIT_LET) {
      // Check for an uninitialized let binding.
      __ movq(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
@@ -2612,7 +2612,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);
 
-  if (FLAG_debug_code) __ AbortIfSmi(rax);
+  if (generate_debug_code_) __ AbortIfSmi(rax);
 
   // Check whether this map has already been checked to be safe for default
   // valueOf.
@@ -2836,7 +2836,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
   __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
 
   __ bind(&exit);
-  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
+  if (generate_debug_code_) __ AbortIfNotSmi(rax);
   context()->Plug(rax);
 }
 
@@ -3480,7 +3480,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
   ASSERT(args->length() == 1);
   VisitForAccumulatorValue(args->at(0));
 
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ AbortIfNotString(rax);
   }
 
@@ -3560,7 +3560,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   // Loop condition: while (index < array_length).
   // Live loop registers: index(int32), array_length(int32), string(String*),
   //                      scratch, string_length(int32), elements(FixedArray*).
-  if (FLAG_debug_code) {
+  if (generate_debug_code_) {
     __ cmpq(index, array_length);
     __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
   }
@@ -53,9 +53,17 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
 }
 
 
-static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
+static const int kInvalidRootRegisterDelta = -1;
+
+
+intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
+  if (predictable_code_size() &&
+      (other.address() < reinterpret_cast<Address>(isolate()) ||
+       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
+    return kInvalidRootRegisterDelta;
+  }
   Address roots_register_value = kRootRegisterBias +
-      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
+      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
   intptr_t delta = other.address() - roots_register_value;
   return delta;
 }
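A plausible reading of why isolate-internal addresses are singled out here: a root-register-relative operand encodes its displacement in a size that depends on the delta, and the isolate sits at a different address during mksnapshot than at run time. A rough sketch with assumed x64 encoding sizes, not code from the patch:

```cpp
#include <cstdint>
#include <cstdio>

// Operand(kRootRegister, delta) encodes delta as disp8 or disp32 depending
// on magnitude. Deltas to isolate-internal addresses differ between
// mksnapshot and the running VM, so the instruction size could differ too;
// returning kInvalidRootRegisterDelta forces the stable fallback path.
int DisplacementBytes(int64_t delta) {
  return (delta >= -128 && delta <= 127) ? 1 : 4;
}

int main() {
  // Same external reference, different process layout, different size.
  std::printf("%d vs %d\n",
              DisplacementBytes(64),        // e.g. at mksnapshot time
              DisplacementBytes(1 << 20));  // e.g. at run time
}
```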
@@ -64,8 +72,8 @@ static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
 Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                         Register scratch) {
   if (root_array_available_ && !Serializer::enabled()) {
-    intptr_t delta = RootRegisterDelta(target, isolate());
-    if (is_int32(delta)) {
+    intptr_t delta = RootRegisterDelta(target);
+    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
@@ -77,8 +85,8 @@ Operand MacroAssembler::ExternalOperand(ExternalReference target,
 
 void MacroAssembler::Load(Register destination, ExternalReference source) {
   if (root_array_available_ && !Serializer::enabled()) {
-    intptr_t delta = RootRegisterDelta(source, isolate());
-    if (is_int32(delta)) {
+    intptr_t delta = RootRegisterDelta(source);
+    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
@@ -96,8 +104,8 @@ void MacroAssembler::Load(Register destination, ExternalReference source) {
 
 void MacroAssembler::Store(ExternalReference destination, Register source) {
   if (root_array_available_ && !Serializer::enabled()) {
-    intptr_t delta = RootRegisterDelta(destination, isolate());
-    if (is_int32(delta)) {
+    intptr_t delta = RootRegisterDelta(destination);
+    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
@@ -116,8 +124,8 @@ void MacroAssembler::Store(ExternalReference destination, Register source) {
 void MacroAssembler::LoadAddress(Register destination,
                                  ExternalReference source) {
   if (root_array_available_ && !Serializer::enabled()) {
-    intptr_t delta = RootRegisterDelta(source, isolate());
-    if (is_int32(delta)) {
+    intptr_t delta = RootRegisterDelta(source);
+    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
@@ -133,8 +141,8 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
   // This calculation depends on the internals of LoadAddress.
   // It's correctness is ensured by the asserts in the Call
   // instruction below.
-  intptr_t delta = RootRegisterDelta(source, isolate());
-  if (is_int32(delta)) {
+  intptr_t delta = RootRegisterDelta(source);
+  if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
     Serializer::TooLateToEnableNow();
     // Operand is lea(scratch, Operand(kRootRegister, delta));
     // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
@@ -216,7 +224,7 @@ void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                          Register scratch,
                                          SaveFPRegsMode save_fp,
                                          RememberedSetFinalAction and_then) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok;
     JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
     int3();
@@ -397,7 +405,7 @@ void MacroAssembler::RecordWrite(Register object,
     return;
   }
 
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok;
     cmpq(value, Operand(address, 0));
     j(equal, &ok, Label::kNear);
@@ -3992,7 +4000,7 @@ void MacroAssembler::CopyBytes(Register destination,
                                int min_length,
                                Register scratch) {
   ASSERT(min_length >= 0);
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     cmpl(length, Immediate(min_length));
     Assert(greater_equal, "Invalid min_length");
   }
@@ -4369,7 +4377,7 @@ void MacroAssembler::EnsureNotWhite(
   testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
   j(not_zero, &done, Label::kNear);
 
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Check for impossible bit pattern.
     Label ok;
     push(mask_scratch);
@@ -1328,6 +1328,8 @@ class MacroAssembler: public Assembler {
   // modified. It may be the "smi 1 constant" register.
   Register GetSmiConstant(Smi* value);
 
+  intptr_t RootRegisterDelta(ExternalReference other);
+
   // Moves the smi value to the destination register.
   void LoadSmiConstant(Register dst, Smi* value);
 
@@ -1902,6 +1902,9 @@ void SimulateFullSpace(PagedSpace* space);
 
 TEST(ReleaseOverReservedPages) {
   i::FLAG_trace_gc = true;
+  // The optimizer can allocate stuff, messing up the test.
+  i::FLAG_crankshaft = false;
+  i::FLAG_always_opt = false;
   InitializeVM();
   v8::HandleScope scope;
   static const int number_of_test_pages = 20;