Initial support for count-based profiling
(behind FLAG_count_based_interrupts; only on ia32)

Review URL: https://chromiumcodereview.appspot.com/9373028
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10699 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Parent: 559f5eecad
Commit: 4233bf8348
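What the diff below adds, in one picture: instead of relying on the profiler's timer thread to request ticks, full-codegen now allocates a per-function counter cell (initialized to --interrupt-budget) and emits code at every loop back edge that decrements it, optionally by a weight proportional to the loop's code size; when the counter goes non-positive, the new InterruptStub tail-calls Runtime::kInterrupt, which is handled as a profiler tick. The following standalone C++ sketch models that control flow; it is illustration only, not V8 code, and every name in it is invented.

    // Standalone model of count-based interrupts -- not V8 code. V8 keeps
    // the budget as a Smi in a JSGlobalPropertyCell (profiling_counter_)
    // and decrements it in generated code at each loop back edge; when it
    // goes non-positive, InterruptStub tail-calls Runtime::kInterrupt and
    // HandleStackGuardInterrupt treats the interrupt as a profiler tick.
    #include <algorithm>
    #include <cstdio>

    static const int kInterruptBudget = 100;  // default of --interrupt-budget
    static int profiling_counter = kInterruptBudget;

    // Called at every loop back edge; |distance| is the byte distance from
    // the back-edge target to the jump, so bigger loop bodies consume the
    // budget faster (--weighted-back-edges).
    void OnBackEdge(int distance) {
      int weight = std::min(127, std::max(1, distance / 100));
      profiling_counter -= weight;
      if (profiling_counter <= 0) {            // generated code: sub + jns
        std::printf("profiler tick -> RuntimeProfiler::OptimizeNow()\n");
        profiling_counter = kInterruptBudget;  // "Reset the countdown."
      }
    }

    int main() {
      for (int i = 0; i < 300; ++i) OnBackEdge(250);  // weight 2 per edge
      return 0;
    }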
src/arm/code-stubs-arm.cc
@@ -3439,6 +3439,11 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
 }
 
 
+void InterruptStub::Generate(MacroAssembler* masm) {
+  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+
+
 void MathPowStub::Generate(MacroAssembler* masm) {
   CpuFeatures::Scope vfp3_scope(VFP3);
   const Register base = r1;
src/arm/full-codegen-arm.cc
@@ -331,7 +331,8 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+                                       Label* back_edge_target) {
   Comment cmnt(masm_, "[ Stack check");
   Label ok;
   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
@@ -1106,7 +1107,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ add(r0, r0, Operand(Smi::FromInt(1)));
   __ push(r0);
 
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &loop);
   __ b(&loop);
 
   // Remove the pointers stored on the stack.
src/code-stubs.h
@@ -55,6 +55,7 @@ namespace internal {
   V(ConvertToDouble)                     \
   V(WriteInt32ToHeapNumber)              \
   V(StackCheck)                          \
+  V(Interrupt)                           \
   V(FastNewClosure)                      \
   V(FastNewContext)                      \
   V(FastNewBlockContext)                 \
@@ -297,6 +298,18 @@ class StackCheckStub : public CodeStub {
 };
 
 
+class InterruptStub : public CodeStub {
+ public:
+  InterruptStub() { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  Major MajorKey() { return Interrupt; }
+  int MinorKey() { return 0; }
+};
+
+
 class ToNumberStub: public CodeStub {
  public:
   ToNumberStub() { }
src/execution.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -882,7 +882,9 @@ MaybeObject* Execution::HandleStackGuardInterrupt() {
   }
 
   isolate->counters()->stack_interrupts()->Increment();
-  if (stack_guard->IsRuntimeProfilerTick()) {
+  // If FLAG_count_based_interrupts, every interrupt is a profiler interrupt.
+  if (FLAG_count_based_interrupts ||
+      stack_guard->IsRuntimeProfilerTick()) {
     isolate->counters()->runtime_profiler_ticks()->Increment();
     stack_guard->Continue(RUNTIME_PROFILER_TICK);
     isolate->runtime_profiler()->OptimizeNow();
@@ -904,4 +906,5 @@ MaybeObject* Execution::HandleStackGuardInterrupt() {
   return isolate->heap()->undefined_value();
 }
 
+
 } }  // namespace v8::internal
src/flag-definitions.h
@@ -170,9 +170,16 @@ DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
 DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
 DEFINE_bool(self_optimization, false,
             "primitive functions trigger their own optimization")
+DEFINE_bool(count_based_interrupts, false,
+            "trigger profiler ticks based on counting instead of timing")
+DEFINE_bool(weighted_back_edges, false,
+            "weight back edges by jump distance for interrupt triggering")
+DEFINE_int(interrupt_budget, 100,
+           "execution budget before interrupt is triggered")
 
 DEFINE_implication(experimental_profiler, watch_ic_patching)
 DEFINE_implication(experimental_profiler, self_optimization)
+DEFINE_implication(experimental_profiler, count_based_interrupts)
 
 // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
 DEFINE_bool(debug_code, false,
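Nothing changes by default: all three new flags are off, and --experimental_profiler now additionally implies --count_based_interrupts (alongside watch_ic_patching and self_optimization); --weighted_back_edges still has to be passed explicitly. A hypothetical manual run, assuming a d8 shell built from this revision:

    d8 --count-based-interrupts --interrupt-budget=200 --weighted-back-edges script.js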
src/full-codegen.cc
@@ -1073,7 +1073,7 @@ void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
   // Check stack before looping.
   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   __ bind(&stack_check);
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &body);
   __ jmp(&body);
 
   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
@@ -1102,7 +1102,7 @@ void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
   SetStatementPosition(stmt);
 
   // Check stack before looping.
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &body);
 
   __ bind(&test);
   VisitForControl(stmt->cond(),
@@ -1145,7 +1145,7 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
   SetStatementPosition(stmt);
 
   // Check stack before looping.
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &body);
 
   __ bind(&test);
   if (stmt->cond() != NULL) {
src/full-codegen.h
@@ -424,7 +424,10 @@ class FullCodeGenerator: public AstVisitor {
 
   // Platform-specific code for checking the stack limit at the back edge of
   // a loop.
-  void EmitStackCheck(IterationStatement* stmt);
+  // This is meant to be called at loop back edges, |back_edge_target| is
+  // the jump target of the back edge and is used to approximate the amount
+  // of code inside the loop.
+  void EmitStackCheck(IterationStatement* stmt, Label* back_edge_target);
   // Record the OSR AST id corresponding to a stack check in the code.
   void RecordStackCheck(unsigned osr_ast_id);
   // Emit a table of stack check ids and pcs into the code stream. Return
@@ -774,6 +777,7 @@ class FullCodeGenerator: public AstVisitor {
   ZoneList<BailoutEntry> stack_checks_;
   ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
   Handle<FixedArray> handler_table_;
+  Handle<JSGlobalPropertyCell> profiling_counter_;
 
   friend class NestedStatement;
 
src/ia32/builtins-ia32.cc
@@ -1694,8 +1694,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ j(not_equal, &skip, Label::kNear);
   __ ret(0);
 
-  // If we decide not to perform on-stack replacement we perform a
-  // stack guard check to enable interrupts.
+  // Insert a stack guard check so that if we decide not to perform
+  // on-stack replacement right away, the function calling this stub can
+  // still be interrupted.
   __ bind(&stack_check);
   Label ok;
   ExternalReference stack_limit =
src/ia32/code-stubs-ia32.cc
@@ -4573,6 +4573,11 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
 }
 
 
+void InterruptStub::Generate(MacroAssembler* masm) {
+  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+
+
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
src/ia32/deoptimizer-ia32.cc
@@ -205,6 +205,15 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
 }
 
 
+static const byte kJnsInstruction = 0x79;
+static const byte kJnsOffset = 0x11;
+static const byte kJaeInstruction = 0x73;
+static const byte kJaeOffset = 0x07;
+static const byte kCallInstruction = 0xe8;
+static const byte kNopByteOne = 0x66;
+static const byte kNopByteTwo = 0x90;
+
+
 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                         Address pc_after,
                                         Code* check_code,
@@ -228,11 +237,17 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
   //     call <on-stack replacment>
   //     test eax, <loop nesting depth>
   // ok:
-  ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
-         *(call_target_address - 2) == 0x07 &&  // offset
-         *(call_target_address - 1) == 0xe8);   // call
-  *(call_target_address - 3) = 0x66;  // 2 byte nop part 1
-  *(call_target_address - 2) = 0x90;  // 2 byte nop part 2
+
+  if (FLAG_count_based_interrupts) {
+    ASSERT(*(call_target_address - 3) == kJnsInstruction);
+    ASSERT(*(call_target_address - 2) == kJnsOffset);
+  } else {
+    ASSERT(*(call_target_address - 3) == kJaeInstruction);
+    ASSERT(*(call_target_address - 2) == kJaeOffset);
+  }
+  ASSERT(*(call_target_address - 1) == kCallInstruction);
+  *(call_target_address - 3) = kNopByteOne;
+  *(call_target_address - 2) = kNopByteTwo;
   Assembler::set_target_address_at(call_target_address,
                                    replacement_code->entry());
 
@@ -248,13 +263,19 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
   Address call_target_address = pc_after - kIntSize;
   ASSERT(replacement_code->entry() ==
          Assembler::target_address_at(call_target_address));
+
   // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
   // restore the conditional branch.
-  ASSERT(*(call_target_address - 3) == 0x66 &&  // 2 byte nop part 1
-         *(call_target_address - 2) == 0x90 &&  // 2 byte nop part 2
-         *(call_target_address - 1) == 0xe8);   // call
-  *(call_target_address - 3) = 0x73;  // jae
-  *(call_target_address - 2) = 0x07;  // offset
+  ASSERT(*(call_target_address - 3) == kNopByteOne &&
+         *(call_target_address - 2) == kNopByteTwo &&
+         *(call_target_address - 1) == kCallInstruction);
+  if (FLAG_count_based_interrupts) {
+    *(call_target_address - 3) = kJnsInstruction;
+    *(call_target_address - 2) = kJnsOffset;
+  } else {
+    *(call_target_address - 3) = kJaeInstruction;
+    *(call_target_address - 2) = kJaeOffset;
+  }
   Assembler::set_target_address_at(call_target_address,
                                    check_code->entry());
 
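The constants above describe the two back-edge check shapes the deoptimizer must now recognize: a stack-limit check ends in "jae ok" (0x73, displacement 0x07: the 5-byte call plus a 2-byte test), while a count-based check ends in "jns ok" (0x79, displacement 0x11: the same 7 bytes plus, apparently, the 10-byte mov that resets the counter). The sketch below reduces the patch/revert logic to plain byte arrays; it is illustration only, not V8 code, and the buffer layout and function names are invented. The real code additionally retargets the call's rel32 operand at the OnStackReplacement builtin (or back at the stub).

    // Sketch of Deoptimizer::PatchStackCheckCodeAt / RevertStackCheckCodeAt
    // above, reduced to raw bytes. Not V8 code; names and layout invented.
    #include <cassert>
    #include <cstdint>

    static const uint8_t kJnsInstruction = 0x79, kJnsOffset = 0x11;
    static const uint8_t kJaeInstruction = 0x73, kJaeOffset = 0x07;
    static const uint8_t kCallInstruction = 0xe8;
    static const uint8_t kNopByteOne = 0x66, kNopByteTwo = 0x90;

    // |call_target| points at the call's rel32 operand, as in the patch.
    void PatchForOsr(uint8_t* call_target, bool count_based_interrupts) {
      assert(call_target[-3] == (count_based_interrupts ? kJnsInstruction
                                                        : kJaeInstruction));
      assert(call_target[-1] == kCallInstruction);
      call_target[-3] = kNopByteOne;  // overwrite the conditional jump with
      call_target[-2] = kNopByteTwo;  // a 2-byte nop: always take the call
    }

    void RevertOsrPatch(uint8_t* call_target, bool count_based_interrupts) {
      assert(call_target[-3] == kNopByteOne &&
             call_target[-2] == kNopByteTwo &&
             call_target[-1] == kCallInstruction);
      // Restore whichever conditional jump matches the active flag.
      call_target[-3] = count_based_interrupts ? kJnsInstruction
                                               : kJaeInstruction;
      call_target[-2] = count_based_interrupts ? kJnsOffset : kJaeOffset;
    }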
src/ia32/full-codegen-ia32.cc
@@ -119,6 +119,8 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   scope_ = info->scope();
   handler_table_ =
       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");
 
@@ -323,15 +325,34 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+                                       Label* back_edge_target) {
   Comment cmnt(masm_, "[ Stack check");
   Label ok;
-  ExternalReference stack_limit =
-      ExternalReference::address_of_stack_limit(isolate());
-  __ cmp(esp, Operand::StaticVariable(stack_limit));
-  __ j(above_equal, &ok, Label::kNear);
-  StackCheckStub stub;
-  __ CallStub(&stub);
+
+  if (FLAG_count_based_interrupts) {
+    int weight = 1;
+    if (FLAG_weighted_back_edges) {
+      ASSERT(back_edge_target->is_bound());
+      int distance = masm_->pc_offset() - back_edge_target->pos();
+      weight = Min(127, Max(1, distance / 100));
+    }
+    __ sub(Operand::Cell(profiling_counter_), Immediate(Smi::FromInt(weight)));
+    __ j(positive, &ok, Label::kNear);
+    InterruptStub stub;
+    __ CallStub(&stub);
+  } else {
+    // Count based interrupts happen often enough when they are enabled
+    // that the additional stack checks are not necessary (they would
+    // only check for interrupts).
+    ExternalReference stack_limit =
+        ExternalReference::address_of_stack_limit(isolate());
+    __ cmp(esp, Operand::StaticVariable(stack_limit));
+    __ j(above_equal, &ok, Label::kNear);
+    StackCheckStub stub;
+    __ CallStub(&stub);
+  }
+
   // Record a mapping of this PC offset to the OSR id. This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
   // the deoptimization input data found in the optimized code.
@@ -344,6 +365,12 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
   ASSERT(loop_depth() > 0);
   __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
 
+  if (FLAG_count_based_interrupts) {
+    // Reset the countdown.
+    __ mov(Operand::Cell(profiling_counter_),
+           Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+  }
+
   __ bind(&ok);
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   // Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -1061,7 +1088,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ bind(loop_statement.continue_label());
   __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
 
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &loop);
   __ jmp(&loop);
 
   // Remove the pointers stored on the stack.
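The ia32 EmitStackCheck above subtracts a tagged Smi straight from the property cell and then branches on the sign flag. That works because of the ia32 Smi encoding: an integer shifted left by one, with tag bit 0 clear, so machine add/sub on tagged values is exact and the sign of the tagged result equals the sign of the untagged count. A standalone sketch under that one assumption (not V8 code):

    // ia32 Smi arithmetic: tagged sub is plain sub, sign flag usable by jns.
    #include <cassert>
    #include <cstdint>

    static int32_t SmiFromInt(int32_t value) { return value << 1; }
    static int32_t SmiToInt(int32_t smi) { return smi >> 1; }  // arithmetic shift

    int main() {
      int32_t counter = SmiFromInt(100);          // --interrupt-budget
      counter -= SmiFromInt(3);                   // back edge with weight 3
      assert(SmiToInt(counter) == 97);            // still a valid Smi
      assert(SmiFromInt(1) - SmiFromInt(2) < 0);  // sign flag set -> no jns
      return 0;
    }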
src/mips/full-codegen-mips.cc
@@ -336,7 +336,8 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+                                       Label* back_edge_target) {
   // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
   // to make sure it is constant. Branch may emit a skip-or-jump sequence
   // instead of the normal Branch. It seems that the "skip" part of that
@@ -1119,7 +1120,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ Addu(a0, a0, Operand(Smi::FromInt(1)));
   __ push(a0);
 
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &loop);
   __ Branch(&loop);
 
   // Remove the pointers stored on the stack.
src/runtime-profiler.cc
@@ -147,9 +147,16 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
 
   // Get the stack check stub code object to match against. We aren't
   // prepared to generate it, but we don't expect to have to.
-  StackCheckStub check_stub;
+  bool found_code = false;
   Code* stack_check_code = NULL;
-  if (check_stub.FindCodeInCache(&stack_check_code)) {
+  if (FLAG_count_based_interrupts) {
+    InterruptStub interrupt_stub;
+    found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
+  } else {
+    StackCheckStub check_stub;
+    found_code = check_stub.FindCodeInCache(&stack_check_code);
+  }
+  if (found_code) {
     Code* replacement_code =
         isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
     Code* unoptimized_code = shared->code();
@@ -255,7 +262,7 @@ void RuntimeProfiler::OptimizeNow() {
       } else {
         function->shared()->set_profiler_ticks(ticks + 1);
       }
-    } else {  // !FLAG_counting_profiler
+    } else {  // !FLAG_watch_ic_patching
       samples[sample_count++] = function;
 
       int function_size = function->shared()->SourceSize();
@@ -273,7 +280,7 @@ void RuntimeProfiler::OptimizeNow() {
   if (FLAG_watch_ic_patching) {
     any_ic_changed_ = false;
     code_generated_ = false;
-  } else {  // !FLAG_counting_profiler
+  } else {  // !FLAG_watch_ic_patching
     // Add the collected functions as samples. It's important not to do
     // this as part of collecting them because this will interfere with
     // the sample lookup in case of recursive functions.
@@ -285,6 +292,7 @@ void RuntimeProfiler::OptimizeNow() {
 
 
 void RuntimeProfiler::NotifyTick() {
+  if (FLAG_count_based_interrupts) return;
   isolate_->stack_guard()->RequestRuntimeProfilerTick();
 }
 
@@ -303,7 +311,7 @@ void RuntimeProfiler::SetUp() {
 void RuntimeProfiler::Reset() {
   if (FLAG_watch_ic_patching) {
     total_code_generated_ = 0;
-  } else {  // !FLAG_counting_profiler
+  } else {  // !FLAG_watch_ic_patching
     sampler_threshold_ = kSamplerThresholdInit;
     sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
     sampler_ticks_until_threshold_adjustment_ =
src/runtime.cc
@@ -8711,8 +8711,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
     function->PrintName();
     PrintF("]\n");
   }
-  StackCheckStub check_stub;
-  Handle<Code> check_code = check_stub.GetCode();
+  Handle<Code> check_code;
+  if (FLAG_count_based_interrupts) {
+    InterruptStub interrupt_stub;
+    check_code = interrupt_stub.GetCode();
+  } else {
+    StackCheckStub check_stub;
+    check_code = check_stub.GetCode();
+  }
   Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement();
   Deoptimizer::RevertStackCheckCode(*unoptimized,
                                     *check_code,
@@ -9266,6 +9272,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_Interrupt) {
+  ASSERT(args.length() == 0);
+  return Execution::HandleStackGuardInterrupt();
+}
+
+
 static int StackSize() {
   int n = 0;
   for (JavaScriptFrameIterator it; !it.done(); it.Advance()) n++;
src/runtime.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -319,6 +319,7 @@ namespace internal {
   F(ReThrow, 1, 1) \
   F(ThrowReferenceError, 1, 1) \
   F(StackGuard, 0, 1) \
+  F(Interrupt, 0, 1) \
   F(PromoteScheduledException, 0, 1) \
   \
   /* Contexts */ \
src/x64/code-stubs-x64.cc
@@ -3579,6 +3579,11 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
 }
 
 
+void InterruptStub::Generate(MacroAssembler* masm) {
+  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+
+
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
src/x64/full-codegen-x64.cc
@@ -317,7 +317,8 @@ void FullCodeGenerator::ClearAccumulator() {
 }
 
 
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+                                       Label* back_edge_target) {
   Comment cmnt(masm_, "[ Stack check");
   Label ok;
   __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
@@ -1071,7 +1072,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ bind(loop_statement.continue_label());
   __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
 
-  EmitStackCheck(stmt);
+  EmitStackCheck(stmt, &loop);
   __ jmp(&loop);
 
   // Remove the pointers stored on the stack.
test/mjsunit/count-based-osr.js (new file, 38 lines)
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --count-based-interrupts --interrupt-budget=10 --weighted-back-edges --allow-natives-syntax
+
+// Test that OSR works properly when using count-based interrupting/profiling.
+
+function osr_this() {
+  var a = 1;
+  // Trigger OSR.
+  while (%GetOptimizationStatus(osr_this) == 2) {}
+  return a;
+}
+assertEquals(1, osr_this());
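The test drives OSR deterministically: with --interrupt-budget=10 the while loop needs only a handful of back edges before InterruptStub fires, and %GetOptimizationStatus spins as long as osr_this reports status 2 (not optimized), so the loop exits once on-stack replacement has kicked in. To run it directly, one would presumably invoke a d8 shell built at this revision with the same flags as the // Flags: line:

    d8 --count-based-interrupts --interrupt-budget=10 --weighted-back-edges --allow-natives-syntax test/mjsunit/count-based-osr.js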