// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_DEOPTIMIZER_H_
|
|
|
|
#define V8_DEOPTIMIZER_H_
|
|
|
|
|
|
|
|
#include "v8.h"
|
|
|
|
|
2011-05-06 06:50:20 +00:00
|
|
|
#include "allocation.h"
|
2010-12-07 11:31:57 +00:00
|
|
|
#include "macro-assembler.h"
|
|
|
|
#include "zone-inl.h"
|
|
|
|
|
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
|
|
|
|
class FrameDescription;
|
|
|
|
class TranslationIterator;
|
|
|
|
class DeoptimizingCodeListNode;
|
2011-06-29 13:02:00 +00:00
|
|
|
class DeoptimizedFrameInfo;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2011-04-06 14:23:27 +00:00
|
|
|
// Describes a heap number that has to be materialized after deoptimization:
// the stack slot to patch and the double value to store there. Instances are
// collected in Deoptimizer::deferred_heap_numbers_ via AddDoubleValue().
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
 public:
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  // Address of the frame slot that will receive the materialized number.
  Address slot_address() const { return slot_address_; }
  // The unboxed double value to materialize.
  double value() const { return val_; }

 private:
  Address slot_address_;
  double val_;
};
|
|
|
|
|
|
|
|
|
2012-09-12 12:28:42 +00:00
|
|
|
// Describes an arguments object that has to be materialized after
// deoptimization: the stack slot to patch and the number of arguments it
// carries. Instances are collected in Deoptimizer::deferred_arguments_objects_
// via AddArgumentsObject().
class ArgumentsObjectMaterializationDescriptor BASE_EMBEDDED {
 public:
  ArgumentsObjectMaterializationDescriptor(Address slot_address, int argc)
      : slot_address_(slot_address), arguments_length_(argc) { }

  // Address of the frame slot that will receive the materialized object.
  Address slot_address() const { return slot_address_; }
  // Number of arguments captured by the arguments object.
  int arguments_length() const { return arguments_length_; }

 private:
  Address slot_address_;
  int arguments_length_;
};
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Visitor interface used by Deoptimizer::VisitAllOptimizedFunctions* to walk
// optimized JSFunctions grouped by native context. For each context the
// sequence of calls is: EnterContext, VisitFunction (zero or more times),
// LeaveContext.
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  // Called once per optimized function in the current context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};
|
|
|
|
|
|
|
|
|
2012-12-17 10:23:52 +00:00
|
|
|
// Predicate interface used by Deoptimizer::DeoptimizeAllFunctionsWith and
// DeoptimizeAllFunctionsForContext to select which optimized functions are
// deoptimized.
class OptimizedFunctionFilter BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionFilter() {}

  // Return true to deoptimize the given function, false to keep it.
  virtual bool TakeFunction(JSFunction* function) = 0;
};
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
class Deoptimizer;
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Orchestrates deoptimization of optimized code: decodes translation data
// into output frame descriptions (see DoComputeOutputFrames), materializes
// deferred heap numbers and arguments objects, and manages the table of
// deoptimization entry stubs. Instances are created via New() and handed
// over through Grab(); allocation is on the C++ heap (Malloced).
class Deoptimizer : public Malloced {
 public:
  // Kinds of bailout that select a deoptimization entry table.
  enum BailoutType {
    EAGER,
    LAZY,
    SOFT,
    OSR,
    // This last bailout type is not really a bailout, but used by the
    // debugger to deoptimize stack frames to allow inspection.
    DEBUGGER
  };

  // Number of bailout types that have their own entry-code table
  // (EAGER, LAZY and SOFT; see DeoptimizerData::deopt_entry_code_).
  static const int kBailoutTypesWithCodeEntry = SOFT + 1;

  // One entry of the lazy-deopt jump table emitted by code generators:
  // a bound label plus the deopt entry address and bailout kind.
  struct JumpTableEntry {
    inline JumpTableEntry(Address entry,
                          Deoptimizer::BailoutType type,
                          bool frame)
        : label(),
          address(entry),
          bailout_type(type),
          needs_frame(frame) { }
    Label label;
    Address address;
    Deoptimizer::BailoutType bailout_type;
    bool needs_frame;
  };

  // Returns whether deopt tracing is enabled for the given bailout and
  // frame kind.
  static bool TraceEnabledFor(BailoutType deopt_type,
                              StackFrame::Type frame_type);
  // Human-readable name for a bailout type (used in trace output).
  static const char* MessageFor(BailoutType type);

  // Number of output frames produced for the input frame.
  int output_count() const { return output_count_; }

  // Kind of the optimized code object being deoptimized.
  Code::Kind compiled_code_kind() const { return compiled_code_->kind(); }

  // Number of created JS frames. Not all created frames are necessarily JS.
  int jsframe_count() const { return jsframe_count_; }

  // Creates a deoptimizer for the given function/bailout and stores it in
  // the isolate; retrieve it with Grab().
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  // Takes ownership of the deoptimizer previously created with New().
  static Deoptimizer* Grab(Isolate* isolate);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
                                                        int jsframe_index,
                                                        Isolate* isolate);
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                             Isolate* isolate);
#endif

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  // is big enough.
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Iterate over all the functions which share the same code object
  // and make them use unoptimized version.
  static void ReplaceCodeForRelatedFunctions(JSFunction* function, Code* code);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll(Isolate* isolate);

  static void DeoptimizeGlobalObject(JSObject* object);

  // Deoptimize every optimized function the filter accepts.
  static void DeoptimizeAllFunctionsWith(Isolate* isolate,
                                         OptimizedFunctionFilter* filter);

  // Same, but restricted to a single (native) context.
  static void DeoptimizeAllFunctionsForContext(
      Context* context, OptimizedFunctionFilter* filter);

  // Walk the optimized functions of one context with the visitor.
  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  // Walk all optimized functions in the heap with the visitor.
  static void VisitAllOptimizedFunctions(Isolate* isolate,
                                         OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all interrupts with allowed loop depth in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchInterruptCode(Code* unoptimized_code,
                                 Code* interrupt_code,
                                 Code* replacement_code);

  // Patch the interrupt at the instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchInterruptCodeAt(Code* unoptimized_code,
                                   Address pc_after,
                                   Code* interrupt_code,
                                   Code* replacement_code);

  // Change all patched interrupts patched in the unoptimized code
  // back to normal interrupts.
  static void RevertInterruptCode(Code* unoptimized_code,
                                  Code* interrupt_code,
                                  Code* replacement_code);

  // Change patched interrupt in the unoptimized code
  // back to a normal interrupt.
  static void RevertInterruptCodeAt(Code* unoptimized_code,
                                    Address pc_after,
                                    Code* interrupt_code,
                                    Code* replacement_code);

#ifdef DEBUG
  // Returns whether the interrupt before pc_after has been patched.
  static bool InterruptCodeIsPatched(Code* unoptimized_code,
                                     Address pc_after,
                                     Code* interrupt_code,
                                     Code* replacement_code);

  // Verify that all back edges of a certain loop depth are patched.
  static void VerifyInterruptCode(Code* unoptimized_code,
                                  Code* interrupt_code,
                                  Code* replacement_code,
                                  int loop_nesting_level);
#endif  // DEBUG

  ~Deoptimizer();

  // Materialize deferred heap numbers and arguments objects into the
  // frames produced by ComputeOutputFrames().
  void MaterializeHeapObjects(JavaScriptFrameIterator* it);
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address parameters_top,
      uint32_t parameters_size,
      Address expressions_top,
      uint32_t expressions_size,
      DeoptimizedFrameInfo* info);
#endif

  // Entry point called from generated code: fills in the output frames.
  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  // Whether GetDeoptimizationEntry may generate entry code on demand or
  // must only compute the address of already-generated code.
  enum GetEntryMode {
    CALCULATE_ENTRY_ADDRESS,
    ENSURE_ENTRY_CODE
  };

  // Address of deoptimization entry |id| for the given bailout type.
  static Address GetDeoptimizationEntry(
      Isolate* isolate,
      int id,
      BailoutType type,
      GetEntryMode mode = ENSURE_ENTRY_CODE);
  // Inverse of GetDeoptimizationEntry; returns kNotDeoptimizationEntry if
  // addr is not an entry of the given type.
  static int GetDeoptimizationId(Isolate* isolate,
                                 Address addr,
                                 BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           BailoutId node_id,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);
  }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  // Sentinel returned by GetDeoptimizationId for non-entry addresses.
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    Isolate* isolate() const { return masm_->isolate(); }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  // Entry generator that emits a table of |count| entries.
  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

  // Converts a JS-frame index (skipping non-JS frames) into an index into
  // the full output_ array.
  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);

  static size_t GetMaxDeoptTableSize();

  // Generates entry code for ids up to max_entry_id if not present yet.
  static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                               BailoutType type,
                                               int max_entry_id);

  Isolate* isolate() const { return isolate_; }

 private:
  // Bounds on the number of pre-generated deoptimization entries.
  static const int kMinNumberOfEntries = 64;
  static const int kMaxNumberOfEntries = 16384;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta,
              Code* optimized_code);
  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
  void PrintFunctionName();
  void DeleteFrameDescriptions();

  // Frame translation: one DoCompute* method per output frame kind.
  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
  void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                      int frame_index);
  void DoComputeConstructStubFrame(TranslationIterator* iterator,
                                   int frame_index);
  void DoComputeAccessorStubFrame(TranslationIterator* iterator,
                                  int frame_index,
                                  bool is_setter_stub_frame);
  void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                  int frame_index);

  // Whether a translated value should be written as a raw (native) value
  // or as a tagged object.
  enum DeoptimizerTranslatedValueType {
    TRANSLATED_VALUE_IS_NATIVE,
    TRANSLATED_VALUE_IS_TAGGED
  };

  void DoTranslateCommand(TranslationIterator* iterator,
      int frame_index,
      unsigned output_offset,
      DeoptimizerTranslatedValueType value_type = TRANSLATED_VALUE_IS_TAGGED);

  // Translate a command for OSR.  Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  // Record deferred materializations performed by MaterializeHeapObjects().
  void AddArgumentsObject(intptr_t slot_address, int argc);
  void AddArgumentsObjectValue(intptr_t value);
  void AddDoubleValue(intptr_t slot_address, double value);

  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(v8::Isolate* isolate,
                                        v8::Persistent<v8::Value> obj,
                                        void* data);

  // Deoptimize function assuming that function->next_function_link() points
  // to a list that contains all functions that share the same optimized code.
  static void DeoptimizeFunctionWithPreparedFunctionList(JSFunction* function);

  // Fill the input frame from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  // Fill the given output frame's registers to contain the failure handler
  // address and the number of parameters for a stub failure trampoline.
  void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
                                        CodeStubInterfaceDescriptor* desc);

  // Fill the given output frame's double registers with the original values
  // from the input frame's double registers.
  void CopyDoubleRegisters(FrameDescription* output_frame);

  Isolate* isolate_;
  JSFunction* function_;
  Code* compiled_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;
  int has_alignment_padding_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Number of output js frames.
  int jsframe_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Deferred materializations, filled by AddArgumentsObject*/AddDoubleValue
  // and consumed by MaterializeHeapObjects().
  List<Object*> deferred_arguments_objects_values_;
  List<ArgumentsObjectMaterializationDescriptor> deferred_arguments_objects_;
  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  // Whether trace output is enabled for this deopt (see TraceEnabledFor).
  bool trace_;

  // Size of a single entry in the deoptimization entry table
  // (platform-specific; defined in the per-architecture source).
  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
  friend class DeoptimizedFrameInfo;
};
|
|
|
|
|
|
|
|
|
|
|
|
// Description of a single (input or output) frame built during
// deoptimization. The object is over-allocated by frame_size bytes via the
// custom operator new so that frame_content_ extends to hold the whole frame;
// generated code accesses the fields through the *_offset() accessors, so the
// member layout below must not change.
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  // Placement-delete counterpart used if the constructor throws.
  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    // frame_size_ is stored as uintptr_t only for alignment; it must fit
    // in 32 bits.
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
#if V8_TARGET_ARCH_MIPS
    // Prevent gcc from using load-double (mips ldc1) on (possibly)
    // non-64-bit aligned double. Uses two lwc1 instructions.
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = *reinterpret_cast<uint32_t*>(ptr);
    c.u[1] = *(reinterpret_cast<uint32_t*>(ptr) + 1);
    return c.d;
#else
    return *reinterpret_cast<double*>(ptr);
#endif
  }

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  // Stack pointer, program counter, frame pointer and context of the frame.
  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount();

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(int index);

  // Field offsets used by generated code to address this object directly.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }

  int ComputeFixedSize();
};
|
|
|
|
|
|
|
|
|
2013-05-14 11:45:33 +00:00
|
|
|
// Per-isolate deoptimizer state: the generated deoptimization entry code
// per bailout type, the currently active Deoptimizer, and the list of
// deoptimized code objects kept alive by active stack frames. Accessed by
// Deoptimizer through friendship.
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // GC support: visits heap references held by this structure.
  void Iterate(ObjectVisitor* v);
#endif

  Code* FindDeoptimizingCode(Address addr);
  void RemoveDeoptimizingCode(Code* code);

 private:
  MemoryAllocator* allocator_;
  // Number of generated entries and the chunk holding the entry code,
  // indexed by bailout type (EAGER/LAZY/SOFT).
  int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
  // Deoptimizer created by Deoptimizer::New and not yet Grab()bed.
  Deoptimizer* current_;

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;
#endif

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when deoptimizing
  // a frame for which the code object for the function function has been
  // changed from the code present when deoptimizing was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Zone-allocated byte buffer into which Translation records are encoded;
// the finished contents are copied into a ByteArray on the heap via
// CreateByteArray().
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  // Current write position; used as the start index of a new Translation.
  int CurrentIndex() const { return contents_.length(); }
  // Append one encoded value to the buffer.
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};
|
|
|
|
|
|
|
|
|
|
|
|
class TranslationIterator BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
TranslationIterator(ByteArray* buffer, int index)
|
|
|
|
: buffer_(buffer), index_(index) {
|
|
|
|
ASSERT(index >= 0 && index < buffer->length());
|
|
|
|
}
|
|
|
|
|
|
|
|
int32_t Next();
|
|
|
|
|
2011-08-08 07:17:01 +00:00
|
|
|
bool HasNext() const { return index_ < buffer_->length(); }
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
void Skip(int n) {
|
|
|
|
for (int i = 0; i < n; i++) Next();
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
ByteArray* buffer_;
|
|
|
|
int index_;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
// Writer for one deoptimization translation: a BEGIN record followed by
// frame and value commands, all encoded into a TranslationBuffer. The
// numeric values of Opcode are serialized into the buffer, so the order of
// the enumerators must not change.
class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    JS_FRAME,
    CONSTRUCT_STUB_FRAME,
    GETTER_STUB_FRAME,
    SETTER_STUB_FRAME,
    ARGUMENTS_ADAPTOR_FRAME,
    COMPILED_STUB_FRAME,
    REGISTER,
    INT32_REGISTER,
    UINT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    UINT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  // Writes the BEGIN record (opcode plus frame counts) immediately.
  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  // Start index of this translation within the buffer.
  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginCompiledStubFrame();
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);
  void MarkDuplicate();

  Zone* zone() const { return zone_; }

  // Number of operand values that follow the given opcode in the stream.
  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

  // A literal id which refers to the JSFunction itself.
  static const int kSelfLiteralId = -239;

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};
|
|
|
|
|
|
|
|
|
|
|
|
// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  // Wraps |code| in a node; handle creation/destruction is implemented
  // out of line (presumably creating/destroying the weak global handle
  // mentioned above — see the .cc file).
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  // Intrusive singly-linked-list link.
  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};
|
|
|
|
|
|
|
|
|
2011-04-01 11:41:36 +00:00
|
|
|
class SlotRef BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
enum SlotRepresentation {
|
|
|
|
UNKNOWN,
|
|
|
|
TAGGED,
|
|
|
|
INT32,
|
2012-08-22 15:44:17 +00:00
|
|
|
UINT32,
|
2011-04-01 11:41:36 +00:00
|
|
|
DOUBLE,
|
|
|
|
LITERAL
|
|
|
|
};
|
|
|
|
|
|
|
|
SlotRef()
|
|
|
|
: addr_(NULL), representation_(UNKNOWN) { }
|
|
|
|
|
|
|
|
SlotRef(Address addr, SlotRepresentation representation)
|
|
|
|
: addr_(addr), representation_(representation) { }
|
|
|
|
|
2013-02-25 14:46:09 +00:00
|
|
|
SlotRef(Isolate* isolate, Object* literal)
|
|
|
|
: literal_(literal, isolate), representation_(LITERAL) { }
|
2011-04-01 11:41:36 +00:00
|
|
|
|
2013-02-25 14:46:09 +00:00
|
|
|
Handle<Object> GetValue(Isolate* isolate) {
|
2011-04-01 11:41:36 +00:00
|
|
|
switch (representation_) {
|
|
|
|
case TAGGED:
|
2013-02-25 14:46:09 +00:00
|
|
|
return Handle<Object>(Memory::Object_at(addr_), isolate);
|
2011-04-01 11:41:36 +00:00
|
|
|
|
|
|
|
case INT32: {
|
|
|
|
int value = Memory::int32_at(addr_);
|
|
|
|
if (Smi::IsValid(value)) {
|
2013-02-25 14:46:09 +00:00
|
|
|
return Handle<Object>(Smi::FromInt(value), isolate);
|
2011-04-01 11:41:36 +00:00
|
|
|
} else {
|
2013-02-25 14:46:09 +00:00
|
|
|
return isolate->factory()->NewNumberFromInt(value);
|
2011-04-01 11:41:36 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-08-22 15:44:17 +00:00
|
|
|
case UINT32: {
|
|
|
|
uint32_t value = Memory::uint32_at(addr_);
|
|
|
|
if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
|
2013-02-25 14:46:09 +00:00
|
|
|
return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
|
2012-08-22 15:44:17 +00:00
|
|
|
} else {
|
2013-02-25 14:46:09 +00:00
|
|
|
return isolate->factory()->NewNumber(static_cast<double>(value));
|
2012-08-22 15:44:17 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-04-01 11:41:36 +00:00
|
|
|
case DOUBLE: {
|
|
|
|
double value = Memory::double_at(addr_);
|
2013-02-25 14:46:09 +00:00
|
|
|
return isolate->factory()->NewNumber(value);
|
2011-04-01 11:41:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
case LITERAL:
|
|
|
|
return literal_;
|
|
|
|
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
|
|
|
return Handle<Object>::null();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-01-24 08:43:12 +00:00
|
|
|
static Vector<SlotRef> ComputeSlotMappingForArguments(
|
|
|
|
JavaScriptFrame* frame,
|
|
|
|
int inlined_frame_index,
|
|
|
|
int formal_parameter_count);
|
2011-04-01 11:41:36 +00:00
|
|
|
|
|
|
|
private:
|
|
|
|
Address addr_;
|
|
|
|
Handle<Object> literal_;
|
|
|
|
SlotRepresentation representation_;
|
|
|
|
|
|
|
|
static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
|
|
|
|
if (slot_index >= 0) {
|
|
|
|
const int offset = JavaScriptFrameConstants::kLocal0Offset;
|
|
|
|
return frame->fp() + offset - (slot_index * kPointerSize);
|
|
|
|
} else {
|
|
|
|
const int offset = JavaScriptFrameConstants::kLastParameterOffset;
|
|
|
|
return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
|
|
|
|
DeoptimizationInputData* data,
|
|
|
|
JavaScriptFrame* frame);
|
2012-01-24 08:43:12 +00:00
|
|
|
|
|
|
|
static void ComputeSlotsForArguments(
|
|
|
|
Vector<SlotRef>* args_slots,
|
|
|
|
TranslationIterator* iterator,
|
|
|
|
DeoptimizationInputData* data,
|
|
|
|
JavaScriptFrame* frame);
|
2011-04-01 11:41:36 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
2011-06-29 13:02:00 +00:00
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
|
|
|
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
// Represents parameters in unadapted form so their number might mismatch
// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  // Copies the frame at |frame_index| out of |deoptimizer|'s internal
  // state (implemented out of line). The two flags describe how the
  // frame was entered: through an arguments adaptor and/or a construct
  // stub.
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                       int frame_index,
                       bool has_arguments_adaptor,
                       bool has_construct_stub);
  virtual ~DeoptimizedFrameInfo();

  // GC support.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {
    return function_;
  }

  // Check if this frame is preceded by construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  // Return the recorded source position for this frame.
  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  JSFunction* function_;        // The frame's function.
  bool has_construct_stub_;     // See HasConstructStub().
  int parameters_count_;        // Number of entries in parameters_.
  int expression_count_;        // Number of entries in expression_stack_.
  Object** parameters_;         // Copied incoming arguments.
  Object** expression_stack_;   // Copied expression stack contents.
  int source_position_;         // See GetSourcePosition().

  // The Deoptimizer populates this object through the private setters.
  friend class Deoptimizer;
};
|
|
|
|
#endif
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
} } // namespace v8::internal
|
|
|
|
|
|
|
|
#endif // V8_DEOPTIMIZER_H_
|