2012-01-24 08:43:12 +00:00
|
|
|
// Copyright 2012 the V8 project authors. All rights reserved.
|
2014-04-29 06:42:26 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
#ifndef V8_DEOPTIMIZER_H_
|
|
|
|
#define V8_DEOPTIMIZER_H_
|
|
|
|
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/allocation.h"
|
2016-07-18 09:23:28 +00:00
|
|
|
#include "src/deoptimize-reason.h"
|
2014-06-03 08:12:43 +00:00
|
|
|
#include "src/macro-assembler.h"
|
2016-08-23 12:35:20 +00:00
|
|
|
#include "src/source-position.h"
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
|
|
|
|
// Forward declarations for the deoptimization translation machinery defined
// below and in the corresponding .cc file.
class FrameDescription;
class TranslationIterator;
class DeoptimizedFrameInfo;
class TranslatedState;
class RegisterValues;
|
|
|
|
|
2015-06-30 08:24:44 +00:00
|
|
|
// A single value produced by deoptimization translation. Instances are owned
// by a TranslatedState (see container_) and grouped into TranslatedFrames.
// A value starts out raw (untagged bits or a raw Object*) and is later
// handlified/materialized into a Handle<Object>.
class TranslatedValue {
 public:
  // Allocation-less getter of the value.
  // Returns heap()->arguments_marker() if allocation would be
  // necessary to get the value.
  Object* GetRawValue() const;
  // Materializing getter; may allocate to produce the handle.
  Handle<Object> GetValue();

  bool IsMaterializedObject() const;
  bool IsMaterializableByDebugger() const;

 private:
  friend class TranslatedState;
  friend class TranslatedFrame;

  // Discriminator for the union below.
  enum Kind {
    kInvalid,
    kTagged,
    kInt32,
    kUInt32,
    kBoolBit,
    kFloat,
    kDouble,
    kCapturedObject,    // Object captured by the escape analysis.
                        // The number of nested objects can be obtained
                        // with the DeferredObjectLength() method
                        // (the values of the nested objects follow
                        // this value in the depth-first order.)
    kDuplicatedObject,  // Duplicated object of a deferred object.
    kArgumentsObject    // Arguments object - only used to keep indexing
                        // in sync, it should not be materialized.
  };

  TranslatedValue(TranslatedState* container, Kind kind)
      : kind_(kind), container_(container) {}
  Kind kind() const { return kind_; }
  // Converts the raw representation into value_ (a handle).
  void Handlify();
  int GetChildrenCount() const;

  // Factory methods, one per Kind. Used by TranslatedState while decoding
  // a translation.
  static TranslatedValue NewArgumentsObject(TranslatedState* container,
                                            int length, int object_index);
  static TranslatedValue NewDeferredObject(TranslatedState* container,
                                           int length, int object_index);
  static TranslatedValue NewDuplicateObject(TranslatedState* container, int id);
  static TranslatedValue NewFloat(TranslatedState* container, float value);
  static TranslatedValue NewDouble(TranslatedState* container, double value);
  static TranslatedValue NewInt32(TranslatedState* container, int32_t value);
  static TranslatedValue NewUInt32(TranslatedState* container, uint32_t value);
  static TranslatedValue NewBool(TranslatedState* container, uint32_t value);
  static TranslatedValue NewTagged(TranslatedState* container, Object* literal);
  static TranslatedValue NewInvalid(TranslatedState* container);

  Isolate* isolate() const;
  // Materializes simple (non-object) kinds into value_.
  void MaterializeSimple();

  Kind kind_;
  TranslatedState* container_;  // This is only needed for materialization of
                                // objects and constructing handles (to get
                                // to the isolate).

  MaybeHandle<Object> value_;  // Before handlification, this is always null,
                               // after materialization it is never null,
                               // in between it is only null if the value needs
                               // to be materialized.

  // Identity/length bookkeeping for captured, duplicated and arguments
  // objects.
  struct MaterializedObjectInfo {
    int id_;
    int length_;  // Applies only to kArgumentsObject or kCapturedObject kinds.
  };

  // Raw payload; the active member is selected by kind_.
  union {
    // kind kTagged. After handlification it is always nullptr.
    Object* raw_literal_;
    // kind is kUInt32 or kBoolBit.
    uint32_t uint32_value_;
    // kind is kInt32.
    int32_t int32_value_;
    // kind is kFloat
    float float_value_;
    // kind is kDouble
    double double_value_;
    // kind is kDuplicatedObject or kArgumentsObject or kCapturedObject.
    MaterializedObjectInfo materialization_info_;
  };

  // Checked accessors for the union members.
  Object* raw_literal() const;
  int32_t int32_value() const;
  uint32_t uint32_value() const;
  float float_value() const;
  double double_value() const;
  int object_length() const;
  int object_index() const;
};
|
|
|
|
|
|
|
|
|
|
|
|
// One frame reconstructed from a deoptimization translation. Holds the
// sequence of TranslatedValues belonging to that frame and metadata (kind,
// bailout id, shared function info, height). Created and populated by
// TranslatedState.
class TranslatedFrame {
 public:
  // The type of frame this translation describes.
  enum Kind {
    kFunction,
    kInterpretedFunction,
    kGetter,
    kSetter,
    kTailCallerFunction,
    kArgumentsAdaptor,
    kConstructStub,
    kCompiledStub,
    kInvalid
  };

  int GetValueCount();

  Kind kind() const { return kind_; }
  BailoutId node_id() const { return node_id_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  int height() const { return height_; }

  // Raw (pre-handlification) accessor; CHECKs that the pointer is set.
  SharedFunctionInfo* raw_shared_info() const {
    CHECK_NOT_NULL(raw_shared_info_);
    return raw_shared_info_;
  }

  // Forward iterator over the frame's TranslatedValues. Advancing skips
  // over nested object children via AdvanceIterator.
  class iterator {
   public:
    iterator& operator++() {
      AdvanceIterator(&position_);
      return *this;
    }

    iterator operator++(int) {
      iterator original(position_);
      AdvanceIterator(&position_);
      return original;
    }

    bool operator==(const iterator& other) const {
      return position_ == other.position_;
    }
    bool operator!=(const iterator& other) const { return !(*this == other); }

    TranslatedValue& operator*() { return (*position_); }
    TranslatedValue* operator->() { return &(*position_); }

   private:
    friend TranslatedFrame;

    explicit iterator(std::deque<TranslatedValue>::iterator position)
        : position_(position) {}

    std::deque<TranslatedValue>::iterator position_;
  };

  typedef TranslatedValue& reference;
  typedef TranslatedValue const& const_reference;

  iterator begin() { return iterator(values_.begin()); }
  iterator end() { return iterator(values_.end()); }

  reference front() { return values_.front(); }
  const_reference front() const { return values_.front(); }

 private:
  friend class TranslatedState;

  // Constructor static methods.
  static TranslatedFrame JSFrame(BailoutId node_id,
                                 SharedFunctionInfo* shared_info, int height);
  static TranslatedFrame InterpretedFrame(BailoutId bytecode_offset,
                                          SharedFunctionInfo* shared_info,
                                          int height);
  static TranslatedFrame AccessorFrame(Kind kind,
                                       SharedFunctionInfo* shared_info);
  static TranslatedFrame ArgumentsAdaptorFrame(SharedFunctionInfo* shared_info,
                                               int height);
  static TranslatedFrame TailCallerFrame(SharedFunctionInfo* shared_info);
  static TranslatedFrame ConstructStubFrame(SharedFunctionInfo* shared_info,
                                            int height);
  static TranslatedFrame CompiledStubFrame(int height, Isolate* isolate) {
    return TranslatedFrame(kCompiledStub, isolate, nullptr, height);
  }
  static TranslatedFrame InvalidFrame() {
    return TranslatedFrame(kInvalid, nullptr);
  }

  // Steps the deque iterator past a value and all of its nested children.
  static void AdvanceIterator(std::deque<TranslatedValue>::iterator* iter);

  TranslatedFrame(Kind kind, Isolate* isolate,
                  SharedFunctionInfo* shared_info = nullptr, int height = 0)
      : kind_(kind),
        node_id_(BailoutId::None()),
        raw_shared_info_(shared_info),
        height_(height),
        isolate_(isolate) {}

  void Add(const TranslatedValue& value) { values_.push_back(value); }
  // Converts raw_shared_info_ into the shared_info_ handle.
  void Handlify();

  Kind kind_;
  BailoutId node_id_;
  SharedFunctionInfo* raw_shared_info_;
  Handle<SharedFunctionInfo> shared_info_;
  int height_;
  Isolate* isolate_;

  typedef std::deque<TranslatedValue> ValuesContainer;

  ValuesContainer values_;
};
|
|
|
|
|
|
|
|
|
|
|
|
// Auxiliary class for translating deoptimization values.
// Typical usage sequence:
//
// 1. Construct the instance. This will involve reading out the translations
//    and resolving them to values using the supplied frame pointer and
//    machine state (registers). This phase is guaranteed not to allocate
//    and not to use any HandleScope. Any object pointers will be stored raw.
//
// 2. Handlify pointers. This will convert all the raw pointers to handles.
//
// 3. Reading out the frame values.
//
// Note: After the instance is constructed, it is possible to iterate over
// the values eagerly.
class TranslatedState {
 public:
  TranslatedState();
  explicit TranslatedState(JavaScriptFrame* frame);

  void Prepare(bool has_adapted_arguments, Address stack_frame_pointer);

  // Store newly materialized values into the isolate.
  void StoreMaterializedValuesAndDeopt();

  // Iteration over the translated frames (see frames_).
  typedef std::vector<TranslatedFrame>::iterator iterator;
  iterator begin() { return frames_.begin(); }
  iterator end() { return frames_.end(); }

  typedef std::vector<TranslatedFrame>::const_iterator const_iterator;
  const_iterator begin() const { return frames_.begin(); }
  const_iterator end() const { return frames_.end(); }

  std::vector<TranslatedFrame>& frames() { return frames_; }

  TranslatedFrame* GetArgumentsInfoFromJSFrameIndex(int jsframe_index,
                                                    int* arguments_count);

  Isolate* isolate() { return isolate_; }

  // Decodes the translation (phase 1 above): reads frames and values from
  // the iterator, resolving slots against the frame pointer and registers.
  void Init(Address input_frame_pointer, TranslationIterator* iterator,
            FixedArray* literal_array, RegisterValues* registers,
            FILE* trace_file);

 private:
  friend TranslatedValue;

  TranslatedFrame CreateNextTranslatedFrame(TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            FILE* trace_file);
  TranslatedValue CreateNextTranslatedValue(int frame_index, int value_index,
                                            TranslationIterator* iterator,
                                            FixedArray* literal_array,
                                            Address fp,
                                            RegisterValues* registers,
                                            FILE* trace_file);

  void UpdateFromPreviouslyMaterializedObjects();
  Handle<Object> MaterializeAt(int frame_index, int* value_index);
  Handle<Object> MaterializeObjectAt(int object_index);
  bool GetAdaptedArguments(Handle<JSObject>* result, int frame_index);

  // Reads a raw 32-bit value from the stack slot at fp + slot offset.
  static uint32_t GetUInt32Slot(Address fp, int slot_index);

  std::vector<TranslatedFrame> frames_;
  Isolate* isolate_;
  Address stack_frame_pointer_;
  bool has_adapted_arguments_;

  // Location of a captured/duplicated object within the frames: which frame
  // and which value index inside that frame.
  struct ObjectPosition {
    int frame_index_;
    int value_index_;
  };
  std::deque<ObjectPosition> object_positions_;
};
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2012-09-12 12:28:42 +00:00
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Visitor interface used when iterating over all optimized JSFunctions,
// grouped by native context (see Deoptimizer::VisitAllOptimizedFunctions).
class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given native context.
  virtual void EnterContext(Context* context) = 0;

  // Called once for each optimized function in the current context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given native context.
  virtual void LeaveContext(Context* context) = 0;
};
|
|
|
|
|
|
|
|
class Deoptimizer : public Malloced {
|
|
|
|
public:
|
2016-03-07 08:18:41 +00:00
|
|
|
enum BailoutType { EAGER, LAZY, SOFT, kLastBailoutType = SOFT };
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2016-05-18 07:50:00 +00:00
|
|
|
enum class BailoutState {
|
|
|
|
NO_REGISTERS,
|
|
|
|
TOS_REGISTER,
|
|
|
|
};
|
|
|
|
|
|
|
|
// Returns a human-readable name for the given BailoutState, for use in
// tracing/debug output. The switch is exhaustive; falling out of it is a
// bug (UNREACHABLE), and the trailing return only silences compilers that
// cannot see that.
static const char* BailoutStateToString(BailoutState state) {
  switch (state) {
    case BailoutState::NO_REGISTERS:
      return "NO_REGISTERS";
    case BailoutState::TOS_REGISTER:
      return "TOS_REGISTER";
  }
  UNREACHABLE();
  return nullptr;
}
|
|
|
|
|
2015-02-10 14:32:42 +00:00
|
|
|
// Plain data describing one deoptimization event: where it happened in the
// source, why it happened, and its id (kNoDeoptId if none was assigned).
struct DeoptInfo {
  DeoptInfo(SourcePosition position, DeoptimizeReason deopt_reason,
            int deopt_id)
      : position(position), deopt_reason(deopt_reason), deopt_id(deopt_id) {}

  SourcePosition position;
  DeoptimizeReason deopt_reason;
  int deopt_id;

  // Sentinel for "no deopt id assigned".
  static const int kNoDeoptId = -1;
};
|
|
|
|
|
2015-03-09 14:43:29 +00:00
|
|
|
static DeoptInfo GetDeoptInfo(Code* code, byte* from);
|
2015-02-10 14:32:42 +00:00
|
|
|
|
2016-08-12 12:01:37 +00:00
|
|
|
static int ComputeSourcePositionFromBaselineCode(SharedFunctionInfo* shared,
|
|
|
|
BailoutId node_id);
|
|
|
|
static int ComputeSourcePositionFromBytecodeArray(SharedFunctionInfo* shared,
|
|
|
|
BailoutId node_id);
|
2016-05-13 08:49:49 +00:00
|
|
|
|
2014-03-14 15:14:42 +00:00
|
|
|
// One entry of the lazily generated deoptimization jump table: the target
// entry address, bookkeeping for tracing (deopt_info), and how to bail out.
// Zone-allocated (ZoneObject).
struct JumpTableEntry : public ZoneObject {
  inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
                        Deoptimizer::BailoutType type, bool frame)
      : label(),
        address(entry),
        deopt_info(deopt_info),
        bailout_type(type),
        needs_frame(frame) {}

  // Two entries are interchangeable when they jump to the same address with
  // the same bailout type and frame requirement; deopt_info is intentionally
  // not compared (it only affects tracing).
  bool IsEquivalentTo(const JumpTableEntry& other) const {
    return address == other.address && bailout_type == other.bailout_type &&
           needs_frame == other.needs_frame;
  }

  Label label;
  Address address;
  DeoptInfo deopt_info;
  Deoptimizer::BailoutType bailout_type;
  bool needs_frame;
};
|
|
|
|
|
2016-08-25 13:25:54 +00:00
|
|
|
static bool TraceEnabledFor(StackFrame::Type frame_type);
|
2012-12-21 07:18:56 +00:00
|
|
|
static const char* MessageFor(BailoutType type);
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
int output_count() const { return output_count_; }
|
|
|
|
|
2013-08-22 13:03:40 +00:00
|
|
|
Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
|
|
|
|
Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
|
|
|
|
BailoutType bailout_type() const { return bailout_type_; }
|
2012-12-18 16:25:45 +00:00
|
|
|
|
2012-01-24 08:43:12 +00:00
|
|
|
// Number of created JS frames. Not all created frames are necessarily JS.
|
|
|
|
int jsframe_count() const { return jsframe_count_; }
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
static Deoptimizer* New(JSFunction* function,
|
|
|
|
BailoutType type,
|
|
|
|
unsigned bailout_id,
|
|
|
|
Address from,
|
2011-03-18 20:35:07 +00:00
|
|
|
int fp_to_sp_delta,
|
|
|
|
Isolate* isolate);
|
|
|
|
static Deoptimizer* Grab(Isolate* isolate);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2011-06-29 13:02:00 +00:00
|
|
|
// The returned object with information on the optimized frame needs to be
|
|
|
|
// freed before another one can be generated.
|
|
|
|
static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
|
2012-01-24 08:43:12 +00:00
|
|
|
int jsframe_index,
|
2011-06-29 13:02:00 +00:00
|
|
|
Isolate* isolate);
|
|
|
|
|
2011-03-25 10:29:34 +00:00
|
|
|
// Makes sure that there is enough room in the relocation
|
|
|
|
// information of a code object to perform lazy deoptimization
|
|
|
|
// patching. If there is not enough room a new relocation
|
|
|
|
// information object is allocated and comments are added until it
|
|
|
|
// is big enough.
|
|
|
|
static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Deoptimize the function now. Its current optimized code will never be run
|
|
|
|
// again and any activations of the optimized code will get deoptimized when
|
|
|
|
// execution returns.
|
|
|
|
static void DeoptimizeFunction(JSFunction* function);
|
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Deoptimize all code in the given isolate.
|
2013-03-18 13:57:49 +00:00
|
|
|
static void DeoptimizeAll(Isolate* isolate);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Deoptimizes all optimized code that has been previously marked
|
|
|
|
// (via code->set_marked_for_deoptimization) and unlinks all functions that
|
|
|
|
// refer to that code.
|
|
|
|
static void DeoptimizeMarkedCode(Isolate* isolate);
|
2012-12-17 10:23:52 +00:00
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Visit all the known optimized functions in a given isolate.
|
|
|
|
static void VisitAllOptimizedFunctions(
|
|
|
|
Isolate* isolate, OptimizedFunctionVisitor* visitor);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2011-02-02 13:55:29 +00:00
|
|
|
// The size in bytes of the code required at a lazy deopt patch site.
|
|
|
|
static int patch_size();
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
~Deoptimizer();
|
|
|
|
|
2012-09-12 12:28:42 +00:00
|
|
|
void MaterializeHeapObjects(JavaScriptFrameIterator* it);
|
2014-04-25 11:00:37 +00:00
|
|
|
|
2011-03-30 18:05:16 +00:00
|
|
|
static void ComputeOutputFrames(Deoptimizer* deoptimizer);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2012-11-07 08:49:17 +00:00
|
|
|
|
|
|
|
enum GetEntryMode {
|
|
|
|
CALCULATE_ENTRY_ADDRESS,
|
|
|
|
ENSURE_ENTRY_CODE
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
static Address GetDeoptimizationEntry(
|
2013-02-27 14:45:59 +00:00
|
|
|
Isolate* isolate,
|
2012-11-07 08:49:17 +00:00
|
|
|
int id,
|
|
|
|
BailoutType type,
|
|
|
|
GetEntryMode mode = ENSURE_ENTRY_CODE);
|
2013-03-18 13:57:49 +00:00
|
|
|
static int GetDeoptimizationId(Isolate* isolate,
|
|
|
|
Address addr,
|
|
|
|
BailoutType type);
|
2010-12-22 09:49:26 +00:00
|
|
|
static int GetOutputInfo(DeoptimizationOutputData* data,
|
2012-08-06 14:13:09 +00:00
|
|
|
BailoutId node_id,
|
2010-12-22 09:49:26 +00:00
|
|
|
SharedFunctionInfo* shared);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
// Code generation support.
|
|
|
|
static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
|
|
|
|
static int output_count_offset() {
|
|
|
|
return OFFSET_OF(Deoptimizer, output_count_);
|
|
|
|
}
|
|
|
|
static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
|
|
|
|
|
2016-03-09 11:33:10 +00:00
|
|
|
static int caller_frame_top_offset() {
|
|
|
|
return OFFSET_OF(Deoptimizer, caller_frame_top_);
|
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
static int GetDeoptimizedCodeCount(Isolate* isolate);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
static const int kNotDeoptimizationEntry = -1;
|
|
|
|
|
|
|
|
// Generators for the deoptimization entry code.
|
2015-03-18 10:34:09 +00:00
|
|
|
// Generator for the deoptimization entry code: emits `count` table entries
// of the given bailout type into the provided MacroAssembler.
class TableEntryGenerator BASE_EMBEDDED {
 public:
  TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
      : masm_(masm), type_(type), count_(count) {}

  // Emits the full entry table.
  void Generate();

 protected:
  MacroAssembler* masm() const { return masm_; }
  BailoutType type() const { return type_; }
  Isolate* isolate() const { return masm_->isolate(); }

  // Platform-specific prologue emitted before the table entries.
  void GeneratePrologue();

 private:
  int count() const { return count_; }

  MacroAssembler* masm_;
  Deoptimizer::BailoutType type_;
  int count_;
};
|
|
|
|
|
2012-11-07 08:49:17 +00:00
|
|
|
static size_t GetMaxDeoptTableSize();
|
|
|
|
|
2013-02-27 14:45:59 +00:00
|
|
|
static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
|
|
|
|
BailoutType type,
|
2012-12-18 16:25:45 +00:00
|
|
|
int max_entry_id);
|
|
|
|
|
2013-03-18 13:57:49 +00:00
|
|
|
Isolate* isolate() const { return isolate_; }
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
private:
|
2012-11-07 08:49:17 +00:00
|
|
|
static const int kMinNumberOfEntries = 64;
|
|
|
|
static const int kMaxNumberOfEntries = 16384;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2016-08-25 08:14:14 +00:00
|
|
|
Deoptimizer(Isolate* isolate, JSFunction* function, BailoutType type,
|
|
|
|
unsigned bailout_id, Address from, int fp_to_sp_delta);
|
|
|
|
Code* FindOptimizedCode(JSFunction* function);
|
2012-12-21 07:18:56 +00:00
|
|
|
void PrintFunctionName();
|
2010-12-07 11:31:57 +00:00
|
|
|
void DeleteFrameDescriptions();
|
|
|
|
|
|
|
|
void DoComputeOutputFrames();
|
2016-03-09 11:33:10 +00:00
|
|
|
void DoComputeJSFrame(TranslatedFrame* translated_frame, int frame_index,
|
|
|
|
bool goto_catch_handler);
|
|
|
|
void DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index, bool goto_catch_handler);
|
|
|
|
void DoComputeArgumentsAdaptorFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index);
|
|
|
|
void DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index);
|
|
|
|
void DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index);
|
|
|
|
void DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index, bool is_setter_stub_frame);
|
|
|
|
void DoComputeCompiledStubFrame(TranslatedFrame* translated_frame,
|
|
|
|
int frame_index);
|
2013-04-02 11:28:01 +00:00
|
|
|
|
2015-06-15 10:14:28 +00:00
|
|
|
void WriteTranslatedValueToOutput(
|
|
|
|
TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
|
|
|
|
unsigned output_offset, const char* debug_hint_string = nullptr,
|
|
|
|
Address output_address_for_materialization = nullptr);
|
|
|
|
void WriteValueToOutput(Object* value, int input_index, int frame_index,
|
|
|
|
unsigned output_offset,
|
|
|
|
const char* debug_hint_string);
|
|
|
|
void DebugPrintOutputSlot(intptr_t value, int frame_index,
|
|
|
|
unsigned output_offset,
|
|
|
|
const char* debug_hint_string);
|
2014-04-25 12:58:15 +00:00
|
|
|
|
2016-03-07 12:18:43 +00:00
|
|
|
unsigned ComputeInputFrameAboveFpFixedSize() const;
|
2010-12-07 11:31:57 +00:00
|
|
|
unsigned ComputeInputFrameSize() const;
|
2016-02-11 12:03:29 +00:00
|
|
|
static unsigned ComputeJavascriptFixedSize(SharedFunctionInfo* shared);
|
|
|
|
static unsigned ComputeInterpretedFixedSize(SharedFunctionInfo* shared);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2016-02-11 12:03:29 +00:00
|
|
|
static unsigned ComputeIncomingArgumentSize(SharedFunctionInfo* shared);
|
2015-08-03 12:59:41 +00:00
|
|
|
static unsigned ComputeOutgoingArgumentSize(Code* code, unsigned bailout_id);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
static void GenerateDeoptimizationEntries(
|
|
|
|
MacroAssembler* masm, int count, BailoutType type);
|
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Marks all the code in the given context for deoptimization.
|
|
|
|
static void MarkAllCodeForContext(Context* native_context);
|
|
|
|
|
|
|
|
// Visit all the known optimized functions in a given context.
|
|
|
|
static void VisitAllOptimizedFunctionsForContext(
|
|
|
|
Context* context, OptimizedFunctionVisitor* visitor);
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Deoptimizes all code marked in the given context.
|
|
|
|
static void DeoptimizeMarkedCodeForContext(Context* native_context);
|
2013-07-24 11:12:17 +00:00
|
|
|
|
|
|
|
// Patch the given code so that it will deoptimize itself.
|
|
|
|
static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
|
2012-12-17 10:23:52 +00:00
|
|
|
|
2013-09-04 13:53:24 +00:00
|
|
|
// Searches the list of known deoptimizing code for a Code object
|
|
|
|
// containing the given address (which is supposedly faster than
|
|
|
|
// searching all code objects).
|
|
|
|
Code* FindDeoptimizingCode(Address addr);
|
|
|
|
|
2013-03-08 16:18:50 +00:00
|
|
|
// Fill the given output frame's registers to contain the failure handler
|
|
|
|
// address and the number of parameters for a stub failure trampoline.
|
|
|
|
void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
|
2014-09-08 15:18:54 +00:00
|
|
|
CodeStubDescriptor* desc);
|
2013-03-08 16:18:50 +00:00
|
|
|
|
|
|
|
// Fill the given output frame's double registers with the original values
|
|
|
|
// from the input frame's double registers.
|
|
|
|
void CopyDoubleRegisters(FrameDescription* output_frame);
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
Isolate* isolate_;
|
2010-12-07 11:31:57 +00:00
|
|
|
JSFunction* function_;
|
2012-12-18 16:25:45 +00:00
|
|
|
Code* compiled_code_;
|
2010-12-07 11:31:57 +00:00
|
|
|
unsigned bailout_id_;
|
|
|
|
BailoutType bailout_type_;
|
|
|
|
Address from_;
|
|
|
|
int fp_to_sp_delta_;
|
2016-02-12 10:14:42 +00:00
|
|
|
bool deoptimizing_throw_;
|
|
|
|
int catch_handler_data_;
|
|
|
|
int catch_handler_pc_offset_;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
// Input frame description.
|
|
|
|
FrameDescription* input_;
|
|
|
|
// Number of output frames.
|
|
|
|
int output_count_;
|
2012-01-24 08:43:12 +00:00
|
|
|
// Number of output js frames.
|
|
|
|
int jsframe_count_;
|
2010-12-07 11:31:57 +00:00
|
|
|
// Array of output frame descriptions.
|
|
|
|
FrameDescription** output_;
|
|
|
|
|
2016-03-07 12:18:43 +00:00
|
|
|
// Caller frame details computed from input frame.
|
|
|
|
intptr_t caller_frame_top_;
|
|
|
|
intptr_t caller_fp_;
|
|
|
|
intptr_t caller_pc_;
|
|
|
|
intptr_t caller_constant_pool_;
|
|
|
|
intptr_t input_frame_context_;
|
|
|
|
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optmized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
// Key for lookup of previously materialized objects
|
2016-03-07 12:18:43 +00:00
|
|
|
intptr_t stack_fp_;
|
The current
version is passing all the existing test + a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterielizedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
|
2015-06-08 10:04:51 +00:00
|
|
|
TranslatedState translated_state_;
|
|
|
|
struct ValueToMaterialize {
|
|
|
|
Address output_slot_address_;
|
|
|
|
TranslatedFrame::iterator value_;
|
|
|
|
};
|
|
|
|
std::vector<ValueToMaterialize> values_to_materialize_;
|
2013-08-07 11:24:14 +00:00
|
|
|
|
2013-06-03 15:32:22 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
DisallowHeapAllocation* disallow_heap_allocation_;
|
|
|
|
#endif // DEBUG
|
2010-12-07 11:31:57 +00:00
|
|
|
|
2013-11-07 16:35:27 +00:00
|
|
|
CodeTracer::Scope* trace_scope_;
|
2013-02-05 16:28:36 +00:00
|
|
|
|
2011-08-05 11:32:46 +00:00
|
|
|
static const int table_entry_size_;
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
friend class FrameDescription;
|
2011-06-29 13:02:00 +00:00
|
|
|
friend class DeoptimizedFrameInfo;
|
2010-12-07 11:31:57 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
2015-06-08 10:04:51 +00:00
|
|
|
// Snapshot of the register file (general purpose, float and double
// registers) taken when deoptimizing. Members are intentionally public and
// must keep their declaration order: FrameDescription computes raw byte
// offsets into these arrays via OFFSET_OF (see registers_offset() and
// double_registers_offset()).
class RegisterValues {
 public:
  intptr_t GetRegister(unsigned n) const {
#if DEBUG
    // This convoluted DCHECK is needed to work around a gcc problem that
    // improperly detects an array bounds overflow in optimized debug builds
    // when using a plain DCHECK.
    if (n >= arraysize(registers_)) {
      DCHECK(false);
      return 0;
    }
#endif
    return registers_[n];
  }

  float GetFloatRegister(unsigned n) const {
    DCHECK(n < arraysize(float_registers_));
    return float_registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    DCHECK(n < arraysize(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, intptr_t value) {
    DCHECK(n < arraysize(registers_));
    registers_[n] = value;
  }

  void SetFloatRegister(unsigned n, float value) {
    DCHECK(n < arraysize(float_registers_));
    float_registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    DCHECK(n < arraysize(double_registers_));
    double_registers_[n] = value;
  }

  // Register contents, indexed by the architecture's register codes.
  intptr_t registers_[Register::kNumRegisters];
  float float_registers_[FloatRegister::kMaxNumRegisters];
  double double_registers_[DoubleRegister::kMaxNumRegisters];
};
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Describes one physical stack frame that the deoptimizer builds as output.
// Instances are over-allocated (see the sized operator new) so that the
// trailing frame_content_[] array can hold the entire frame's slots.
class FrameDescription {
 public:
  explicit FrameDescription(uint32_t frame_size, int parameter_count = 0);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);
  }

  // Placement-delete counterpart of the sized operator new above; only
  // invoked if construction throws. The frame_size argument is unused.
  void operator delete(void* pointer, uint32_t frame_size) {
    free(pointer);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const {
    // frame_size_ is stored as uintptr_t for alignment only; it must always
    // fit in 32 bits (see the comment on the member below).
    DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);
  }

  // Reads the frame slot at the given byte offset into frame_content_.
  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  // Computes the address that corresponds to this frame's frame pointer,
  // derived from the frame size and the parameter count (which includes the
  // receiver).
  Address GetFramePointerAddress() {
    int fp_offset = GetFrameSize() - parameter_count() * kPointerSize -
                    StandardFrameConstants::kCallerSPOffset;
    return reinterpret_cast<Address>(GetFrameSlotPointer(fp_offset));
  }

  RegisterValues* GetRegisterValues() { return &register_values_; }

  // Writes the frame slot at the given byte offset into frame_content_.
  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  // Writers for the caller's pc/fp/constant-pool slots; defined per
  // architecture.
  void SetCallerPc(unsigned offset, intptr_t value);

  void SetCallerFp(unsigned offset, intptr_t value);

  void SetCallerConstantPool(unsigned offset, intptr_t value);

  // Convenience forwarders to the embedded RegisterValues.
  intptr_t GetRegister(unsigned n) const {
    return register_values_.GetRegister(n);
  }

  double GetDoubleRegister(unsigned n) const {
    return register_values_.GetDoubleRegister(n);
  }

  void SetRegister(unsigned n, intptr_t value) {
    register_values_.SetRegister(n, value);
  }

  void SetDoubleRegister(unsigned n, double value) {
    register_values_.SetDoubleRegister(n, value);
  }

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  intptr_t GetContext() const { return context_; }
  void SetContext(intptr_t context) { context_ = context; }

  intptr_t GetConstantPool() const { return constant_pool_; }
  void SetConstantPool(intptr_t constant_pool) {
    constant_pool_ = constant_pool;
  }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  StackFrame::Type GetFrameType() const { return type_; }
  void SetFrameType(StackFrame::Type type) { type_ = type; }

  // Argument count, including receiver.
  int parameter_count() { return parameter_count_; }

  // Raw byte offsets of members, for code that addresses this object
  // directly by offset rather than through the accessors above.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, register_values_.double_registers_);
  }

  static int frame_size_offset() {
    return offsetof(FrameDescription, frame_size_);
  }

  static int pc_offset() { return offsetof(FrameDescription, pc_); }

  static int state_offset() { return offsetof(FrameDescription, state_); }

  static int continuation_offset() {
    return offsetof(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return offsetof(FrameDescription, frame_content_);
  }

 private:
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  int parameter_count_;
  RegisterValues register_values_;
  intptr_t top_;
  intptr_t pc_;
  intptr_t fp_;
  intptr_t context_;
  intptr_t constant_pool_;
  StackFrame::Type type_;
  Smi* state_;

  // Continuation is the PC where the execution continues after
  // deoptimizing.
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  intptr_t* GetFrameSlotPointer(unsigned offset) {
    DCHECK(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};
|
|
|
|
|
|
|
|
|
2013-05-14 11:45:33 +00:00
|
|
|
// Per-isolate bookkeeping for deoptimization: holds the memory chunks with
// the generated deopt entry code (one slot per bailout type) and the
// currently active Deoptimizer, if any. Only Deoptimizer touches the
// internals (note the friend declaration).
class DeoptimizerData {
 public:
  explicit DeoptimizerData(MemoryAllocator* allocator);
  ~DeoptimizerData();

 private:
  MemoryAllocator* allocator_;
  // Number of generated entries and the chunk holding them, indexed by
  // bailout type (hence size kLastBailoutType + 1).
  int deopt_entry_code_entries_[Deoptimizer::kLastBailoutType + 1];
  MemoryChunk* deopt_entry_code_[Deoptimizer::kLastBailoutType + 1];

  // The Deoptimizer instance currently running in this isolate, if any.
  Deoptimizer* current_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};
|
|
|
|
|
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Zone-allocated growable byte buffer that accumulates encoded translation
// data (written through the Translation class below) and can be converted
// into a heap ByteArray for storage in the code's deoptimization data.
class TranslationBuffer BASE_EMBEDDED {
 public:
  explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }

  // Index at which the next value will be added; Translation records this
  // as its starting index.
  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value, Zone* zone);

  Handle<ByteArray> CreateByteArray(Factory* factory);

 private:
  ZoneList<uint8_t> contents_;
};
|
|
|
|
|
|
|
|
|
|
|
|
class TranslationIterator BASE_EMBEDDED {
|
|
|
|
public:
|
|
|
|
TranslationIterator(ByteArray* buffer, int index)
|
|
|
|
: buffer_(buffer), index_(index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < buffer->length());
|
2010-12-07 11:31:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
int32_t Next();
|
|
|
|
|
2011-08-08 07:17:01 +00:00
|
|
|
bool HasNext() const { return index_ < buffer_->length(); }
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
void Skip(int n) {
|
|
|
|
for (int i = 0; i < n; i++) Next();
|
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
ByteArray* buffer_;
|
|
|
|
int index_;
|
|
|
|
};
|
|
|
|
|
2015-04-23 08:07:12 +00:00
|
|
|
// X-macro listing every translation opcode. Used below to generate the
// Translation::Opcode enum and (in debug/disassembler builds) the opcode
// name strings, keeping the two in sync.
#define TRANSLATION_OPCODE_LIST(V) \
  V(BEGIN)                         \
  V(JS_FRAME)                      \
  V(INTERPRETED_FRAME)             \
  V(CONSTRUCT_STUB_FRAME)          \
  V(GETTER_STUB_FRAME)             \
  V(SETTER_STUB_FRAME)             \
  V(ARGUMENTS_ADAPTOR_FRAME)       \
  V(TAIL_CALLER_FRAME)             \
  V(COMPILED_STUB_FRAME)           \
  V(DUPLICATED_OBJECT)             \
  V(ARGUMENTS_OBJECT)              \
  V(CAPTURED_OBJECT)               \
  V(REGISTER)                      \
  V(INT32_REGISTER)                \
  V(UINT32_REGISTER)               \
  V(BOOL_REGISTER)                 \
  V(FLOAT_REGISTER)                \
  V(DOUBLE_REGISTER)               \
  V(STACK_SLOT)                    \
  V(INT32_STACK_SLOT)              \
  V(UINT32_STACK_SLOT)             \
  V(BOOL_STACK_SLOT)               \
  V(FLOAT_STACK_SLOT)              \
  V(DOUBLE_STACK_SLOT)             \
  V(LITERAL)
|
2013-10-01 11:18:30 +00:00
|
|
|
|
2010-12-07 11:31:57 +00:00
|
|
|
// Writer for the translation records that describe how to reconstruct the
// unoptimized frames from an optimized frame's state. All values are
// appended to the given TranslationBuffer; index() is the starting offset
// of this translation within that buffer.
class Translation BASE_EMBEDDED {
 public:
#define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
  enum Opcode {
    TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
    LAST = LITERAL
  };
#undef DECLARE_TRANSLATION_OPCODE_ENUM

  // Writes the BEGIN record (opcode, total frame count, JS frame count)
  // immediately, so subsequent commands append after it.
  Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
              Zone* zone)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()),
        zone_(zone) {
    buffer_->Add(BEGIN, zone);
    buffer_->Add(frame_count, zone);
    buffer_->Add(jsframe_count, zone);
  }

  // Starting index of this translation in the underlying buffer.
  int index() const { return index_; }

  // Commands.
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
  void BeginInterpretedFrame(BailoutId bytecode_offset, int literal_id,
                             unsigned height);
  void BeginCompiledStubFrame(int height);
  void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
  void BeginTailCallerFrame(int literal_id);
  void BeginConstructStubFrame(int literal_id, unsigned height);
  void BeginGetterStubFrame(int literal_id);
  void BeginSetterStubFrame(int literal_id);
  void BeginArgumentsObject(int args_length);
  void BeginCapturedObject(int length);
  void DuplicateObject(int object_index);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreUint32Register(Register reg);
  void StoreBoolRegister(Register reg);
  void StoreFloatRegister(FloatRegister reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreUint32StackSlot(int index);
  void StoreBoolStackSlot(int index);
  void StoreFloatStackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject(bool args_known, int args_index, int args_length);
  void StoreJSFrameFunction();

  Zone* zone() const { return zone_; }

  // Number of operand values the given opcode carries in the encoding.
  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
  Zone* zone_;
};
|
|
|
|
|
|
|
|
|
The current
version is passing all the existing tests plus a bunch of new tests
(packaged in the change list, too).
The patch extends the SlotRef object to describe captured and duplicated
objects. Since the SlotRefs are not independent of each other anymore,
there is a new SlotRefValueBuilder class that stores the SlotRefs and
later materializes the objects from the SlotRefs.
Note that unlike the previous implementation of SlotRefs, we now build
the SlotRef entries for the entire frame, not just the particular
function. This is because duplicate objects might refer to previous
captured objects (that might live inside other inlined function's part
of the frame).
We also need to store the materialized objects between other potential
invocations of the same arguments object so that we materialize each
captured object at most once. The materialized objects of frames live
in the new MaterializedObjectStore object (contained in Isolate),
indexed by the frame's FP address. Each argument materialization (and
deoptimization) tries to lookup its captured objects in the store before
building new ones. Deoptimization also removes the materialized objects
from the store. We also schedule a lazy deopt to be sure that we always
get rid of the materialized objects and that the optimized function
adopts the materialized objects (instead of happily computing with its
captured representations).
Concerns:
- Is the FP address the right key for a frame? (Note that deoptimizer's
representation of frame is different from the argument object
materializer's one - it is not easy to find common ground.)
- Performance is suboptimal in several places, but a quick local run of
benchmarks does not seem to show a perf hit. Examples of possible
improvements: smarter generation of SlotRefs (build other functions'
SlotRefs only for captured objects and only if necessary), smarter
lookup of stored materialized objects.
- Ideally, we would like to share the code for argument materialization
with deoptimizer's materializer. However, the supporting data structures
(mainly the frame descriptor) are quite different in each case, so it
looks more like a separate project.
Thanks for any feedback.
R=danno@chromium.org, mstarzinger@chromium.org
LOG=N
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=18918
Review URL: https://codereview.chromium.org/103243005
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18936 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2014-01-30 10:33:53 +00:00
|
|
|
// Per-isolate store of previously materialized objects, keyed by the frame
// pointer of the frame they belong to, so that repeated materializations
// (and the eventual deoptimization) of the same frame can reuse the same
// objects rather than building fresh copies.
class MaterializedObjectStore {
 public:
  explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
  }

  // Looks up / records / removes the materialized objects for the frame
  // with the given frame pointer.
  // NOTE(review): behavior on a missing fp (empty handle vs. empty array,
  // and Remove's return value semantics) is defined in the .cc — confirm
  // there before relying on it.
  Handle<FixedArray> Get(Address fp);
  void Set(Address fp, Handle<FixedArray> materialized_objects);
  bool Remove(Address fp);

 private:
  Isolate* isolate() { return isolate_; }
  // Accessors for the backing array of entries kept alive on the isolate.
  Handle<FixedArray> GetStackEntries();
  Handle<FixedArray> EnsureStackEntries(int size);

  // Maps a frame pointer to its index in frame_fps_.
  int StackIdToIndex(Address fp);

  Isolate* isolate_;
  // Frame pointers of the frames that currently have an entry; positions
  // correspond to indices into the stack-entries array.
  List<Address> frame_fps_;
};
|
|
|
|
|
|
|
|
|
2011-06-29 13:02:00 +00:00
|
|
|
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
// Represents parameters in unadapted form so their number might mismatch
// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
 public:
  DeoptimizedFrameInfo(TranslatedState* state,
                       TranslatedState::iterator frame_it, Isolate* isolate);

  // Return the number of incoming arguments.
  int parameters_count() { return static_cast<int>(parameters_.size()); }

  // Return the height of the expression stack.
  int expression_count() { return static_cast<int>(expression_stack_.size()); }

  // Get the frame function.
  Handle<JSFunction> GetFunction() { return function_; }

  // Get the frame context.
  Handle<Object> GetContext() { return context_; }

  // Check if this frame is preceded by construct stub frame. The bottom-most
  // inlined frame might still be called by an uninlined construct stub.
  bool HasConstructStub() {
    return has_construct_stub_;
  }

  // Get an incoming argument.
  Handle<Object> GetParameter(int index) {
    DCHECK(0 <= index && index < parameters_count());
    return parameters_[index];
  }

  // Get an expression from the expression stack.
  Handle<Object> GetExpression(int index) {
    DCHECK(0 <= index && index < expression_count());
    return expression_stack_[index];
  }

  int GetSourcePosition() {
    return source_position_;
  }

 private:
  // Set an incoming argument.
  void SetParameter(int index, Handle<Object> obj) {
    DCHECK(0 <= index && index < parameters_count());
    parameters_[index] = obj;
  }

  // Set an expression on the expression stack.
  void SetExpression(int index, Handle<Object> obj) {
    DCHECK(0 <= index && index < expression_count());
    expression_stack_[index] = obj;
  }

  Handle<JSFunction> function_;
  Handle<Object> context_;
  bool has_construct_stub_;
  std::vector<Handle<Object> > parameters_;
  std::vector<Handle<Object> > expression_stack_;
  int source_position_;

  // The Deoptimizer populates this object via the private setters above.
  friend class Deoptimizer;
};
|
|
|
|
|
2015-06-08 10:04:51 +00:00
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|
2010-12-07 11:31:57 +00:00
|
|
|
|
|
|
|
#endif // V8_DEOPTIMIZER_H_
|