v8/src/ast.cc

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "ast.h"
#include <cmath> // For isfinite.
#include "builtins.h"
#include "code-stubs.h"
#include "contexts.h"
#include "conversions.h"
#include "hashmap.h"
#include "parser.h"
#include "property-details.h"
#include "property.h"
#include "scopes.h"
#include "string-stream.h"
#include "type-info.h"
namespace v8 {
namespace internal {
// ----------------------------------------------------------------------------
// All the Accept member functions for each syntax tree node type.
#define DECL_ACCEPT(type) \
void type::Accept(AstVisitor* v) { v->Visit##type(this); }
AST_NODE_LIST(DECL_ACCEPT)
#undef DECL_ACCEPT
// ----------------------------------------------------------------------------
// Implementation of other node functionality.
bool Expression::IsSmiLiteral() {
return AsLiteral() != NULL && AsLiteral()->value()->IsSmi();
}
bool Expression::IsStringLiteral() {
return AsLiteral() != NULL && AsLiteral()->value()->IsString();
}
bool Expression::IsNullLiteral() {
return AsLiteral() != NULL && AsLiteral()->value()->IsNull();
}
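// Note: this recognizes only the unallocated global "undefined" (e.g. the
// right-hand side of a top-level `x === undefined`); a binding that shadows
// the name would not qualify, since its location is not UNALLOCATED.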
bool Expression::IsUndefinedLiteral(Isolate* isolate) {
VariableProxy* var_proxy = AsVariableProxy();
if (var_proxy == NULL) return false;
Variable* var = var_proxy->var();
// The global identifier "undefined" is immutable. Everything
// else could be reassigned.
return var != NULL && var->location() == Variable::UNALLOCATED &&
var_proxy->name()->Equals(isolate->heap()->undefined_string());
}
VariableProxy::VariableProxy(Isolate* isolate, Variable* var, int position)
: Expression(isolate, position),
name_(var->name()),
var_(NULL), // Will be set by the call to BindTo.
is_this_(var->is_this()),
is_trivial_(false),
is_lvalue_(false),
interface_(var->interface()) {
BindTo(var);
}
VariableProxy::VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
Interface* interface,
int position)
: Expression(isolate, position),
name_(name),
var_(NULL),
is_this_(is_this),
is_trivial_(false),
is_lvalue_(false),
interface_(interface) {
// Names must be canonicalized for fast equality checks.
ASSERT(name->IsInternalizedString());
}
void VariableProxy::BindTo(Variable* var) {
ASSERT(var_ == NULL); // must be bound only once
ASSERT(var != NULL); // must bind
ASSERT(!FLAG_harmony_modules || interface_->IsUnified(var->interface()));
ASSERT((is_this() && var->is_this()) || name_.is_identical_to(var->name()));
// Ideally CONST-ness should match. However, this is very hard to achieve
// because we don't know the exact semantics of conflicting (const and
// non-const) multiple variable declarations, const vars introduced via
// eval() etc. Const-ness and variable declarations are a complete mess
// in JS. Sigh...
var_ = var;
var->set_is_used(true);
}
Assignment::Assignment(Isolate* isolate,
Token::Value op,
Expression* target,
Expression* value,
int pos)
: Expression(isolate, pos),
op_(op),
target_(target),
value_(value),
binary_operation_(NULL),
assignment_id_(GetNextId(isolate)),
is_monomorphic_(false),
is_uninitialized_(false),
is_pre_monomorphic_(false),
store_mode_(STANDARD_STORE) { }
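// Maps a compound assignment operator to the binary operator it applies,
// e.g. ASSIGN_ADD (as in `x += y`) maps to ADD.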
Token::Value Assignment::binary_op() const {
switch (op_) {
case Token::ASSIGN_BIT_OR: return Token::BIT_OR;
case Token::ASSIGN_BIT_XOR: return Token::BIT_XOR;
case Token::ASSIGN_BIT_AND: return Token::BIT_AND;
case Token::ASSIGN_SHL: return Token::SHL;
case Token::ASSIGN_SAR: return Token::SAR;
case Token::ASSIGN_SHR: return Token::SHR;
case Token::ASSIGN_ADD: return Token::ADD;
case Token::ASSIGN_SUB: return Token::SUB;
case Token::ASSIGN_MUL: return Token::MUL;
case Token::ASSIGN_DIV: return Token::DIV;
case Token::ASSIGN_MOD: return Token::MOD;
default: UNREACHABLE();
}
return Token::ILLEGAL;
}
bool FunctionLiteral::AllowsLazyCompilation() {
return scope()->AllowsLazyCompilation();
}
bool FunctionLiteral::AllowsLazyCompilationWithoutContext() {
return scope()->AllowsLazyCompilationWithoutContext();
}
int FunctionLiteral::start_position() const {
return scope()->start_position();
}
int FunctionLiteral::end_position() const {
return scope()->end_position();
}
LanguageMode FunctionLiteral::language_mode() const {
return scope()->language_mode();
}
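// Classifies a property by its key and value; informally:
//   { __proto__: p } -> PROTOTYPE
//   { a: [1, 2] }    -> MATERIALIZED_LITERAL
//   { a: 1 }         -> CONSTANT
//   { a: f() }       -> COMPUTED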
ObjectLiteralProperty::ObjectLiteralProperty(Literal* key,
Expression* value,
Isolate* isolate) {
emit_store_ = true;
key_ = key;
value_ = value;
Object* k = *key->value();
if (k->IsInternalizedString() &&
isolate->heap()->proto_string()->Equals(String::cast(k))) {
kind_ = PROTOTYPE;
} else if (value_->AsMaterializedLiteral() != NULL) {
kind_ = MATERIALIZED_LITERAL;
} else if (value_->AsLiteral() != NULL) {
kind_ = CONSTANT;
} else {
kind_ = COMPUTED;
}
}
ObjectLiteralProperty::ObjectLiteralProperty(bool is_getter,
FunctionLiteral* value) {
emit_store_ = true;
value_ = value;
kind_ = is_getter ? GETTER : SETTER;
}
bool ObjectLiteral::Property::IsCompileTimeValue() {
return kind_ == CONSTANT ||
(kind_ == MATERIALIZED_LITERAL &&
CompileTimeValue::IsCompileTimeValue(value_));
}
void ObjectLiteral::Property::set_emit_store(bool emit_store) {
emit_store_ = emit_store;
}
bool ObjectLiteral::Property::emit_store() {
return emit_store_;
}
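// Decides which property stores can be skipped: when a key is repeated, only
// the last store takes effect, so earlier computed or materialized-literal
// duplicates (e.g. the first `x` in `{x: f(), x: g()}`) are marked with
// emit_store(false).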
void ObjectLiteral::CalculateEmitStore(Zone* zone) {
ZoneAllocationPolicy allocator(zone);
ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
allocator);
for (int i = properties()->length() - 1; i >= 0; i--) {
ObjectLiteral::Property* property = properties()->at(i);
Literal* literal = property->key();
if (literal->value()->IsNull()) continue;
uint32_t hash = literal->Hash();
// If the key of a computed property is in the table, do not emit
// a store for the property later.
if ((property->kind() == ObjectLiteral::Property::MATERIALIZED_LITERAL ||
property->kind() == ObjectLiteral::Property::COMPUTED) &&
table.Lookup(literal, hash, false, allocator) != NULL) {
property->set_emit_store(false);
} else {
// Add key to the table.
table.Lookup(literal, hash, true, allocator);
}
}
}
void TargetCollector::AddTarget(Label* target, Zone* zone) {
// Add the label to the collector, but discard duplicates.
int length = targets_.length();
for (int i = 0; i < length; i++) {
if (targets_[i] == target) return;
}
targets_.Add(target, zone);
}
void UnaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
// TODO(olivf) If this Operation is used in a test context, then the
// expression has a ToBoolean stub and we want to collect the type
// information. However the GraphBuilder expects it to be on the instruction
// corresponding to the TestContext, therefore we have to store it here and
// not on the operand.
set_to_boolean_types(oracle->ToBooleanTypes(expression()->test_id()));
}
void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
// TODO(olivf) If this Operation is used in a test context, then the right
// hand side has a ToBoolean stub and we want to collect the type information.
// However the GraphBuilder expects it to be on the instruction corresponding
// to the TestContext, therefore we have to store it here and not on the
// right hand operand.
set_to_boolean_types(oracle->ToBooleanTypes(right()->test_id()));
}
bool BinaryOperation::ResultOverwriteAllowed() {
switch (op_) {
case Token::COMMA:
case Token::OR:
case Token::AND:
return false;
case Token::BIT_OR:
case Token::BIT_XOR:
case Token::BIT_AND:
case Token::SHL:
case Token::SAR:
case Token::SHR:
case Token::ADD:
case Token::SUB:
case Token::MUL:
case Token::DIV:
case Token::MOD:
return true;
default:
UNREACHABLE();
}
return false;
}
static bool IsTypeof(Expression* expr) {
UnaryOperation* maybe_unary = expr->AsUnaryOperation();
return maybe_unary != NULL && maybe_unary->op() == Token::TYPEOF;
}
// Check for the pattern: typeof <expression> equals <string literal>.
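// (e.g. `typeof x == "function"`; the caller tries both operand orders).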
static bool MatchLiteralCompareTypeof(Expression* left,
Token::Value op,
Expression* right,
Expression** expr,
Handle<String>* check) {
if (IsTypeof(left) && right->IsStringLiteral() && Token::IsEqualityOp(op)) {
*expr = left->AsUnaryOperation()->expression();
*check = Handle<String>::cast(right->AsLiteral()->value());
return true;
}
return false;
}
bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
Handle<String>* check) {
return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
}
static bool IsVoidOfLiteral(Expression* expr) {
UnaryOperation* maybe_unary = expr->AsUnaryOperation();
return maybe_unary != NULL &&
maybe_unary->op() == Token::VOID &&
maybe_unary->expression()->AsLiteral() != NULL;
}
// Check for the pattern: void <literal> equals <expression> or
// undefined equals <expression>
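// (e.g. `x === void 0` or `x === undefined`; both operand orders are tried).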
static bool MatchLiteralCompareUndefined(Expression* left,
Token::Value op,
Expression* right,
Expression** expr,
Isolate* isolate) {
if (IsVoidOfLiteral(left) && Token::IsEqualityOp(op)) {
*expr = right;
return true;
}
if (left->IsUndefinedLiteral(isolate) && Token::IsEqualityOp(op)) {
*expr = right;
return true;
}
return false;
}
bool CompareOperation::IsLiteralCompareUndefined(
Expression** expr, Isolate* isolate) {
return MatchLiteralCompareUndefined(left_, op_, right_, expr, isolate) ||
MatchLiteralCompareUndefined(right_, op_, left_, expr, isolate);
}
// Check for the pattern: null equals <expression>
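// (e.g. `x == null`; both operand orders are tried by the caller).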
static bool MatchLiteralCompareNull(Expression* left,
Token::Value op,
Expression* right,
Expression** expr) {
if (left->IsNullLiteral() && Token::IsEqualityOp(op)) {
*expr = right;
return true;
}
return false;
}
bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
return MatchLiteralCompareNull(left_, op_, right_, expr) ||
MatchLiteralCompareNull(right_, op_, left_, expr);
}
// ----------------------------------------------------------------------------
// Inlining support
bool Declaration::IsInlineable() const {
return proxy()->var()->IsStackAllocated();
}
bool FunctionDeclaration::IsInlineable() const {
return false;
}
// ----------------------------------------------------------------------------
// Recording of type feedback
// TODO(rossberg): all RecordTypeFeedback functions should disappear
// once we use the common type field in the AST consistently.
void ForInStatement::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
for_in_type_ = static_cast<ForInType>(oracle->ForInType(this));
}
void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
to_boolean_types_ = oracle->ToBooleanTypes(test_id());
}
void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle,
Zone* zone) {
// Record type feedback from the oracle in the AST.
is_uninitialized_ = oracle->LoadIsUninitialized(this);
if (is_uninitialized_) return;
is_pre_monomorphic_ = oracle->LoadIsPreMonomorphic(this);
is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this);
ASSERT(!is_pre_monomorphic_ || !is_monomorphic_);
receiver_types_.Clear();
if (key()->IsPropertyName()) {
FunctionPrototypeStub proto_stub(Code::LOAD_IC);
if (oracle->LoadIsStub(this, &proto_stub)) {
is_function_prototype_ = true;
} else {
Literal* lit_key = key()->AsLiteral();
ASSERT(lit_key != NULL && lit_key->value()->IsString());
Handle<String> name = Handle<String>::cast(lit_key->value());
oracle->LoadReceiverTypes(this, name, &receiver_types_);
}
} else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) {
is_string_access_ = true;
} else if (is_monomorphic_) {
receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this), zone);
} else if (oracle->LoadIsPolymorphic(this)) {
receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(PropertyFeedbackId(), &receiver_types_);
}
}
void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle,
Zone* zone) {
Property* prop = target()->AsProperty();
ASSERT(prop != NULL);
TypeFeedbackId id = AssignmentFeedbackId();
is_uninitialized_ = oracle->StoreIsUninitialized(id);
if (is_uninitialized_) return;
is_pre_monomorphic_ = oracle->StoreIsPreMonomorphic(id);
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(id);
ASSERT(!is_pre_monomorphic_ || !is_monomorphic_);
receiver_types_.Clear();
if (prop->key()->IsPropertyName()) {
Literal* lit_key = prop->key()->AsLiteral();
ASSERT(lit_key != NULL && lit_key->value()->IsString());
Handle<String> name = Handle<String>::cast(lit_key->value());
oracle->StoreReceiverTypes(this, name, &receiver_types_);
} else if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
receiver_types_.Add(oracle->StoreMonomorphicReceiverType(id), zone);
store_mode_ = oracle->GetStoreMode(id);
} else if (oracle->StoreIsKeyedPolymorphic(id)) {
receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(id, &receiver_types_);
store_mode_ = oracle->GetStoreMode(id);
}
}
void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle,
Zone* zone) {
TypeFeedbackId id = CountStoreFeedbackId();
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(id);
receiver_types_.Clear();
if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
receiver_types_.Add(
oracle->StoreMonomorphicReceiverType(id), zone);
} else if (oracle->StoreIsKeyedPolymorphic(id)) {
receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(id, &receiver_types_);
} else {
oracle->CollectPolymorphicStoreReceiverTypes(id, &receiver_types_);
}
store_mode_ = oracle->GetStoreMode(id);
type_ = oracle->IncrementType(this);
}
void CaseClause::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
compare_type_ = oracle->ClauseType(CompareId());
}
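// Attempts to statically resolve the call target by walking `type`'s
// prototype chain for a constant function property named `name`; it bails
// out on named interceptors, dictionary maps, and non-constant lookups.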
bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
// If there is an interceptor, we can't compute the target for a direct call.
if (type->has_named_interceptor()) return false;
if (check_type_ == RECEIVER_MAP_CHECK) {
// For primitive checks the holder is set up to point to the corresponding
// prototype object, i.e. one step of the algorithm below has already been
// performed. For non-primitive checks we clear it to allow computing
// targets for polymorphic calls.
holder_ = Handle<JSObject>::null();
}
LookupResult lookup(type->GetIsolate());
while (true) {
// If a dictionary map is found in the prototype chain before the actual
// target, a new target can always appear. In that case, bail out.
// TODO(verwaest): Alternatively a runtime negative lookup on the normal
// receiver or prototype could be added.
if (type->is_dictionary_map()) return false;
type->LookupDescriptor(NULL, *name, &lookup);
if (lookup.IsFound()) {
switch (lookup.type()) {
case CONSTANT: {
// We surely know the target for a constant function.
Handle<Object> constant(lookup.GetConstantFromMap(*type),
type->GetIsolate());
if (constant->IsJSFunction()) {
target_ = Handle<JSFunction>::cast(constant);
return true;
}
// Fall through.
}
case NORMAL:
case FIELD:
case CALLBACKS:
case HANDLER:
case INTERCEPTOR:
// We don't know the target.
return false;
case TRANSITION:
case NONEXISTENT:
UNREACHABLE();
break;
}
}
// If we reach the end of the prototype chain, we don't know the target.
if (!type->prototype()->IsJSObject()) return false;
// Go up the prototype chain, recording where we are currently.
holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
type = Handle<Map>(holder()->map());
}
}
bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
LookupResult* lookup) {
target_ = Handle<JSFunction>::null();
cell_ = Handle<Cell>::null();
ASSERT(lookup->IsFound() &&
lookup->type() == NORMAL &&
lookup->holder() == *global);
cell_ = Handle<Cell>(global->GetPropertyCell(lookup));
if (cell_->value()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
if (!lookup->isolate()->heap()->InNewSpace(*candidate)) {
target_ = candidate;
return true;
}
}
return false;
}
Handle<JSObject> Call::GetPrototypeForPrimitiveCheck(
CheckType check, Isolate* isolate) {
v8::internal::Context* native_context = isolate->context()->native_context();
JSFunction* function = NULL;
switch (check) {
case RECEIVER_MAP_CHECK:
UNREACHABLE();
break;
case STRING_CHECK:
function = native_context->string_function();
break;
case SYMBOL_CHECK:
function = native_context->symbol_function();
break;
case NUMBER_CHECK:
function = native_context->number_function();
break;
case BOOLEAN_CHECK:
function = native_context->boolean_function();
break;
}
ASSERT(function != NULL);
return Handle<JSObject>(JSObject::cast(function->instance_prototype()));
}
void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
CallKind call_kind) {
is_monomorphic_ = oracle->CallIsMonomorphic(this);
Property* property = expression()->AsProperty();
if (property == NULL) {
// Function call. Specialize for monomorphic calls.
if (is_monomorphic_) target_ = oracle->GetCallTarget(this);
} else {
// Method call. Specialize for the receiver types seen at runtime.
Literal* key = property->key()->AsLiteral();
ASSERT(key != NULL && key->value()->IsString());
Handle<String> name = Handle<String>::cast(key->value());
check_type_ = oracle->GetCallCheckType(this);
receiver_types_.Clear();
if (check_type_ == RECEIVER_MAP_CHECK) {
oracle->CallReceiverTypes(this, name, call_kind, &receiver_types_);
is_monomorphic_ = is_monomorphic_ && receiver_types_.length() > 0;
} else {
holder_ = GetPrototypeForPrimitiveCheck(check_type_, oracle->isolate());
receiver_types_.Add(handle(holder_->map()), oracle->zone());
}
#ifdef ENABLE_SLOW_ASSERTS
if (FLAG_enable_slow_asserts) {
int length = receiver_types_.length();
for (int i = 0; i < length; i++) {
Handle<Map> map = receiver_types_.at(i);
ASSERT(!map.is_null() && *map != NULL);
}
}
#endif
if (is_monomorphic_) {
Handle<Map> map = receiver_types_.first();
is_monomorphic_ = ComputeTarget(map, name);
}
}
}
void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
allocation_info_cell_ = oracle->GetCallNewAllocationInfoCell(this);
is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
if (is_monomorphic_) {
target_ = oracle->GetCallNewTarget(this);
Object* value = allocation_info_cell_->value();
ASSERT(!value->IsTheHole());
if (value->IsAllocationSite()) {
AllocationSite* site = AllocationSite::cast(value);
elements_kind_ = site->GetElementsKind();
}
}
}
void ObjectLiteral::Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
receiver_type_ = oracle->ObjectLiteralStoreIsMonomorphic(this)
? oracle->GetObjectLiteralStoreMap(this)
: Handle<Map>::null();
}
// ----------------------------------------------------------------------------
// Implementation of AstVisitor
void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
for (int i = 0; i < declarations->length(); i++) {
Visit(declarations->at(i));
}
}
void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
for (int i = 0; i < statements->length(); i++) {
Statement* stmt = statements->at(i);
Visit(stmt);
if (stmt->IsJump()) break;
}
}
void AstVisitor::VisitExpressions(ZoneList<Expression*>* expressions) {
for (int i = 0; i < expressions->length(); i++) {
// The variable statement visiting code may pass NULL expressions
// to this code. Maybe this should be handled by introducing an
// undefined expression or literal? Revisit this code if this
// changes.
Expression* expression = expressions->at(i);
if (expression != NULL) Visit(expression);
}
}
// ----------------------------------------------------------------------------
// Regular expressions
#define MAKE_ACCEPT(Name) \
void* RegExp##Name::Accept(RegExpVisitor* visitor, void* data) { \
return visitor->Visit##Name(this, data); \
}
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_ACCEPT)
#undef MAKE_ACCEPT
#define MAKE_TYPE_CASE(Name) \
RegExp##Name* RegExpTree::As##Name() { \
return NULL; \
} \
bool RegExpTree::Is##Name() { return false; }
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
#undef MAKE_TYPE_CASE
#define MAKE_TYPE_CASE(Name) \
RegExp##Name* RegExp##Name::As##Name() { \
return this; \
} \
bool RegExp##Name::Is##Name() { return true; }
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
#undef MAKE_TYPE_CASE
static Interval ListCaptureRegisters(ZoneList<RegExpTree*>* children) {
Interval result = Interval::Empty();
for (int i = 0; i < children->length(); i++)
result = result.Union(children->at(i)->CaptureRegisters());
return result;
}
Interval RegExpAlternative::CaptureRegisters() {
return ListCaptureRegisters(nodes());
}
Interval RegExpDisjunction::CaptureRegisters() {
return ListCaptureRegisters(alternatives());
}
Interval RegExpLookahead::CaptureRegisters() {
return body()->CaptureRegisters();
}
Interval RegExpCapture::CaptureRegisters() {
Interval self(StartRegister(index()), EndRegister(index()));
return self.Union(body()->CaptureRegisters());
}
Interval RegExpQuantifier::CaptureRegisters() {
return body()->CaptureRegisters();
}
bool RegExpAssertion::IsAnchoredAtStart() {
return assertion_type() == RegExpAssertion::START_OF_INPUT;
}
bool RegExpAssertion::IsAnchoredAtEnd() {
return assertion_type() == RegExpAssertion::END_OF_INPUT;
}
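// An alternative is anchored at the start only if an anchoring node occurs
// before any node that can consume input, e.g. /^foo/ qualifies while
// /a^foo/ (illustrative) does not.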
bool RegExpAlternative::IsAnchoredAtStart() {
ZoneList<RegExpTree*>* nodes = this->nodes();
for (int i = 0; i < nodes->length(); i++) {
RegExpTree* node = nodes->at(i);
if (node->IsAnchoredAtStart()) { return true; }
if (node->max_match() > 0) { return false; }
}
return false;
}
bool RegExpAlternative::IsAnchoredAtEnd() {
ZoneList<RegExpTree*>* nodes = this->nodes();
for (int i = nodes->length() - 1; i >= 0; i--) {
RegExpTree* node = nodes->at(i);
if (node->IsAnchoredAtEnd()) { return true; }
if (node->max_match() > 0) { return false; }
}
return false;
}
bool RegExpDisjunction::IsAnchoredAtStart() {
ZoneList<RegExpTree*>* alternatives = this->alternatives();
for (int i = 0; i < alternatives->length(); i++) {
if (!alternatives->at(i)->IsAnchoredAtStart())
return false;
}
return true;
}
bool RegExpDisjunction::IsAnchoredAtEnd() {
ZoneList<RegExpTree*>* alternatives = this->alternatives();
for (int i = 0; i < alternatives->length(); i++) {
if (!alternatives->at(i)->IsAnchoredAtEnd())
return false;
}
return true;
}
bool RegExpLookahead::IsAnchoredAtStart() {
return is_positive() && body()->IsAnchoredAtStart();
}
bool RegExpCapture::IsAnchoredAtStart() {
return body()->IsAnchoredAtStart();
}
bool RegExpCapture::IsAnchoredAtEnd() {
return body()->IsAnchoredAtEnd();
}
// Convert regular expression trees to a simple sexp representation.
// This representation should be different from the input grammar
// in as many cases as possible, to make it more difficult for incorrect
// parses to look like correct ones, which is likely if the input and
// output formats are alike.
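// For instance, /ab|c/ should come out as something like (| 'ab' 'c') and
// /a+/ as (# 1 - g 'a').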
class RegExpUnparser V8_FINAL : public RegExpVisitor {
public:
explicit RegExpUnparser(Zone* zone);
void VisitCharacterRange(CharacterRange that);
SmartArrayPointer<const char> ToString() { return stream_.ToCString(); }
#define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, \
void* data) V8_OVERRIDE;
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_CASE)
#undef MAKE_CASE
private:
StringStream* stream() { return &stream_; }
HeapStringAllocator alloc_;
StringStream stream_;
Zone* zone_;
};
RegExpUnparser::RegExpUnparser(Zone* zone) : stream_(&alloc_), zone_(zone) {
}
void* RegExpUnparser::VisitDisjunction(RegExpDisjunction* that, void* data) {
stream()->Add("(|");
for (int i = 0; i < that->alternatives()->length(); i++) {
stream()->Add(" ");
that->alternatives()->at(i)->Accept(this, data);
}
stream()->Add(")");
return NULL;
}
void* RegExpUnparser::VisitAlternative(RegExpAlternative* that, void* data) {
stream()->Add("(:");
for (int i = 0; i < that->nodes()->length(); i++) {
stream()->Add(" ");
that->nodes()->at(i)->Accept(this, data);
}
stream()->Add(")");
return NULL;
}
void RegExpUnparser::VisitCharacterRange(CharacterRange that) {
stream()->Add("%k", that.from());
if (!that.IsSingleton()) {
stream()->Add("-%k", that.to());
}
}
void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
void* data) {
if (that->is_negated())
stream()->Add("^");
stream()->Add("[");
for (int i = 0; i < that->ranges(zone_)->length(); i++) {
if (i > 0) stream()->Add(" ");
VisitCharacterRange(that->ranges(zone_)->at(i));
}
stream()->Add("]");
return NULL;
}
void* RegExpUnparser::VisitAssertion(RegExpAssertion* that, void* data) {
switch (that->assertion_type()) {
case RegExpAssertion::START_OF_INPUT:
stream()->Add("@^i");
break;
case RegExpAssertion::END_OF_INPUT:
stream()->Add("@$i");
break;
case RegExpAssertion::START_OF_LINE:
stream()->Add("@^l");
break;
case RegExpAssertion::END_OF_LINE:
stream()->Add("@$l");
break;
case RegExpAssertion::BOUNDARY:
stream()->Add("@b");
break;
case RegExpAssertion::NON_BOUNDARY:
stream()->Add("@B");
break;
}
return NULL;
}
void* RegExpUnparser::VisitAtom(RegExpAtom* that, void* data) {
stream()->Add("'");
Vector<const uc16> chardata = that->data();
for (int i = 0; i < chardata.length(); i++) {
stream()->Add("%k", chardata[i]);
}
stream()->Add("'");
return NULL;
}
void* RegExpUnparser::VisitText(RegExpText* that, void* data) {
if (that->elements()->length() == 1) {
that->elements()->at(0).tree()->Accept(this, data);
} else {
stream()->Add("(!");
for (int i = 0; i < that->elements()->length(); i++) {
stream()->Add(" ");
that->elements()->at(i).tree()->Accept(this, data);
}
stream()->Add(")");
}
return NULL;
}
void* RegExpUnparser::VisitQuantifier(RegExpQuantifier* that, void* data) {
stream()->Add("(# %i ", that->min());
if (that->max() == RegExpTree::kInfinity) {
stream()->Add("- ");
} else {
stream()->Add("%i ", that->max());
}
stream()->Add(that->is_greedy() ? "g " : that->is_possessive() ? "p " : "n ");
that->body()->Accept(this, data);
stream()->Add(")");
return NULL;
}
void* RegExpUnparser::VisitCapture(RegExpCapture* that, void* data) {
stream()->Add("(^ ");
that->body()->Accept(this, data);
stream()->Add(")");
return NULL;
}
void* RegExpUnparser::VisitLookahead(RegExpLookahead* that, void* data) {
stream()->Add("(-> ");
stream()->Add(that->is_positive() ? "+ " : "- ");
that->body()->Accept(this, data);
stream()->Add(")");
return NULL;
}
void* RegExpUnparser::VisitBackReference(RegExpBackReference* that,
void* data) {
stream()->Add("(<- %i)", that->index());
return NULL;
}
void* RegExpUnparser::VisitEmpty(RegExpEmpty* that, void* data) {
stream()->Put('%');
return NULL;
}
SmartArrayPointer<const char> RegExpTree::ToString(Zone* zone) {
RegExpUnparser unparser(zone);
Accept(&unparser, NULL);
return unparser.ToString();
}
RegExpDisjunction::RegExpDisjunction(ZoneList<RegExpTree*>* alternatives)
: alternatives_(alternatives) {
ASSERT(alternatives->length() > 1);
RegExpTree* first_alternative = alternatives->at(0);
min_match_ = first_alternative->min_match();
max_match_ = first_alternative->max_match();
for (int i = 1; i < alternatives->length(); i++) {
RegExpTree* alternative = alternatives->at(i);
min_match_ = Min(min_match_, alternative->min_match());
max_match_ = Max(max_match_, alternative->max_match());
}
}
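// Saturating addition used for match-length bounds: results that would pass
// RegExpTree::kInfinity clamp to kInfinity instead of overflowing.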
static int IncreaseBy(int previous, int increase) {
if (RegExpTree::kInfinity - previous < increase) {
return RegExpTree::kInfinity;
} else {
return previous + increase;
}
}
RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
: nodes_(nodes) {
ASSERT(nodes->length() > 1);
min_match_ = 0;
max_match_ = 0;
for (int i = 0; i < nodes->length(); i++) {
RegExpTree* node = nodes->at(i);
int node_min_match = node->min_match();
min_match_ = IncreaseBy(min_match_, node_min_match);
int node_max_match = node->max_match();
max_match_ = IncreaseBy(max_match_, node_max_match);
}
}
CaseClause::CaseClause(Isolate* isolate,
Expression* label,
ZoneList<Statement*>* statements,
int pos)
: AstNode(pos),
label_(label),
statements_(statements),
compare_type_(Type::None(), isolate),
compare_id_(AstNode::GetNextId(isolate)),
entry_id_(AstNode::GetNextId(isolate)) {
}
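// The Visit* implementations below count nodes and, depending on the node
// kind, record flags (kDontInline, kDontSelfOptimize, kDontCache) and a
// dont-optimize reason that restrict how the containing function may be
// compiled.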
#define REGULAR_NODE(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
}
#define DONT_OPTIMIZE_NODE(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
set_dont_optimize_reason(k##NodeType); \
add_flag(kDontInline); \
add_flag(kDontSelfOptimize); \
}
#define DONT_SELFOPTIMIZE_NODE(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
add_flag(kDontSelfOptimize); \
}
#define DONT_CACHE_NODE(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
set_dont_optimize_reason(k##NodeType); \
add_flag(kDontInline); \
add_flag(kDontSelfOptimize); \
add_flag(kDontCache); \
}
REGULAR_NODE(VariableDeclaration)
REGULAR_NODE(FunctionDeclaration)
REGULAR_NODE(Block)
REGULAR_NODE(ExpressionStatement)
REGULAR_NODE(EmptyStatement)
REGULAR_NODE(IfStatement)
REGULAR_NODE(ContinueStatement)
REGULAR_NODE(BreakStatement)
REGULAR_NODE(ReturnStatement)
REGULAR_NODE(SwitchStatement)
REGULAR_NODE(CaseClause)
REGULAR_NODE(Conditional)
REGULAR_NODE(Literal)
REGULAR_NODE(ArrayLiteral)
REGULAR_NODE(ObjectLiteral)
REGULAR_NODE(RegExpLiteral)
REGULAR_NODE(FunctionLiteral)
REGULAR_NODE(Assignment)
REGULAR_NODE(Throw)
REGULAR_NODE(Property)
REGULAR_NODE(UnaryOperation)
REGULAR_NODE(CountOperation)
REGULAR_NODE(BinaryOperation)
REGULAR_NODE(CompareOperation)
REGULAR_NODE(ThisFunction)
REGULAR_NODE(Call)
REGULAR_NODE(CallNew)
// In theory, for VariableProxy we'd have to add:
// if (node->var()->IsLookupSlot()) add_flag(kDontInline);
// But node->var() is usually not bound yet at VariableProxy creation time, and
// LOOKUP variables only result from constructs that cannot be inlined anyway.
REGULAR_NODE(VariableProxy)
// We currently do not optimize any modules.
DONT_OPTIMIZE_NODE(ModuleDeclaration)
DONT_OPTIMIZE_NODE(ImportDeclaration)
DONT_OPTIMIZE_NODE(ExportDeclaration)
DONT_OPTIMIZE_NODE(ModuleVariable)
DONT_OPTIMIZE_NODE(ModulePath)
DONT_OPTIMIZE_NODE(ModuleUrl)
DONT_OPTIMIZE_NODE(ModuleStatement)
DONT_OPTIMIZE_NODE(Yield)
DONT_OPTIMIZE_NODE(WithStatement)
DONT_OPTIMIZE_NODE(TryCatchStatement)
DONT_OPTIMIZE_NODE(TryFinallyStatement)
DONT_OPTIMIZE_NODE(DebuggerStatement)
DONT_OPTIMIZE_NODE(NativeFunctionLiteral)
DONT_SELFOPTIMIZE_NODE(DoWhileStatement)
DONT_SELFOPTIMIZE_NODE(WhileStatement)
DONT_SELFOPTIMIZE_NODE(ForStatement)
DONT_SELFOPTIMIZE_NODE(ForInStatement)
DONT_SELFOPTIMIZE_NODE(ForOfStatement)
DONT_CACHE_NODE(ModuleLiteral)
void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
increase_node_count();
if (node->is_jsruntime()) {
// Don't try to inline JS runtime calls because we don't (currently) even
// optimize them.
add_flag(kDontInline);
} else if (node->function()->intrinsic_type == Runtime::INLINE &&
(node->name()->IsOneByteEqualTo(
STATIC_ASCII_VECTOR("_ArgumentsLength")) ||
node->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_Arguments")))) {
// Don't inline the %_ArgumentsLength or %_Arguments because their
// implementation will not work. There is no stack frame to get them
// from.
add_flag(kDontInline);
}
}
#undef REGULAR_NODE
#undef DONT_OPTIMIZE_NODE
#undef DONT_SELFOPTIMIZE_NODE
#undef DONT_CACHE_NODE
Handle<String> Literal::ToString() {
if (value_->IsString()) return Handle<String>::cast(value_);
ASSERT(value_->IsNumber());
char arr[100];
Vector<char> buffer(arr, ARRAY_SIZE(arr));
const char* str;
if (value_->IsSmi()) {
// Optimization only, the heap number case would subsume this.
OS::SNPrintF(buffer, "%d", Smi::cast(*value_)->value());
str = arr;
} else {
str = DoubleToCString(value_->Number(), buffer);
}
return isolate_->factory()->NewStringFromAscii(CStrVector(str));
}
} } // namespace v8::internal