// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "hydrogen.h"

#include <algorithm>

#include "v8.h"
#include "allocation-site-scopes.h"
#include "codegen.h"
#include "full-codegen.h"
#include "hashmap.h"
#include "hydrogen-bce.h"
#include "hydrogen-bch.h"
#include "hydrogen-canonicalize.h"
#include "hydrogen-check-elimination.h"
#include "hydrogen-dce.h"
#include "hydrogen-dehoist.h"
#include "hydrogen-environment-liveness.h"
#include "hydrogen-escape-analysis.h"
#include "hydrogen-infer-representation.h"
#include "hydrogen-infer-types.h"
#include "hydrogen-load-elimination.h"
#include "hydrogen-gvn.h"
#include "hydrogen-mark-deoptimize.h"
#include "hydrogen-mark-unreachable.h"
#include "hydrogen-osr.h"
#include "hydrogen-range-analysis.h"
#include "hydrogen-redundant-phi.h"
#include "hydrogen-removable-simulates.h"
#include "hydrogen-representation-changes.h"
#include "hydrogen-sce.h"
#include "hydrogen-store-elimination.h"
#include "hydrogen-uint32-analysis.h"
#include "lithium-allocator.h"
#include "parser.h"
#include "runtime.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "stub-cache.h"
#include "typing.h"

#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM64
#include "arm64/lithium-codegen-arm64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-codegen-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false) { }

Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}

void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}

void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}

void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}

void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}

void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}

void HBasicBlock::AddInstruction(HInstruction* instr,
                                 HSourcePosition position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  ASSERT(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      ASSERT(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}

HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}

HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
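  // For example, if this block pushed v1 and then v2 (so v2 is on top of
  // the expression stack), ExpressionStackAt(0) yields v2 and the simulate
  // records v2 before v1.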
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}

void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}

void HBasicBlock::Goto(HBasicBlock* block, HSourcePosition position,
                       FunctionState* state, bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}

void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  HSourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}

void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}

void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}

void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
              ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}

bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}

bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
  if (this == other) return true;
  return Dominates(other);
}

int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}

void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}

void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  ASSERT(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  ASSERT(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}

void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}

void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // succeeding blocks in this list, the predecessor is before the successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}

void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
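// Note: the id-directed walk-up in AssignCommonDominator above is the
// "intersect" step of the Cooper-Harvey-Kennedy dominator algorithm. Because
// blocks are numbered in reverse post order, repeatedly lifting whichever
// block has the larger id to its immediate dominator converges on the
// nearest common dominator of the two blocks.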
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through the loop that don't go through the current
    // block contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header =
        dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}

int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}

#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif

void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}

HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}

void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it would not be part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};

void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the
    // last node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        ASSERT(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif

HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}

HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}

HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}

HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}

HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}

#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value)                 \
HConstant* HGraph::GetConstant##Name() {                                      \
  if (!constant_##name##_.is_set()) {                                         \
    HConstant* constant = new(zone()) HConstant(                              \
        Unique<Object>::CreateImmovable(                                      \
            isolate()->factory()->name##_value()),                            \
        Representation::Tagged(),                                             \
        htype,                                                                \
        true,                                                                 \
        boolean_value,                                                        \
        false,                                                                \
        ODDBALL_TYPE);                                                        \
    constant->InsertAfter(entry_block()->first());                            \
    constant_##name##_.set(constant);                                         \
  }                                                                           \
  return ReinsertConstantIfNecessary(constant_##name##_.get());               \
}

DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)

#undef DEFINE_GET_CONSTANT

#define DEFINE_IS_CONSTANT(Name, name)                                        \
bool HGraph::IsConstant##Name(HConstant* constant) {                          \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)
#undef DEFINE_IS_CONSTANT

HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}

bool HGraph::IsStandardConstant(HConstant* constant) {
  if (IsConstantUndefined(constant)) return true;
  if (IsConstant0(constant)) return true;
  if (IsConstant1(constant)) return true;
  if (IsConstantMinus1(constant)) return true;
  if (IsConstantTrue(constant)) return true;
  if (IsConstantFalse(constant)) return true;
  if (IsConstantHole(constant)) return true;
  if (IsConstantNull(constant)) return true;
  return false;
}
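// For reference, DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
// above expands to roughly the following (a sketch, not generated code):
//
//   HConstant* HGraph::GetConstantTrue() {
//     if (!constant_true_.is_set()) {
//       HConstant* constant = new(zone()) HConstant(
//           Unique<Object>::CreateImmovable(
//               isolate()->factory()->true_value()),
//           Representation::Tagged(), HType::Boolean(),
//           true, true, false, ODDBALL_TYPE);
//       constant->InsertAfter(entry_block()->first());
//       constant_true_.set(constant);
//     }
//     return ReinsertConstantIfNecessary(constant_true_.get());
//   }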
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      pending_merge_block_(false),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}

HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      pending_merge_block_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  continuation->Continue(&first_true_block_, &first_false_block_);
}

HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  ASSERT(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder_->environment();
    first_true_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_ = builder_->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}

void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!needs_compare_);
  ASSERT(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}

void HGraphBuilder::IfBuilder::And() {
  ASSERT(!needs_compare_);
  ASSERT(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}

void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  ASSERT(true_block != NULL);
  ASSERT(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder_->set_current_block(NULL);
  End();
}

void HGraphBuilder::IfBuilder::JoinContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    ASSERT(continuation->IsTrueReachable());
    builder_->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    ASSERT(continuation->IsFalseReachable());
    builder_->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}

void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder_->FinishCurrentBlock(branch);
  }
  builder_->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}

void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  AddMergeAtJoinBlock(false);
  builder_->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}

void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}

void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  builder_->FinishExitCurrentBlock(
      builder_->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}

void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder_->current_block();
  ASSERT(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record =
      new(builder_->zone()) MergeAtJoinBlock(block, deopt,
                                             merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    ASSERT(block->end() == NULL);
    if (deopt) {
      deopt_merge_at_join_block_count_++;
    } else {
      normal_merge_at_join_block_count_++;
    }
  }
  builder_->set_current_block(NULL);
  pending_merge_block_ = false;
}

void HGraphBuilder::IfBuilder::Finish() {
  ASSERT(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}

void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  ASSERT(then_record->next_ == NULL);
}

void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
      deopt_merge_at_join_block_count_;
  ASSERT(total_merged_blocks >= 1);
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder_->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder_->zone(), NULL),
          HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}
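// A minimal usage sketch of IfBuilder, following the pattern used throughout
// this file (the names are illustrative only):
//
//   IfBuilder checker(this);
//   checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
//   checker.Then();
//   ...  // emit code for the in-bounds case
//   checker.Else();
//   ...  // emit code for the out-of-bounds case
//   checker.End();
//
// Then()/Else() switch the builder's current block, and End() joins both
// arms in a fresh merge block (or reuses the single surviving arm).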
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = builder_->graph()->GetConstant1();
}

HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount)
    : builder_(builder),
      context_(context),
      direction_(direction),
      finished_(false) {
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
  increment_amount_ = increment_amount;
}

HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}

void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    HEnvironment* env = exit_block_->last_environment()->Copy();
    exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}

void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into
  // the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
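// A minimal usage sketch of LoopBuilder, mirroring its uses later in this
// file (e.g. in BuildCopySeqStringChars); the names are illustrative only:
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index =
//       loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
//   {
//     ...  // loop body, may use |index|
//   }
//   loop.EndBody();
//
// BeginBody creates the loop-header phi and the compare-and-branch; EndBody
// emits the increment (for post-in/decrement loops), wires up the back edge
// and leaves the builder positioned in the exit block.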
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}

HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  ASSERT(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}

void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  ASSERT(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}

void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  ASSERT(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}

void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(
        reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}

void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  ASSERT(current_block() != NULL);
  ASSERT(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}

HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}

HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}

HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}

void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}

HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
  return Add<HCheckMaps>(obj, map, top_info());
}

HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    ASSERT(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}

HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (shared->strict_mode() == STRICT || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, function);
}
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);
  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}

HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);
  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
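// Copy-on-write backing stores are identified purely by their map: every COW
// FixedArray shares fixed_cow_array_map, so the single HCompareMap above is
// all that is needed to detect one before a mutating store, at which point
// the elements are copied into a fresh (writable) backing store.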
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                               HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}

HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
    HValue* hash,
    HValue* mask,
    int current_probe) {
  if (current_probe == kNumberDictionaryProbes) {
    return NULL;
  }

  int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
  HValue* raw_index = (current_probe == 0)
      ? hash
      : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
  raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
  int32_t entry_size = SeededNumberDictionary::kEntrySize;
  raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
  raw_index->ClearFlag(HValue::kCanOverflow);

  int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index = AddUncasted<HAdd>(raw_index,
                                        Add<HConstant>(base_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(
      elements, key_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  IfBuilder key_compare(this);
  key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
  key_compare.Then();
  {
    // Key at the current probe doesn't match, try at the next probe.
    HValue* result = BuildUncheckedDictionaryElementLoadHelper(
        elements, key, hash, mask, current_probe + 1);
    if (result == NULL) {
      key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
      result = graph()->GetConstantUndefined();
    } else {
      Push(result);
    }
  }
  key_compare.Else();
  {
    // Key at current probe matches. Details must be zero, otherwise the
    // dictionary element requires special handling.
    HValue* details_index = AddUncasted<HAdd>(
        raw_index, Add<HConstant>(base_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);

    HValue* details = Add<HLoadKeyed>(
        elements, details_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(details,
                                                 graph()->GetConstant0(),
                                                 Token::NE);
    details_compare.ThenDeopt("keyed load dictionary element not fast case");

    details_compare.Else();
    {
      // Key matches and details are zero --> fast case. Load and return the
      // value.
      HValue* result_index = AddUncasted<HAdd>(
          raw_index, Add<HConstant>(base_offset + 1));
      result_index->ClearFlag(HValue::kCanOverflow);

      Push(Add<HLoadKeyed>(elements, result_index,
                           static_cast<HValue*>(NULL), FAST_ELEMENTS));
    }
    details_compare.End();
  }
  key_compare.End();

  return Pop();
}
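// Layout assumed by the helper above: a SeededNumberDictionary entry occupies
// kEntrySize consecutive slots starting at kElementsStartIndex, holding the
// key (+0), the value (+1) and the PropertyDetails word (+2). The lookup is
// unrolled for a fixed kNumberDictionaryProbes probes and deoptimizes if the
// key is still not found after the last probe.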
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}

HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* key) {
  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_ELEMENTS);

  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  return BuildUncheckedDictionaryElementLoadHelper(elements, key,
                                                   hash, mask, 0);
}
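// BuildElementIndexHash above mirrors the seeded integer hash (a
// Jenkins-style 32-bit mix, documented step by step in its comments) that
// the runtime uses when inserting number dictionary keys; the inlined lookup
// only works because both sides hash an index to the same bucket.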
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);

  // Compute the size of the RegExpResult followed by FixedArray with length.
  HValue* size = length;
  size = AddUncasted<HShl>(size, Add<HConstant>(kPointerSizeLog2));
  size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
              JSRegExpResult::kSize + FixedArray::kHeaderSize)));

  // Make sure size does not exceed max regular heap object size.
  Add<HBoundsCheck>(size, Add<HConstant>(Page::kMaxRegularHeapObjectSize));

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      size, HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);

  // Determine the elements FixedArray.
  HValue* elements = Add<HInnerAllocatedObject>(
      result, Add<HConstant>(JSRegExpResult::kSize));

  // Initialize the JSRegExpResult header.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HValue* native_context = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForGlobalObjectNativeContext());
  AddStoreMapNoWriteBarrier(result, Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      Add<HConstant>(isolate()->factory()->empty_fixed_array()));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset),
      length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Initialize the elements header.
  AddStoreMapConstantNoWriteBarrier(elements,
                                    isolate()->factory()->fixed_array_map());
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        length);

  // Initialize the elements contents with undefined.
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    Add<HStoreKeyed>(elements, index, graph()->GetConstantUndefined(),
                     FAST_ELEMENTS);
  }
  loop.EndBody();

  return result;
}
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),
                                  FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if key is a heap number (the number string cache contains
        // only SMIs and heap numbers, so it is sufficient to do a SMI check
        // here).
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          // Check if values of key and object match.
          IfBuilder if_keyeqobject(this);
          if_keyeqobject.If<HCompareNumericAndBranch>(
              Add<HLoadNamedField>(key, keyisnotsmi,
                                   HObjectAccess::ForHeapNumberValue()),
              Add<HLoadNamedField>(object, objectisnumber,
                                   HObjectAccess::ForHeapNumberValue()),
              Token::EQ);
          if_keyeqobject.Then();
          {
            // Make the key_index available.
            Push(key_index);
          }
          if_keyeqobject.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index,
                                            graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fall back to runtime.
    Add<HPushArgument>(object);
    Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kHiddenNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
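// The number string cache probed above is a FixedArray of (number, string)
// pairs: entry i occupies indices 2 * i (the number key) and 2 * i + 1 (the
// cached string), which is why key_index is hash << 1 and the hit path loads
// at key_index + 1; the mask is length / 2 - 1 for the same reason.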
HAllocate* HGraphBuilder::BuildAllocate(
    HValue* object_size,
    HType type,
    InstanceType instance_type,
    HAllocationMode allocation_mode) {
  // Compute the effective allocation size.
  HValue* size = object_size;
  if (allocation_mode.CreateAllocationMementos()) {
    size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
    size->ClearFlag(HValue::kCanOverflow);
  }

  // Perform the actual allocation.
  HAllocate* object = Add<HAllocate>(
      size, type, allocation_mode.GetPretenureMode(),
      instance_type, allocation_mode.feedback_site());

  // Setup the allocation memento.
  if (allocation_mode.CreateAllocationMementos()) {
    BuildCreateAllocationMemento(
        object, object_size, allocation_mode.current_site());
  }

  return object;
}

HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
                                             HValue* right_length) {
  // Compute the combined string length and check against max string length.
  HValue* length = AddUncasted<HAdd>(left_length, right_length);
  // Check that length <= kMaxLength <=> length < MaxLength + 1.
  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
  Add<HBoundsCheck>(length, max_length);
  return length;
}

HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  ASSERT(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ASCII_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)),
      Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
    AddStoreMapConstantNoWriteBarrier(result, map);
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Handle<Map> map = isolate()->factory()->cons_string_map();
    AddStoreMapConstantNoWriteBarrier(result, map);
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}

void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}

HValue* HGraphBuilder::BuildObjectSizeAlignment(
    HValue* unaligned_size, int header_size) {
  ASSERT((header_size & kObjectAlignmentMask) == 0);
  HValue* size = AddUncasted<HAdd>(
      unaligned_size, Add<HConstant>(static_cast<int32_t>(
          header_size + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  return AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
          ~kObjectAlignmentMask)));
}
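// BuildObjectSizeAlignment above computes header_size + unaligned_size
// rounded up to the object alignment: (unaligned + header + mask) & ~mask.
// For example, with 8-byte alignment (mask 7), a 13-byte payload and a
// 16-byte header: (13 + 16 + 7) & ~7 = 36 & ~7 = 32.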
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    ASSERT_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    ASSERT_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* ascii_string_map =
          Add<HConstant>(isolate()->factory()->ascii_string_map());

      // Determine map and size depending on whether result is one-byte
      // string.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(ascii_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Allocate the string object. HAllocate does not care whether we pass
      // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE.
      HAllocate* result = BuildAllocate(
          size, HType::String(), STRING_TYPE, allocation_mode);

      // We can safely skip the write barrier for storing map here.
      AddStoreMapNoWriteBarrier(result, map);

      // Initialize the string fields.
      Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                            Add<HConstant>(String::kEmptyHashField));
      Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(),
                            length);

      // Copy characters to the result string.
      IfBuilder if_twobyte(this);
      if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
      if_twobyte.Then();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, left_length, String::TWO_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.Else();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, left_length, String::ONE_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.End();

      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Return the sequential string.
      Push(result);
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArgument>(left);
      Add<HPushArgument>(right);
      Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kHiddenStringAdd),
            2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}


HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  return Pop();
}


HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  ASSERT((!IsExternalArrayElementsKind(elements_kind) &&
              !IsFixedTypedArrayElementsKind(elements_kind)) ||
         !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit
  // dependency on a HElementsTransition instruction. The flag can also be
  // removed if the map to check has FAST_HOLEY_ELEMENTS, since there can be
  // no further ElementsKind transitions. Finally, the dependency can be
  // removed for stores for FAST_ELEMENTS, since a transition to HOLEY
  // elements won't change the generated store code.
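  // Accordingly, clear the kElementsKind dependency for the two cases that
  // can be decided locally from the elements kind and access type.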
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map(), top_info());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object, static_cast<HValue*>(NULL),
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind) ||
      IsFixedTypedArrayElementsKind(elements_kind)) {
    HValue* backing_store;
    if (IsExternalArrayElementsKind(elements_kind)) {
      backing_store = Add<HLoadNamedField>(
          elements, static_cast<HValue*>(NULL),
          HObjectAccess::ForExternalArrayExternalPointer());
    } else {
      backing_store = elements;
    }
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, elements_kind, access_type);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      ASSERT(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(
          backing_store, checked_key, val,
          checked_object, elements_kind, access_type);
    }
  }
  ASSERT(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a
  // smi before manipulating the backing store. Otherwise the actual store
  // may deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map(), top_info());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, access_type, load_mode);
}


HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    HValue* new_object = array_length == 0 ?
        array_builder->AllocateEmptyArray() :
        array_builder->AllocateArray(length_argument, length_argument);
    return new_object;
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt("Holey array despite packed elements_kind feedback");
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size.
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length);
}


HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                             HValue* capacity) {
  int elements_size;
  InstanceType instance_type;
  if (IsFastDoubleElementsKind(kind)) {
    elements_size = kDoubleSize;
    instance_type = FIXED_DOUBLE_ARRAY_TYPE;
  } else {
    elements_size = kPointerSize;
    instance_type = FIXED_ARRAY_TYPE;
  }

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
  mul->ClearFlag(HValue::kCanOverflow);

  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);

  PretenureFlag pretenure_flag = !FLAG_allocation_site_pretenuring ?
      isolate()->heap()->GetPretenureMode() : NOT_TENURED;

  return Add<HAllocate>(total_size, HType::Tagged(), pretenure_flag,
                        instance_type);
}


void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  AddStoreMapConstant(elements, map);
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}


HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
    ElementsKind kind,
    HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
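  // (Forcing the representation up front means any deopt happens before the
  // allocation below, never between the allocation and the header stores,
  // which would leave a partly-initialized object behind.)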
  capacity = AddUncasted<HForceRepresentation>(capacity,
                                               Representation::Smi());
  HValue* new_elements = BuildAllocateElements(kind, capacity);
  BuildInitializeElementsHeader(new_elements, kind, capacity);
  return new_elements;
}


HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
    HValue* array_map,
    AllocationSiteMode mode,
    ElementsKind elements_kind,
    HValue* allocation_site_payload,
    HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(array, access, empty_fixed_array);
  Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
                        length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }

  int elements_location = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    elements_location += AllocationMemento::kSize;
  }

  HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
      array, Add<HConstant>(elements_location));
  Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
  return elements;
}


HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements,
    HValue* checked_key,
    HValue* val,
    HValue* dependency,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    ASSERT(val != NULL);
    if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
        elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
                            elements_kind == FAST_SMI_ELEMENTS
                                ? STORE_TO_INITIALIZED_ENTRY
                                : INITIALIZING_STORE);
  }

  ASSERT(access_type == LOAD);
  ASSERT(val == NULL);
  HLoadKeyed* load = Add<HLoadKeyed>(
      elements, checked_key, dependency, elements_kind, load_mode);
  if (FLAG_opt_safe_uint32_operations &&
      (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
       elements_kind == UINT32_ELEMENTS)) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}


HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
  return Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForElementsPointer());
}


HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
  return Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForFixedArrayLength());
}


HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  // Grow by roughly 1.5x plus a fixed minimum: new = old + (old >> 1) + 16.
  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
                                                graph_->GetConstant1());

  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  HValue* min_growth = Add<HConstant>(16);

  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}


void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length,
                                            ElementsKind kind) {
  int element_size = IsFastDoubleElementsKind(kind) ?
      kDoubleSize : kPointerSize;
  int max_size = Page::kMaxRegularHeapObjectSize / element_size;
  max_size -= JSArray::kSize / element_size;
  HConstant* max_size_constant = Add<HConstant>(max_size);
  Add<HBoundsCheck>(length, max_size_constant);
}


HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  BuildNewSpaceArrayCheck(new_capacity, new_kind);

  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
      new_kind, new_capacity);

  BuildCopyElements(elements, kind,
                    new_elements, new_kind,
                    length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}


void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below
  // cause a garbage collection.
  Factory* factory = isolate()->factory();

  double nan_double = FixedDoubleArray::hole_nan_as_double();
  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? Add<HConstant>(factory->the_hole_value())
      : Add<HConstant>(nan_double);

  // Special loop unfolding case
  static const int kLoopUnfoldLimit = 8;
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
      initial_capacity = constant_to;
    }
  }

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  if (initial_capacity >= 0) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, hole, elements_kind);
    }
  } else {
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* key = builder.BeginBody(from, to, Token::LT);

    Add<HStoreKeyed>(elements, key, hole, elements_kind);

    builder.EndBody();
  }
}


void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);

  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), capacity);
  }

  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);

  HValue* element = Add<HLoadKeyed>(from_elements, key,
                                    static_cast<HValue*>(NULL),
                                    from_elements_kind,
                                    ALLOW_RETURN_HOLE);

  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                       IsFastSmiElementsKind(to_elements_kind))
      ? FAST_HOLEY_ELEMENTS : to_elements_kind;

  if (IsHoleyElementsKind(from_elements_kind) &&
      from_elements_kind != to_elements_kind) {
    IfBuilder if_hole(this);
    if_hole.If<HCompareHoleAndBranch>(element);
    if_hole.Then();
    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind) ?
        Add<HConstant>(FixedDoubleArray::hole_nan_as_double()) :
        graph()->GetConstantHole();
    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
    if_hole.Else();
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
    if_hole.End();
  } else {
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
  }

  builder.EndBody();

  if (!pre_fill_with_holes && length != capacity) {
    // Fill unused capacity with the hole.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              key, capacity);
  }
}


HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
                                              HValue* allocation_site,
                                              AllocationSiteMode mode,
                                              ElementsKind kind,
                                              int length) {
  NoObservableSideEffectsScope no_effects(this);

  // All sizes here are multiples of kPointerSize.
  int size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    size += AllocationMemento::kSize;
  }

  HValue* size_in_bytes = Add<HConstant>(size);
  HInstruction* object = Add<HAllocate>(size_in_bytes,
                                        HType::JSObject(),
                                        NOT_TENURED,
                                        JS_OBJECT_TYPE);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length == 0)) {
      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
      Add<HStoreNamedField>(
          object, access, Add<HLoadNamedField>(
              boilerplate, static_cast<HValue*>(NULL), access));
    }
  }

  // Create an allocation site info if requested.
  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(JSArray::kSize), allocation_site);
  }

  if (length > 0) {
    // We have to initialize the elements pointer if allocation folding is
    // turned off.
    if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
      HConstant* empty_fixed_array = Add<HConstant>(
          isolate()->factory()->empty_fixed_array());
      Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                            empty_fixed_array, INITIALIZING_STORE);
    }

    HValue* boilerplate_elements = AddLoadElements(boilerplate);
    HValue* object_elements;
    if (IsFastDoubleElementsKind(kind)) {
      HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
                                       NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
    } else {
      HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
                                       NOT_TENURED, FIXED_ARRAY_TYPE);
    }
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);

    // Copy the elements array header.
    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
      Add<HStoreNamedField>(
          object_elements, access, Add<HLoadNamedField>(
              boilerplate_elements, static_cast<HValue*>(NULL), access));
    }

    // Copy the elements array contents.
    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
    // copying loops with constant length up to a given boundary and use this
    // helper here instead.
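    // Until then, emit one keyed load/store pair per element; |length| is a
    // compile-time constant here, so this C++ loop unrolls the copy in the
    // generated graph.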
    for (int i = 0; i < length; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements,
                                            key_constant,
                                            static_cast<HValue*>(NULL),
                                            kind);
      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
    }
  }

  return object;
}


void HGraphBuilder::BuildCompareNil(
    HValue* value,
    Type* type,
    HIfContinuation* continuation) {
  IfBuilder if_nil(this);
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      BuildCheckMap(value, type->Classes().Current());
    } else {
      if_nil.Deopt("Too many undetectable types");
    }
  }

  if_nil.CaptureContinuation(continuation);
}


void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  ASSERT(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size);
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count = Add<HLoadNamedField>(
        allocation_site, static_cast<HValue*>(NULL),
        HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a
    // problem since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    HStoreNamedField* store = Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset),
        memento_create_count);
    // No write barrier needed to store a smi.
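    // (Smis are immediates rather than heap pointers, so the incremental
    // marker and the remembered set never need to see this store.)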
    store->SkipWriteBarrier();
  }
}


HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the global context, then the native context
  HInstruction* context = Add<HLoadNamedField>(
      closure, static_cast<HValue*>(NULL),
      HObjectAccess::ForFunctionContextPointer());
  HInstruction* global_object = Add<HLoadNamedField>(
      context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kNativeContextOffset);
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
}


HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global context, then the native context
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForObservableJSObjectOffset(
          GlobalObject::kNativeContextOffset));
}


HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
    ElementsKind kind,
    HValue* allocation_site_payload,
    HValue* constructor_function,
    AllocationSiteOverrideMode override_mode) :
        builder_(builder),
        kind_(kind),
        allocation_site_payload_(allocation_site_payload),
        constructor_function_(constructor_function) {
  ASSERT(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}


HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}


HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL ?
      builder()->BuildGetNativeContext(constructor_function_) :
      builder()->BuildGetNativeContext();

  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->Add<HLoadNamedField>(
      constructor_function_, static_cast<HValue*>(NULL), access);
}


HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
    HValue* length_node) {
  ASSERT(length_node != NULL);

  int base_size = JSArray::kSize;
  if (mode_ == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
  base_size += FixedArray::kHeaderSize;

  HInstruction* elements_size_value =
      builder()->Add<HConstant>(elements_size());
  HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
                                    length_node, elements_size_value);
  builder()->AddInstruction(mul);

  HInstruction* base = builder()->Add<HConstant>(base_size);
  HInstruction* total_size = HAdd::New(builder()->zone(),
                                       builder()->context(), base, mul);
  total_size->ClearFlag(HValue::kCanOverflow);
  builder()->AddInstruction(total_size);
  return total_size;
}


HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
  int base_size = JSArray::kSize;
  if (mode_ == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }

  base_size += IsFastDoubleElementsKind(kind_)
      ? FixedDoubleArray::SizeFor(initial_capacity())
      : FixedArray::SizeFor(initial_capacity());

  return builder()->Add<HConstant>(base_size);
}


HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(size_in_bytes,
                       capacity,
                       builder()->graph()->GetConstant0());
}


HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
                                                     HValue* length_field,
                                                     FillMode fill_mode) {
  HValue* size_in_bytes = EstablishAllocationSize(capacity);
  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
}


HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                                     HValue* capacity,
                                                     HValue* length_field,
                                                     FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Allocate (dealing with failure appropriately)
  HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
      HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);

  // Folded array allocation should be aligned if it has fast double elements.
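  // (On 32-bit targets the heap only guarantees pointer-size alignment, so
  // unboxed doubles need an explicit 8-byte alignment request.)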
  if (IsFastDoubleElementsKind(kind_)) {
    new_object->MakeDoubleAligned();
  }

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }
  elements_location_ = builder()->BuildJSArrayHeader(new_object,
                                                     map,
                                                     mode_,
                                                     kind_,
                                                     allocation_site_payload_,
                                                     length_field);

  // Initialize the elements
  builder()->BuildInitializeElementsHeader(elements_location_, kind_,
                                           capacity);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return new_object;
}


HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue* object,
                                                     Handle<Map> map) {
  return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
                               Add<HConstant>(map));
}


HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kBuiltinsOffset);
  HValue* builtins = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
  HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
      JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
  return Add<HLoadNamedField>(
      builtins, static_cast<HValue*>(NULL), function_access);
}


HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      inline_bailout_(false),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->zone());
  if (FLAG_hydrogen_track_positions) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}


HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
                                                HBasicBlock* second,
                                                BailoutId join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph()->CreateBasicBlock();
    Goto(first, join_block);
    Goto(second, join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


HBasicBlock* HOptimizedGraphBuilder::JoinContinue(
    IterationStatement* statement,
    HBasicBlock* exit_block,
    HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) Goto(exit_block, continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
                                                HBasicBlock* loop_entry,
                                                HBasicBlock* body_exit,
                                                HBasicBlock* loop_successor,
                                                HBasicBlock* break_block) {
  if (body_exit != NULL) Goto(body_exit, loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) Goto(loop_successor, break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}


// Build a new loop header block and set it as the current block.
HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
  Goto(loop_entry);
  set_current_block(loop_entry);
  return loop_entry;
}


HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
    IterationStatement* statement) {
  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement) ?
      osr()->BuildOsrLoopEntry(statement) : BuildLoopEntry();
  return loop_entry;
}


void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             HSourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}


HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      next_inline_id_(0),
      inlined_functions_(5, info->zone()) {
  if (info->IsStub()) {
    HydrogenCodeStub* stub = info->code_stub();
    CodeStubInterfaceDescriptor* descriptor = stub->GetInterfaceDescriptor();
    start_environment_ =
        new(zone_) HEnvironment(zone_, descriptor->environment_length());
  } else {
    TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}


HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result, zone());
  return result;
}


void HGraph::FinalizeUniqueness() {
  DisallowHeapAllocation no_gc;
  ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
  for (int i = 0; i < blocks()->length(); ++i) {
    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
      it.Current()->FinalizeUniqueness();
    }
  }
}


int HGraph::TraceInlinedFunction(
    Handle<SharedFunctionInfo> shared,
    HSourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
    return 0;
  }

  int id = 0;
  for (; id < inlined_functions_.length(); id++) {
    if (inlined_functions_[id].shared().is_identical_to(shared)) {
      break;
    }
  }

  if (id == inlined_functions_.length()) {
    inlined_functions_.Add(InlinedFunctionInfo(shared), zone());

    if (!shared->script()->IsUndefined()) {
      Handle