// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen.h"

#include <algorithm>

#include "src/v8.h"

#include "src/allocation-site-scopes.h"
#include "src/codegen.h"
#include "src/full-codegen.h"
#include "src/hashmap.h"
#include "src/hydrogen-bce.h"
#include "src/hydrogen-bch.h"
#include "src/hydrogen-canonicalize.h"
#include "src/hydrogen-check-elimination.h"
#include "src/hydrogen-dce.h"
#include "src/hydrogen-dehoist.h"
#include "src/hydrogen-environment-liveness.h"
#include "src/hydrogen-escape-analysis.h"
#include "src/hydrogen-gvn.h"
#include "src/hydrogen-infer-representation.h"
#include "src/hydrogen-infer-types.h"
#include "src/hydrogen-load-elimination.h"
#include "src/hydrogen-mark-deoptimize.h"
#include "src/hydrogen-mark-unreachable.h"
#include "src/hydrogen-osr.h"
#include "src/hydrogen-range-analysis.h"
#include "src/hydrogen-redundant-phi.h"
#include "src/hydrogen-removable-simulates.h"
#include "src/hydrogen-representation-changes.h"
#include "src/hydrogen-sce.h"
#include "src/hydrogen-store-elimination.h"
#include "src/hydrogen-uint32-analysis.h"
#include "src/ic/call-optimization.h"  // GetRootConstructor
#include "src/ic/ic-inl.h"
#include "src/lithium-allocator.h"
#include "src/parser.h"
#include "src/runtime.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
#include "src/typing.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-codegen-x87.h"  // NOLINT
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }

Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}

void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}

void HBasicBlock::AttachLoopInformation() {
  DCHECK(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}

void HBasicBlock::DetachLoopInformation() {
  DCHECK(IsLoopHeader());
  loop_information_ = NULL;
}

void HBasicBlock::AddPhi(HPhi* phi) {
  DCHECK(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}

void HBasicBlock::RemovePhi(HPhi* phi) {
  DCHECK(phi->block() == this);
  DCHECK(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}

void HBasicBlock::AddInstruction(HInstruction* instr,
                                 HSourcePosition position) {
  DCHECK(!IsStartBlock() || !IsFinished());
  DCHECK(!instr->IsLinked());
  DCHECK(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    DCHECK(last_environment() != NULL);
    DCHECK(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      DCHECK(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}

HPhi* HBasicBlock::AddNewPhi(int merged_index) {
  if (graph()->IsInsideNoSideEffectsScope()) {
    merged_index = HPhi::kInvalidMergedIndex;
  }
  HPhi* phi = new(zone()) HPhi(merged_index, zone());
  AddPhi(phi);
  return phi;
}

HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  DCHECK(HasEnvironment());
  HEnvironment* environment = last_environment();
  DCHECK(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}

void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) {
  DCHECK(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}

void HBasicBlock::Goto(HBasicBlock* block, HSourcePosition position,
                       FunctionState* state, bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}

void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  HSourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  DCHECK(target->IsInlineReturnTarget());
  DCHECK(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}

void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  DCHECK(!HasEnvironment());
  DCHECK(first() == NULL);
  UpdateEnvironment(env);
}

void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}

void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  DCHECK(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    DCHECK(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    DCHECK(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
              ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}

bool HBasicBlock::Dominates(HBasicBlock* other) const {
  HBasicBlock* current = other->dominator();
  while (current != NULL) {
    if (current == this) return true;
    current = current->dominator();
  }
  return false;
}

bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
  if (this == other) return true;
  return Dominates(other);
}

int HBasicBlock::LoopNestingDepth() const {
  const HBasicBlock* current = this;
  int result = (current->IsLoopHeader()) ? 1 : 0;
  while (current->parent_loop_header() != NULL) {
    current = current->parent_loop_header();
    result++;
  }
  return result;
}

void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  DCHECK(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerate loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}

void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  DCHECK(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  DCHECK(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}

void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    DCHECK(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      DCHECK(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    DCHECK(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}

void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  DCHECK(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // succeeding blocks in this list, the predecessor comes before the
  // successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}

void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      DCHECK(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      DCHECK(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}

void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    DCHECK(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header =
        dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}

int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
  for (int i = 0; i < predecessors_.length(); ++i) {
    if (predecessors_[i] == predecessor) return i;
  }
  UNREACHABLE();
  return -1;
}

#ifdef DEBUG
void HBasicBlock::Verify() {
  // Check that every block is finished.
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
#endif

void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}

HBasicBlock* HLoopInformation::GetLastBackEdge() const {
  int max_id = -1;
  HBasicBlock* result = NULL;
  for (int i = 0; i < back_edges_.length(); ++i) {
    HBasicBlock* cur = back_edges_[i];
    if (cur->block_id() > max_id) {
      max_id = cur->block_id();
      result = cur;
    }
  }
  return result;
}

void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}

#ifdef DEBUG

// Checks reachability of the blocks in this graph and stores a bit in
// the BitVector "reachable()" for every block that can be reached
// from the start block of the graph. If "dont_visit" is non-null, the given
// block is treated as if it would not be part of the graph. "visited_count()"
// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;
  ZoneList<HBasicBlock*> stack_;
  BitVector reachable_;
  HBasicBlock* dont_visit_;
};
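
// Illustrative sketch (not part of the original source): this is how
// HGraph::Verify() below uses the analyzer to assert that the graph is
// fully connected.
//
//   ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
//   DCHECK(analyzer.visited_count() == blocks_.length());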
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);
    block->Verify();

    // Check that every block contains at least one node and that only the
    // last node is a control instruction.
    HInstruction* current = block->first();
    DCHECK(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      DCHECK((current->next() == NULL) == current->IsControlInstruction());
      DCHECK(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    DCHECK(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      DCHECK(first->predecessors()->Contains(block));
      if (second != NULL) {
        DCHECK(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        DCHECK(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        DCHECK(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  DCHECK(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    DCHECK(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    DCHECK(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        DCHECK(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}

#endif

HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}

HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}

HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}

HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}

HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}

#define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value)           \
HConstant* HGraph::GetConstant##Name() {                                      \
  if (!constant_##name##_.is_set()) {                                         \
    HConstant* constant = new(zone()) HConstant(                              \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()),\
        Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),     \
        false,                                                                \
        Representation::Tagged(),                                             \
        htype,                                                                \
        true,                                                                 \
        boolean_value,                                                        \
        false,                                                                \
        ODDBALL_TYPE);                                                        \
    constant->InsertAfter(entry_block()->first());                            \
    constant_##name##_.set(constant);                                         \
  }                                                                           \
  return ReinsertConstantIfNecessary(constant_##name##_.get());               \
}

DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)

#undef DEFINE_GET_CONSTANT

#define DEFINE_IS_CONSTANT(Name, name)                                        \
bool HGraph::IsConstant##Name(HConstant* constant) {                          \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)
#undef DEFINE_IS_CONSTANT

HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}

bool HGraph::IsStandardConstant(HConstant* constant) {
  if (IsConstantUndefined(constant)) return true;
  if (IsConstant0(constant)) return true;
  if (IsConstant1(constant)) return true;
  if (IsConstantMinus1(constant)) return true;
  if (IsConstantTrue(constant)) return true;
  if (IsConstantFalse(constant)) return true;
  if (IsConstantHole(constant)) return true;
  if (IsConstantNull(constant)) return true;
  return false;
}
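
// Illustrative sketch (not part of the original source): after
// preprocessing, e.g. DEFINE_GET_CONSTANT(True, true, boolean,
// HType::Boolean(), true) above expands to a getter that lazily creates and
// caches the constant:
//
//   HConstant* HGraph::GetConstantTrue() {
//     if (!constant_true_.is_set()) {
//       ...  // create the HConstant, insert it after the block entry, cache
//     }
//     return ReinsertConstantIfNecessary(constant_true_.get());
//   }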
HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}

HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : needs_compare_(true) {
  Initialize(builder);
}

HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false),
      first_true_block_(NULL),
      first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}

void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}

void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
  InitializeDontCreateBlocks(builder);
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}

HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}

void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}

void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}

void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}

void HGraphBuilder::IfBuilder::JoinContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}

void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}

void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}

void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}

void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
  builder()->FinishExitCurrentBlock(
      builder()->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}

void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder()->current_block();
  DCHECK(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record = new (builder()->zone())
      MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    DCHECK(block->end() == NULL);
    if (deopt) {
      normal_merge_at_join_block_count_++;
    } else {
      deopt_merge_at_join_block_count_++;
    }
  }
  builder()->set_current_block(NULL);
  pending_merge_block_ = false;
}

void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}

void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();
  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}
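
// Illustrative sketch (not part of the original source): the typical
// IfBuilder protocol, as used throughout the Build* helpers below.
//
//   IfBuilder cond(this);
//   cond.If<HCompareNumericAndBranch>(value, limit, Token::LT);
//   cond.Then();
//   ...  // emit code for the true branch
//   cond.Else();
//   ...  // emit code for the false branch
//   cond.End();  // joins both branches and sets the merged current block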
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
      deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->zone(), NULL),
          HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}

HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}

HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}

HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
                                        HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount) {
  Initialize(builder, context, direction, increment_amount);
  increment_amount_ = increment_amount;
}

void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}

HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}

void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
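
// Illustrative sketch (not part of the original source): a counted loop as
// built by BuildCopySeqStringChars() further down in this file.
//
//   LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
//   HValue* index = loop.BeginBody(graph()->GetConstant0(), length,
//                                  Token::LT);
//   {
//     ...  // loop body, may use |index|
//   }
//   loop.EndBody();  // wires up the back edge and the loop exit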
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we see a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}

void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}

HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}

HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}

void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}

void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}

void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(
        reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}

void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}

HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}

HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}

HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                                            HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}

HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}

void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}

HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}

HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (shared->strict_mode() == STRICT || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, function);
}

HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);

  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}

HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}

void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                               HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}

void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}
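
// Illustrative note (not part of the original source): the combined check in
// BuildJSObjectCheck() above works because the instance type occupies the
// low byte and the bit field the high byte of the 16-bit load, i.e.
//
//   masked = ((bit_field << 8) | instance_type)
//            & ((bit_field_mask << 8) | 0xFF)
//
// The following HSub/HBoundsCheck pair then implements the unsigned range
// test JS_OBJECT_TYPE <= instance_type <= LAST_JS_OBJECT_TYPE while at the
// same time requiring all bits selected by bit_field_mask to be zero (any
// set bit makes the subtraction result exceed the bound and deopt).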
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false
  // of the continuation returns either a symbol or a unique string key. All
  // other object types cause a deopt to fall back to the runtime.
  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash =
            Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));
        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index
          // path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              isolate()->factory()->empty_string(),
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}

void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapInstanceType());
  STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                max_global_type,
                                                Token::LTE);
  if_global_object.And();
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                min_global_type,
                                                Token::GTE);
  if_global_object.ThenDeopt("receiver was a global object");
  if_global_object.End();
}

void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}

HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                           HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}

HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
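
// Illustrative sketch (not part of the original source): the graph built in
// BuildElementIndexHash() above mirrors this plain C++ computation, which
// follows directly from the inline comments.
//
//   uint32_t Hash(uint32_t index, uint32_t seed) {
//     uint32_t hash = index ^ seed;
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash;
//   }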
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_ELEMENTS);

  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(
      elements, key_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
                           Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized
    // key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type = Add<HLoadNamedField>(
        map, static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND,
        instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key,
                                                               Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(
        &found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field
    // must be zero, otherwise the dictionary element requires special
    // handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(
        elements, details_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask |
                       PropertyDetails::DeletedField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, static_cast<HValue*>(NULL),
                         FAST_ELEMENTS));
    details_compare.Else();
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
                           Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}
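
// Illustrative note (not part of the original source): the probe sequence
// above advances with a growing step,
//
//   entry(0) = hash & mask
//   entry(n + 1) = (entry(n) + count(n)) & mask,  count(n + 1) = count(n) + 1
//
// so successive probes visit distinct slots of the power-of-two-sized table;
// this presumably mirrors the probe order used by the runtime dictionary
// lookup that the undefined-key path above falls back to.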
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
      NOT_TENURED, JS_ARRAY_TYPE);

  // Initialize the JSRegExpResult header.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HValue* native_context = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForGlobalObjectNativeContext());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset),
      length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  if (!elements->has_size_upper_bound()) {
    HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
        elements_kind, max_length->Integer32Value());
    elements->set_size_upper_bound(size_in_bytes_upper_bound);
  }

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}

HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),
                                  FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);
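
  // Illustrative note (not part of the original source): the number string
  // cache is a FixedArray in which entry i occupies two consecutive slots,
  // cache[2 * i] holding the number and cache[2 * i + 1] the cached string.
  // That is why the mask is computed as (length >> 1) - 1 above, the key
  // index as hash << 1, and the value index as key_index + 1 below.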
  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index,
                                            graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}

HAllocate* HGraphBuilder::BuildAllocate(
    HValue* object_size,
    HType type,
    InstanceType instance_type,
    HAllocationMode allocation_mode) {
  // Compute the effective allocation size.
  HValue* size = object_size;
  if (allocation_mode.CreateAllocationMementos()) {
    size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
    size->ClearFlag(HValue::kCanOverflow);
  }

  // Perform the actual allocation.
  HAllocate* object = Add<HAllocate>(
      size, type, allocation_mode.GetPretenureMode(),
      instance_type, allocation_mode.feedback_site());

  // Setup the allocation memento.
  if (allocation_mode.CreateAllocationMementos()) {
    BuildCreateAllocationMemento(
        object, object_size, allocation_mode.current_site());
  }

  return object;
}

HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
                                             HValue* right_length) {
  // Compute the combined string length and check against max string length.
  HValue* length = AddUncasted<HAdd>(left_length, right_length);
  // Check that length <= kMaxLength <=> length < MaxLength + 1.
  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
  Add<HBoundsCheck>(length, max_length);
  return length;
}

HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ASCII_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
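  // Illustrative note (not part of the original source): in terms of the
  // instance-type bits, the two branches below test
  //
  //   1. (left & right) has kStringEncodingMask or kOneByteDataHintMask set,
  //      i.e. a property shared by both strings (the AND keeps only common
  //      bits), or
  //   2. (left ^ right) restricted to those two bits equals exactly
  //      kOneByteStringTag | kOneByteDataHintTag, i.e. one string is
  //      one-byte and the *other* one carries the one-byte data hint.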
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
              kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
          kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_ascii_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}


void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}


HValue* HGraphBuilder::BuildObjectSizeAlignment(
    HValue* unaligned_size, int header_size) {
  DCHECK((header_size & kObjectAlignmentMask) == 0);
  HValue* size = AddUncasted<HAdd>(
      unaligned_size, Add<HConstant>(static_cast<int32_t>(
          header_size + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  return AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
          ~kObjectAlignmentMask)));
}


HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
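  // (ConsString::kMinLength is the cutoff below which flattening into a
  // sequential string is considered cheaper than allocating a cons cell and
  // paying for the indirection later; shorter results take the sequential
  // copy path in the Else branch below.)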
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* ascii_string_map =
          Add<HConstant>(isolate()->factory()->ascii_string_map());

      // Determine map and size depending on whether result is one-byte
      // string.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(ascii_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Allocate the string object. HAllocate does not care whether we pass
      // STRING_TYPE or ASCII_STRING_TYPE here, so we just use STRING_TYPE.
      HAllocate* result = BuildAllocate(
          size, HType::String(), STRING_TYPE, allocation_mode);
      Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

      // Initialize the string fields.
      Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                            Add<HConstant>(String::kEmptyHashField));
      Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(),
                            length);

      // Copy characters to the result string.
      IfBuilder if_twobyte(this);
      if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
      if_twobyte.Then();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, left_length, String::TWO_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.Else();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, left_length, String::ONE_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.End();

      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Return the sequential string.
      Push(result);
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kStringAdd),
            2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}


HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  return Pop();
}


HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK((!IsExternalArrayElementsKind(elements_kind) &&
          !IsFixedTypedArrayElementsKind(elements_kind)) ||
         !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit
  // dependency on a HElementsTransition instruction. The flag can also be
  // removed if the map to check has FAST_HOLEY_ELEMENTS, since there can be
  // no further ElementsKind transitions. Finally, the dependency can be
  // removed for stores for FAST_ELEMENTS, since a transition to HOLEY
  // elements won't change the generated store code.
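  // (Illustrative example: a FAST_ELEMENTS store like a[i] = x emits the
  // same code whether or not the receiver later transitions to
  // FAST_HOLEY_ELEMENTS, which is why the dependency is dropped for stores
  // below.)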
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind) ||
      IsFixedTypedArrayElementsKind(elements_kind)) {
    HValue* backing_store;
    if (IsExternalArrayElementsKind(elements_kind)) {
      backing_store = Add<HLoadNamedField>(
          elements, static_cast<HValue*>(NULL),
          HObjectAccess::ForExternalArrayExternalPointer());
    } else {
      backing_store = elements;
    }
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, elements_kind, access_type);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(
          backing_store, checked_key, val,
          checked_object, elements_kind, access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a
  // smi before manipulating the backing store. Otherwise the actual store
  // may deopt, leaving the backing store in an invalid state.
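  // (E.g. a store into a FAST_SMI_ELEMENTS backing store first forces the
  // value to a smi representation; if that conversion deopts, it does so
  // before any element has been written.)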
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, access_type, load_mode);
}


HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      return array_builder->AllocateArray(length_argument,
                                          array_length,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt("Holey array despite packed elements_kind feedback");
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, max_alloc_length, length);
}


HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
                                                  HValue* capacity) {
  int elements_size = IsFastDoubleElementsKind(kind)
      ? kDoubleSize
      : kPointerSize;

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  HInstruction* mul =
      HMul::NewImul(zone(), context(), capacity->ActualValue(),
                    elements_size_value);
  AddInstruction(mul);
  mul->ClearFlag(HValue::kCanOverflow);

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}


HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
  int base_size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }
  HConstant* size_in_bytes = Add<HConstant>(base_size);
  return Add<HAllocate>(
      size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
}


HConstant* HGraphBuilder::EstablishElementsAllocationSize(
    ElementsKind kind,
    int capacity) {
  int base_size = IsFastDoubleElementsKind(kind)
      ? FixedDoubleArray::SizeFor(capacity)
      : FixedArray::SizeFor(capacity);
  return Add<HConstant>(base_size);
}


HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                                HValue* size_in_bytes) {
  InstanceType instance_type = IsFastDoubleElementsKind(kind)
      ? FIXED_DOUBLE_ARRAY_TYPE
      : FIXED_ARRAY_TYPE;

  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
                        instance_type);
}


void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(),
                        Add<HConstant>(map));
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}


HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
    ElementsKind kind,
    HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  capacity = AddUncasted<HForceRepresentation>(capacity,
                                               Representation::Smi());
  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
  HValue* new_elements = BuildAllocateElements(kind, size_in_bytes);
  BuildInitializeElementsHeader(new_elements, kind, capacity);
  return new_elements;
}


void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);
  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}


HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements,
    HValue* checked_key,
    HValue* val,
    HValue* dependency,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
        elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
                            STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load = Add<HLoadKeyed>(
      elements, checked_key, dependency, elements_kind, load_mode);
  if (FLAG_opt_safe_uint32_operations &&
      (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
       elements_kind == UINT32_ELEMENTS)) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}


HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
                                           HValue* dependency) {
  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
}


HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* dependency) {
  return Add<HLoadNamedField>(
      object, dependency, HObjectAccess::ForElementsPointer());
}


HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
    HValue* array,
    HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForFixedArrayLength());
}


HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
                                                   ElementsKind kind,
                                                   HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForArrayLength(kind));
}


HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
                                                graph_->GetConstant1());

  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  HValue* min_growth = Add<HConstant>(16);

  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
  new_capacity->ClearFlag(HValue::kCanOverflow);

  return new_capacity;
}


HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
          (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
          ElementsKindToShiftSize(new_kind)));

  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
      new_kind, new_capacity);

  BuildCopyElements(elements, kind, new_elements,
                    new_kind, length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}


void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  // Since we're about to store a hole value, the store instruction below
  // must assume an elements kind that supports heap object values.
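  // (The hole is a heap object, so a fill over FAST_SMI_ELEMENTS is widened
  // to FAST_HOLEY_ELEMENTS below purely for the duration of these stores;
  // the array's actual map is not changed.)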
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  if (initial_capacity >= 0) {
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, elements_kind);
    }
  } else {
    // Carefully loop backwards so that the "from" remains live through the
    // loop rather than the "to". This often corresponds to keeping length
    // live rather than capacity, which helps register allocation, since
    // length is used more often than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);

    builder.EndBody();
  }
}


void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below
  // cause a garbage collection.
  Factory* factory = isolate()->factory();

  double nan_double = FixedDoubleArray::hole_nan_as_double();
  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? Add<HConstant>(factory->the_hole_value())
      : Add<HConstant>(nan_double);

  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
}


void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
                                            static_cast<HValue*>(NULL),
                                            from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
    }
  } else {
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    if (capacity == NULL) {
      capacity = AddLoadFixedArrayLength(to_elements);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key,
                                      static_cast<HValue*>(NULL),
                                      from_elements_kind,
                                      ALLOW_RETURN_HOLE);

    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
      if_hole.Else();
      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}


HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
                                                 HValue* allocation_site,
                                                 AllocationSiteMode mode,
                                                 ElementsKind kind) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* length = AddLoadArrayLength(boilerplate, kind);

  BuildJSArrayHeader(array,
                     map,
                     elements,
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     length);
  return array;
}


HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
                                                   HValue* allocation_site,
                                                   AllocationSiteMode mode) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);

  BuildJSArrayHeader(array,
                     map,
                     NULL,  // set elements to empty fixed array
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     graph()->GetConstant0());
  return array;
}


HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  // This function implicitly relies on the fact that the
  // FastCloneShallowArrayStub is called only for literals shorter than
  // JSObject::kInitialMaxFastElementArray.
  // Can't add HBoundsCheck here because otherwise the stub will eagerly
  // create a frame.
  HConstant* size_upper_bound = EstablishElementsAllocationSize(
      kind, JSObject::kInitialMaxFastElementArray);
  elements->set_size_upper_bound(size_upper_bound);

  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
                        elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid
  // unnecessary spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
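  // (The loop below copies the FixedArrayBase header, i.e. the map and
  // length words, one pointer-sized field at a time; FixedArray and
  // FixedDoubleArray share this header layout, so the same loop covers both
  // backing store kinds.)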
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements,
                             static_cast<HValue*>(NULL), access));
  }

  // And copy the array length.
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);

  return result;
}


void HGraphBuilder::BuildCompareNil(
    HValue* value,
    Type* type,
    HIfContinuation* continuation) {
  IfBuilder if_nil(this);
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced
      // by the monomorphic map when the code is used as a template to
      // generate a new IC. For optimized functions, there is no sentinel
      // map, the map emitted below is the actual monomorphic map.
      Add<HCheckMaps>(value, type->Classes().Current());
    } else {
      if_nil.Deopt("Too many undetectable types");
    }
  }

  if_nil.CaptureContinuation(continuation);
}


void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count = Add<HLoadNamedField>(
        allocation_site, static_cast<HValue*>(NULL),
        HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a
    // problem since the counter is bounded by the new space size.
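    // (The memento is a small struct placed directly after the object in
    // new space; pretenuring heuristics later compare this creation count
    // with the number of mementos found alive, roughly deciding whether the
    // site should allocate in old space instead.)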
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset),
        memento_create_count);
  }
}


HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
  // Get the global context, then the native context
  HInstruction* context = Add<HLoadNamedField>(
      closure, static_cast<HValue*>(NULL),
      HObjectAccess::ForFunctionContextPointer());
  HInstruction* global_object = Add<HLoadNamedField>(
      context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kNativeContextOffset);
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
}


HInstruction* HGraphBuilder::BuildGetNativeContext() {
  // Get the global context, then the native context
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  return Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForObservableJSObjectOffset(
          GlobalObject::kNativeContextOffset));
}


HInstruction* HGraphBuilder::BuildGetArrayFunction() {
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* index =
      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
  return Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
    ElementsKind kind,
    HValue* allocation_site_payload,
    HValue* constructor_function,
    AllocationSiteOverrideMode override_mode) :
        builder_(builder),
        kind_(kind),
        allocation_site_payload_(allocation_site_payload),
        constructor_function_(constructor_function) {
  DCHECK(!allocation_site_payload->IsConstant() ||
         HConstant::cast(allocation_site_payload)->handle(
             builder_->isolate())->IsAllocationSite());
  mode_ = override_mode == DISABLE_ALLOCATION_SITES
      ? DONT_TRACK_ALLOCATION_SITE
      : AllocationSite::GetMode(kind);
}


HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}


HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(
        constructor_function_, static_cast<HValue*>(NULL), access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();
  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}


HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
  // Find the map near the constructor function
  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
  return builder()->Add<HLoadNamedField>(
      constructor_function_, static_cast<HValue*>(NULL), access);
}


HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
  return AllocateArray(capacity,
                       capacity,
                       builder()->graph()->GetConstant0());
}


HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HConstant* capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  return AllocateArray(capacity,
                       capacity_upper_bound->GetInteger32Constant(),
                       length_field,
                       fill_mode);
}


HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    int capacity_upper_bound,
    HValue* length_field,
    FillMode fill_mode) {
  HConstant* elements_size_upper_bound = capacity->IsInteger32Constant()
      ? HConstant::cast(capacity)
      : builder()->EstablishElementsAllocationSize(kind_,
                                                   capacity_upper_bound);

  HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
  if (!elements_location_->has_size_upper_bound()) {
    elements_location_->set_size_upper_bound(elements_size_upper_bound);
  }
  return array;
}


HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
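  // (Having the size computation precede the JSArray allocation means it is
  // available before either allocation is emitted, so the two allocations
  // can potentially be folded into one contiguous allocation.)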
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL,  // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_,
                                           capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}


HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      GlobalObject::kBuiltinsOffset);
  HValue* builtins = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL), access);
  HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset(
      JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
  return Add<HLoadNamedField>(
      builtins, static_cast<HValue*>(NULL), function_access);
}


HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      inline_bailout_(false),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->zone());
  if (FLAG_hydrogen_track_positions) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}


HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
                                                HBasicBlock* second,
                                                BailoutId join_id) {
  if (first == NULL) {
    return second;
  } else if (second == NULL) {
    return first;
  } else {
    HBasicBlock* join_block = graph()->CreateBasicBlock();
    Goto(first, join_block);
    Goto(second, join_block);
    join_block->SetJoinId(join_id);
    return join_block;
  }
}


HBasicBlock* HOptimizedGraphBuilder::JoinContinue(
    IterationStatement* statement,
    HBasicBlock* exit_block,
    HBasicBlock* continue_block) {
  if (continue_block != NULL) {
    if (exit_block != NULL) Goto(exit_block, continue_block);
    continue_block->SetJoinId(statement->ContinueId());
    return continue_block;
  }
  return exit_block;
}


HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
                                                HBasicBlock* loop_entry,
                                                HBasicBlock* body_exit,
                                                HBasicBlock* loop_successor,
                                                HBasicBlock* break_block) {
  if (body_exit != NULL) Goto(body_exit, loop_entry);
  loop_entry->PostProcessLoopHeader(statement);
  if (break_block != NULL) {
    if (loop_successor != NULL) Goto(loop_successor, break_block);
    break_block->SetJoinId(statement->ExitId());
    return break_block;
  }
  return loop_successor;
}


// Build a new loop header block and set it as the current block.
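// (The IterationStatement overload below defers to the OSR builder when the
// loop is an on-stack-replacement entry point, so an OSR loop entry block is
// built instead of a plain loop header.)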
HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
  Goto(loop_entry);
  set_current_block(loop_entry);
  return loop_entry;
}


HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
    IterationStatement* statement) {
  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
      ? osr()->BuildOsrLoopEntry(statement)
      : BuildLoopEntry();
  return loop_entry;
}


void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             HSourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}


OStream& operator<<(OStream& os, const HBasicBlock& b) {
  return os << "B" << b.block_id();
}


HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false),
      next_inline_id_(0),
      inlined_functions_(5, info->zone()) {
  if (info->IsStub()) {
    CallInterfaceDescriptor descriptor =
        info->code_stub()->GetCallInterfaceDescriptor();
    start_environment_ = new (zone_)
        HEnvironment(zone_, descriptor.GetEnvironmentParameterCount());
  } else {
    TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown());
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}


HBasicBlock* HGraph::CreateBasicBlock() {
  HBasicBlock* result = new(zone()) HBasicBlock(this);
  blocks_.Add(result, zone());
  return result;
}


void HGraph::FinalizeUniqueness() {
  DisallowHeapAllocation no_gc;
  DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
  for (int i = 0; i < blocks()->length(); ++i) {
    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
      it.Current()->FinalizeUniqueness();
    }
  }
}


int HGraph::TraceInlinedFunction(
    Handle<SharedFunctionInfo> shared,
    HSourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
    return 0;
  }

  int id = 0;
  for (; id < inlined_functions_.length(); id++) {
    if (inlined_functions_[id].shared().is_identical_to(shared)) {
      break;
    }
  }

  if (id == inlined_functions_.length()) {
    inlined_functions_.Add(InlinedFunctionInfo(shared), zone());

    if (!shared->script()->IsUndefined()) {
      Handle