[compiler] Fixed some perf-related issues.
This patch removes unnecessary copies and adds reserves to vectors that are filled in a loop afterwards. Fixing clang-tidy warning. Bug: v8:8015 Change-Id: I4e13c0445a9760e09ef03a62ae48be622ebecc6b Reviewed-on: https://chromium-review.googlesource.com/1209783 Reviewed-by: Michael Starzinger <mstarzinger@chromium.org> Commit-Queue: Florian Sattler <sattlerf@google.com> Cr-Commit-Position: refs/heads/master@{#55776}
This commit is contained in:
parent
d048600ce9
commit
7d98d8e01d
@ -801,7 +801,7 @@ void BytecodeGraphBuilder::AdvanceToOsrEntryAndPeelLoops(
|
||||
int current_parent_offset =
|
||||
analysis.GetLoopInfoFor(osr_offset).parent_offset();
|
||||
while (current_parent_offset != -1) {
|
||||
LoopInfo current_parent_loop =
|
||||
const LoopInfo& current_parent_loop =
|
||||
analysis.GetLoopInfoFor(current_parent_offset);
|
||||
// We iterate until the back edge of the parent loop, which we detect by
|
||||
// the offset that the JumpLoop targets.
|
||||
|
@ -69,7 +69,7 @@ CodeGenerator::CodeGenerator(
|
||||
caller_registers_saved_(false),
|
||||
jump_tables_(nullptr),
|
||||
ools_(nullptr),
|
||||
osr_helper_(osr_helper),
|
||||
osr_helper_(std::move(osr_helper)),
|
||||
osr_pc_offset_(-1),
|
||||
optimized_out_literal_id_(-1),
|
||||
source_position_table_builder_(
|
||||
|
@ -282,7 +282,7 @@ EffectGraphReducer::EffectGraphReducer(
|
||||
state_(graph, kNumStates),
|
||||
revisit_(zone),
|
||||
stack_(zone),
|
||||
reduce_(reduce) {}
|
||||
reduce_(std::move(reduce)) {}
|
||||
|
||||
void EffectGraphReducer::ReduceFrom(Node* node) {
|
||||
// Perform DFS and eagerly trigger revisitation as soon as possible.
|
||||
|
@ -145,6 +145,12 @@ Node* CreateStubBuiltinContinuationFrameState(
|
||||
// by the deoptimizer and aren't explicitly passed in the frame state.
|
||||
int stack_parameter_count = descriptor.GetRegisterParameterCount() -
|
||||
DeoptimizerParameterCountFor(mode);
|
||||
// Reserving space in the vector, except for the case where
|
||||
// stack_parameter_count is -1.
|
||||
actual_parameters.reserve(stack_parameter_count >= 0
|
||||
? stack_parameter_count +
|
||||
descriptor.GetRegisterParameterCount()
|
||||
: 0);
|
||||
for (int i = 0; i < stack_parameter_count; ++i) {
|
||||
actual_parameters.push_back(
|
||||
parameters[descriptor.GetRegisterParameterCount() + i]);
|
||||
|
@ -5533,6 +5533,7 @@ Node* JSCallReducer::CreateArtificialFrameState(
|
||||
const Operator* op0 = common()->StateValues(0, SparseInputMask::Dense());
|
||||
Node* node0 = graph()->NewNode(op0);
|
||||
std::vector<Node*> params;
|
||||
params.reserve(parameter_count + 1);
|
||||
for (int parameter = 0; parameter < parameter_count + 1; ++parameter) {
|
||||
params.push_back(node->InputAt(1 + parameter));
|
||||
}
|
||||
|
@ -95,12 +95,14 @@ REPLACE_STUB_CALL(RejectPromise)
|
||||
REPLACE_STUB_CALL(ResolvePromise)
|
||||
#undef REPLACE_STUB_CALL
|
||||
|
||||
void JSGenericLowering::ReplaceWithStubCall(Node* node, Callable callable,
|
||||
void JSGenericLowering::ReplaceWithStubCall(Node* node,
|
||||
Callable callable,
|
||||
CallDescriptor::Flags flags) {
|
||||
ReplaceWithStubCall(node, callable, flags, node->op()->properties());
|
||||
}
|
||||
|
||||
void JSGenericLowering::ReplaceWithStubCall(Node* node, Callable callable,
|
||||
void JSGenericLowering::ReplaceWithStubCall(Node* node,
|
||||
Callable callable,
|
||||
CallDescriptor::Flags flags,
|
||||
Operator::Properties properties) {
|
||||
const CallInterfaceDescriptor& descriptor = callable.descriptor();
|
||||
|
@ -102,7 +102,7 @@ class UnobservablesSet final {
|
||||
UnobservablesSet(); // unvisited
|
||||
UnobservablesSet(const UnobservablesSet& other) : set_(other.set_) {}
|
||||
|
||||
UnobservablesSet Intersect(UnobservablesSet other, Zone* zone) const;
|
||||
UnobservablesSet Intersect(const UnobservablesSet& other, Zone* zone) const;
|
||||
UnobservablesSet Add(UnobservableStore obs, Zone* zone) const;
|
||||
UnobservablesSet RemoveSameOffset(StoreOffset off, Zone* zone) const;
|
||||
|
||||
@ -140,7 +140,7 @@ class RedundantStoreFinder final {
|
||||
private:
|
||||
void VisitEffectfulNode(Node* node);
|
||||
UnobservablesSet RecomputeUseIntersection(Node* node);
|
||||
UnobservablesSet RecomputeSet(Node* node, UnobservablesSet uses);
|
||||
UnobservablesSet RecomputeSet(Node* node, const UnobservablesSet& uses);
|
||||
static bool CannotObserveStoreField(Node* node);
|
||||
|
||||
void MarkForRevisit(Node* node);
|
||||
@ -252,8 +252,8 @@ void StoreStoreElimination::Run(JSGraph* js_graph, Zone* temp_zone) {
|
||||
// Recompute unobservables-set for a node. Will also mark superfluous nodes
|
||||
// as to be removed.
|
||||
|
||||
UnobservablesSet RedundantStoreFinder::RecomputeSet(Node* node,
|
||||
UnobservablesSet uses) {
|
||||
UnobservablesSet RedundantStoreFinder::RecomputeSet(
|
||||
Node* node, const UnobservablesSet& uses) {
|
||||
switch (node->op()->opcode()) {
|
||||
case IrOpcode::kStoreField: {
|
||||
Node* stored_to = node->InputAt(0);
|
||||
@ -472,7 +472,7 @@ UnobservablesSet UnobservablesSet::VisitedEmpty(Zone* zone) {
|
||||
// Computes the intersection of two UnobservablesSets. May return
|
||||
// UnobservablesSet::Unvisited() instead of an empty UnobservablesSet for
|
||||
// speed.
|
||||
UnobservablesSet UnobservablesSet::Intersect(UnobservablesSet other,
|
||||
UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
|
||||
Zone* zone) const {
|
||||
if (IsEmpty() || other.IsEmpty()) {
|
||||
return Unvisited();
|
||||
|
Loading…
Reference in New Issue
Block a user