// Copyright (c) 2017 The Khronos Group Inc.
// Copyright (c) 2017 Valve Corporation
// Copyright (c) 2017 LunarG Inc.
// Copyright (c) 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/opt/aggressive_dead_code_elim_pass.h"

#include <memory>
#include <stack>

#include "source/cfa.h"
#include "source/latest_version_glsl_std_450_header.h"
#include "source/opt/iterator.h"
#include "source/opt/reflect.h"
#include "source/spirv_constant.h"

namespace spvtools {
namespace opt {

namespace {

const uint32_t kTypePointerStorageClassInIdx = 0;
const uint32_t kEntryPointFunctionIdInIdx = 1;
const uint32_t kSelectionMergeMergeBlockIdInIdx = 0;
const uint32_t kLoopMergeMergeBlockIdInIdx = 0;
const uint32_t kLoopMergeContinueBlockIdInIdx = 1;
const uint32_t kCopyMemoryTargetAddrInIdx = 0;
const uint32_t kCopyMemorySourceAddrInIdx = 1;
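
// Note: these constants are zero-based indices into an instruction's input
// operands (as consumed by GetSingleWordInOperand), not word offsets into the
// encoded SPIR-V instruction.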

// Sorting functor to present annotation instructions in an easy-to-process
// order. The functor orders by opcode first and falls back on unique id
// ordering if both instructions have the same opcode.
//
// Desired priority:
// SpvOpGroupDecorate
// SpvOpGroupMemberDecorate
// SpvOpDecorate
// SpvOpMemberDecorate
// SpvOpDecorateId
// SpvOpDecorateStringGOOGLE
// SpvOpDecorationGroup
struct DecorationLess {
  bool operator()(const Instruction* lhs, const Instruction* rhs) const {
    assert(lhs && rhs);
    SpvOp lhsOp = lhs->opcode();
    SpvOp rhsOp = rhs->opcode();
    if (lhsOp != rhsOp) {
#define PRIORITY_CASE(opcode)                          \
  if (lhsOp == opcode && rhsOp != opcode) return true; \
  if (rhsOp == opcode && lhsOp != opcode) return false;
      // OpGroupDecorate and OpGroupMemberDecorate are highest priority to
      // eliminate dead targets early and simplify subsequent checks.
      PRIORITY_CASE(SpvOpGroupDecorate)
      PRIORITY_CASE(SpvOpGroupMemberDecorate)
      PRIORITY_CASE(SpvOpDecorate)
      PRIORITY_CASE(SpvOpMemberDecorate)
      PRIORITY_CASE(SpvOpDecorateId)
      PRIORITY_CASE(SpvOpDecorateStringGOOGLE)
      // OpDecorationGroup is lowest priority to ensure use/def chains remain
      // usable for instructions that target this group.
      PRIORITY_CASE(SpvOpDecorationGroup)
#undef PRIORITY_CASE
    }

    // Fall back to maintain total ordering (compare unique ids).
    return *lhs < *rhs;
  }
};
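
// DecorationLess is used below in ProcessGlobalValues() as the comparator for
//   std::sort(annotations.begin(), annotations.end(), DecorationLess());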

}  // namespace

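// Returns true if |varId| names an OpVariable whose pointer type has storage
// class |storageClass|. Returns false for a zero id or a non-variable id.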
bool AggressiveDCEPass::IsVarOfStorage(uint32_t varId, uint32_t storageClass) {
  if (varId == 0) return false;
  const Instruction* varInst = get_def_use_mgr()->GetDef(varId);
  const SpvOp op = varInst->opcode();
  if (op != SpvOpVariable) return false;
  const uint32_t varTypeId = varInst->type_id();
  const Instruction* varTypeInst = get_def_use_mgr()->GetDef(varTypeId);
  if (varTypeInst->opcode() != SpvOpTypePointer) return false;
  return varTypeInst->GetSingleWordInOperand(kTypePointerStorageClassInIdx) ==
         storageClass;
}

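// A variable is treated as "local" if it has Function storage class, or if it
// has Private or Workgroup storage class and privates are currently being
// treated like locals (an entry point with no function calls).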
bool AggressiveDCEPass::IsLocalVar(uint32_t varId) {
  if (IsVarOfStorage(varId, SpvStorageClassFunction)) {
    return true;
  }
  if (!private_like_local_) {
    return false;
  }

  return IsVarOfStorage(varId, SpvStorageClassPrivate) ||
         IsVarOfStorage(varId, SpvStorageClassWorkgroup);
}

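// Adds to the worklist every use of |ptrId| that may write through it,
// following access chains and object copies down to the actual stores and
// copies; plain loads are skipped.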
void AggressiveDCEPass::AddStores(uint32_t ptrId) {
  get_def_use_mgr()->ForEachUser(ptrId, [this, ptrId](Instruction* user) {
    switch (user->opcode()) {
      case SpvOpAccessChain:
      case SpvOpInBoundsAccessChain:
      case SpvOpCopyObject:
        this->AddStores(user->result_id());
        break;
      case SpvOpLoad:
        break;
      case SpvOpCopyMemory:
      case SpvOpCopyMemorySized:
        if (user->GetSingleWordInOperand(kCopyMemoryTargetAddrInIdx) == ptrId) {
          AddToWorklist(user);
        }
        break;
      // In the default case, assume the instruction stores through the
      // pointer, e.g. frexp, modf, or a function call.
      case SpvOpStore:
      default:
        AddToWorklist(user);
        break;
    }
  });
}

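// Returns true if every extension declared by the module is in the whitelist
// of extensions this pass knows how to handle.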
bool AggressiveDCEPass::AllExtensionsSupported() const {
  // If any extension is not in the whitelist, return false.
  for (auto& ei : get_module()->extensions()) {
    const char* extName =
        reinterpret_cast<const char*>(&ei.GetInOperand(0).words[0]);
    if (extensions_whitelist_.find(extName) == extensions_whitelist_.end())
      return false;
  }
  return true;
}

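// An instruction is dead if it has not been marked live. Branches and
// OpUnreachable instructions can only be dead when their block is a
// structured (selection or loop) header; elsewhere they are always kept.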
bool AggressiveDCEPass::IsDead(Instruction* inst) {
  if (IsLive(inst)) return false;
  if ((inst->IsBranch() || inst->opcode() == SpvOpUnreachable) &&
      !IsStructuredHeader(context()->get_instr_block(inst), nullptr, nullptr,
                          nullptr))
    return false;
  return true;
}

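// Returns true if the target of |inst| (its first in-operand) is dead. A
// decoration group target is considered dead only if no OpGroupDecorate or
// OpGroupMemberDecorate still references it.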
bool AggressiveDCEPass::IsTargetDead(Instruction* inst) {
  const uint32_t tId = inst->GetSingleWordInOperand(0);
  Instruction* tInst = get_def_use_mgr()->GetDef(tId);
  if (IsAnnotationInst(tInst->opcode())) {
    // This must be a decoration group. We go through annotations in a specific
    // order. So if this is not used by any group or group member decorates, it
    // is dead.
    assert(tInst->opcode() == SpvOpDecorationGroup);
    bool dead = true;
    get_def_use_mgr()->ForEachUser(tInst, [&dead](Instruction* user) {
      if (user->opcode() == SpvOpGroupDecorate ||
          user->opcode() == SpvOpGroupMemberDecorate)
        dead = false;
    });
    return dead;
  }
  return IsDead(tInst);
}

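// Treats |varId| as having been loaded: if it is a local variable that has
// not been processed before, mark all stores to it as live.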
void AggressiveDCEPass::ProcessLoad(uint32_t varId) {
  // Only process locals
  if (!IsLocalVar(varId)) return;
  // Return if already processed
  if (live_local_vars_.find(varId) != live_local_vars_.end()) return;
  // Mark all stores to varId as live
  AddStores(varId);
  // Cache varId as processed
  live_local_vars_.insert(varId);
}

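// Returns true if |bp| is a structured header block, i.e. it carries a merge
// instruction. On success, optionally returns the merge instruction, the
// block's terminating branch, and the merge block id through the out-params,
// each of which may be null.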
bool AggressiveDCEPass::IsStructuredHeader(BasicBlock* bp,
                                           Instruction** mergeInst,
                                           Instruction** branchInst,
                                           uint32_t* mergeBlockId) {
  if (!bp) return false;
  Instruction* mi = bp->GetMergeInst();
  if (mi == nullptr) return false;
  Instruction* bri = &*bp->tail();
  if (branchInst != nullptr) *branchInst = bri;
  if (mergeInst != nullptr) *mergeInst = mi;
  if (mergeBlockId != nullptr) *mergeBlockId = mi->GetSingleWordInOperand(0);
  return true;
}

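// Walks |structuredOrder| once, maintaining a stack of the currently open
// constructs, to fill in block2headerBranch_, header2nextHeaderBranch_,
// branch2merge_ and structured_order_index_.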
void AggressiveDCEPass::ComputeBlock2HeaderMaps(
    std::list<BasicBlock*>& structuredOrder) {
  block2headerBranch_.clear();
  header2nextHeaderBranch_.clear();
  branch2merge_.clear();
  structured_order_index_.clear();
  std::stack<Instruction*> currentHeaderBranch;
  currentHeaderBranch.push(nullptr);
  uint32_t currentMergeBlockId = 0;
  uint32_t index = 0;
  for (auto bi = structuredOrder.begin(); bi != structuredOrder.end();
       ++bi, ++index) {
    structured_order_index_[*bi] = index;
    // If this block is the merge block of the current control construct,
    // we are leaving the current construct so we must update state
    if ((*bi)->id() == currentMergeBlockId) {
      currentHeaderBranch.pop();
      Instruction* chb = currentHeaderBranch.top();
      if (chb != nullptr)
        currentMergeBlockId = branch2merge_[chb]->GetSingleWordInOperand(0);
    }
    Instruction* mergeInst;
    Instruction* branchInst;
    uint32_t mergeBlockId;
    bool is_header =
        IsStructuredHeader(*bi, &mergeInst, &branchInst, &mergeBlockId);
    // Map header block to next enclosing header.
    if (is_header) header2nextHeaderBranch_[*bi] = currentHeaderBranch.top();
    // If this is a loop header, update state first so the block will map to
    // itself.
    if (is_header && mergeInst->opcode() == SpvOpLoopMerge) {
      currentHeaderBranch.push(branchInst);
      branch2merge_[branchInst] = mergeInst;
      currentMergeBlockId = mergeBlockId;
    }
    // Map the block to the current construct.
    block2headerBranch_[*bi] = currentHeaderBranch.top();
    // If this is an if header, update state so following blocks map to the if.
    if (is_header && mergeInst->opcode() == SpvOpSelectionMerge) {
      currentHeaderBranch.push(branchInst);
      branch2merge_[branchInst] = mergeInst;
      currentMergeBlockId = mergeBlockId;
    }
  }
}

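// Appends an unconditional OpBranch to |labelId| at the end of block |bp| and
// registers the new instruction with the def-use and block analyses.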
void AggressiveDCEPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  std::unique_ptr<Instruction> newBranch(
      new Instruction(context(), SpvOpBranch, 0, 0,
                      {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  context()->AnalyzeDefUse(&*newBranch);
  context()->set_instr_block(&*newBranch, bp);
  bp->AddInstruction(std::move(newBranch));
}

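// Given a live OpSelectionMerge or OpLoopMerge |mergeInst|, marks as live the
// breaks out of its construct (branches from inside it to the merge block)
// and, for loops, the continues (branches to the continue target).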
void AggressiveDCEPass::AddBreaksAndContinuesToWorklist(
    Instruction* mergeInst) {
  assert(mergeInst->opcode() == SpvOpSelectionMerge ||
         mergeInst->opcode() == SpvOpLoopMerge);

  BasicBlock* header = context()->get_instr_block(mergeInst);
  uint32_t headerIndex = structured_order_index_[header];
  const uint32_t mergeId = mergeInst->GetSingleWordInOperand(0);
  BasicBlock* merge = context()->get_instr_block(mergeId);
  uint32_t mergeIndex = structured_order_index_[merge];
  get_def_use_mgr()->ForEachUser(
      mergeId, [headerIndex, mergeIndex, this](Instruction* user) {
        if (!user->IsBranch()) return;
        BasicBlock* block = context()->get_instr_block(user);
        uint32_t index = structured_order_index_[block];
        if (headerIndex < index && index < mergeIndex) {
          // This is a break from the loop.
          AddToWorklist(user);
          // Add branch's merge if there is one.
          Instruction* userMerge = branch2merge_[user];
          if (userMerge != nullptr) AddToWorklist(userMerge);
        }
      });

  if (mergeInst->opcode() != SpvOpLoopMerge) {
    return;
  }

  // For loops we need to find the continues as well.
  const uint32_t contId =
      mergeInst->GetSingleWordInOperand(kLoopMergeContinueBlockIdInIdx);
  get_def_use_mgr()->ForEachUser(contId, [&contId, this](Instruction* user) {
    SpvOp op = user->opcode();
    if (op == SpvOpBranchConditional || op == SpvOpSwitch) {
      // A conditional branch or switch can only be a continue if it does not
      // have a merge instruction or its merge block is not the continue block.
      Instruction* hdrMerge = branch2merge_[user];
      if (hdrMerge != nullptr && hdrMerge->opcode() == SpvOpSelectionMerge) {
        uint32_t hdrMergeId =
            hdrMerge->GetSingleWordInOperand(kSelectionMergeMergeBlockIdInIdx);
        if (hdrMergeId == contId) return;
        // Need to mark merge instruction too
        AddToWorklist(hdrMerge);
      }
    } else if (op == SpvOpBranch) {
      // An unconditional branch can only be a continue if it is not
      // branching to its own merge block.
      BasicBlock* blk = context()->get_instr_block(user);
      Instruction* hdrBranch = block2headerBranch_[blk];
      if (hdrBranch == nullptr) return;
      Instruction* hdrMerge = branch2merge_[hdrBranch];
      if (hdrMerge->opcode() == SpvOpLoopMerge) return;
      uint32_t hdrMergeId =
          hdrMerge->GetSingleWordInOperand(kSelectionMergeMergeBlockIdInIdx);
      if (contId == hdrMergeId) return;
    } else {
      return;
    }
    AddToWorklist(user);
  });
}

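// Runs the mark phase for |func|: seeds the worklist with instructions that
// have observable side effects, computes the closure of live instructions,
// then queues the remaining dead instructions for removal and, where a
// structured construct was removed, branches directly to its merge block.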
bool AggressiveDCEPass::AggressiveDCE(Function* func) {
  // Mark function parameters as live.
  AddToWorklist(&func->DefInst());
  func->ForEachParam(
      [this](const Instruction* param) {
        AddToWorklist(const_cast<Instruction*>(param));
      },
      false);

  // Compute map from block to controlling conditional branch
  std::list<BasicBlock*> structuredOrder;
  cfg()->ComputeStructuredOrder(func, &*func->begin(), &structuredOrder);
  ComputeBlock2HeaderMaps(structuredOrder);
  bool modified = false;
  // Add instructions with external side effects to worklist. Also add branches
  // EXCEPT those immediately contained in an "if" selection construct or a
  // loop or continue construct.
  // TODO(greg-lunarg): Handle Frexp, Modf more optimally
  call_in_func_ = false;
  func_is_entry_point_ = false;
  private_stores_.clear();
  // Stacks to keep track of when we are inside an if- or loop-construct.
  // When immediately inside an if- or loop-construct, we do not initially
  // mark branches live. All other branches must be marked live.
  std::stack<bool> assume_branches_live;
  std::stack<uint32_t> currentMergeBlockId;
  // Push sentinel values on stack for when outside of any control flow.
  assume_branches_live.push(true);
  currentMergeBlockId.push(0);
  for (auto bi = structuredOrder.begin(); bi != structuredOrder.end(); ++bi) {
    // If exiting if or loop, update stacks
    if ((*bi)->id() == currentMergeBlockId.top()) {
      assume_branches_live.pop();
      currentMergeBlockId.pop();
    }
    for (auto ii = (*bi)->begin(); ii != (*bi)->end(); ++ii) {
      SpvOp op = ii->opcode();
      switch (op) {
        case SpvOpStore: {
          uint32_t varId;
          (void)GetPtr(&*ii, &varId);
          // Mark stores as live if their variable is not function scope
          // and is not private scope. Remember private stores for possible
          // later inclusion. We cannot call IsLocalVar at this point because
          // private_like_local_ has not been set yet.
          if (IsVarOfStorage(varId, SpvStorageClassPrivate) ||
              IsVarOfStorage(varId, SpvStorageClassWorkgroup))
            private_stores_.push_back(&*ii);
          else if (!IsVarOfStorage(varId, SpvStorageClassFunction))
            AddToWorklist(&*ii);
        } break;
        case SpvOpCopyMemory:
        case SpvOpCopyMemorySized: {
          uint32_t varId;
          (void)GetPtr(ii->GetSingleWordInOperand(kCopyMemoryTargetAddrInIdx),
                       &varId);
          if (IsVarOfStorage(varId, SpvStorageClassPrivate) ||
              IsVarOfStorage(varId, SpvStorageClassWorkgroup))
            private_stores_.push_back(&*ii);
          else if (!IsVarOfStorage(varId, SpvStorageClassFunction))
            AddToWorklist(&*ii);
        } break;
        case SpvOpLoopMerge: {
          assume_branches_live.push(false);
          currentMergeBlockId.push(
              ii->GetSingleWordInOperand(kLoopMergeMergeBlockIdInIdx));
        } break;
        case SpvOpSelectionMerge: {
          assume_branches_live.push(false);
          currentMergeBlockId.push(
              ii->GetSingleWordInOperand(kSelectionMergeMergeBlockIdInIdx));
        } break;
        case SpvOpSwitch:
        case SpvOpBranch:
        case SpvOpBranchConditional:
        case SpvOpUnreachable: {
          if (assume_branches_live.top()) {
            AddToWorklist(&*ii);
          }
        } break;
        default: {
          // Function calls, atomics, function params, function returns, etc.
          // TODO(greg-lunarg): function calls live only if write to non-local
          if (!ii->IsOpcodeSafeToDelete()) {
            AddToWorklist(&*ii);
          }
          // Remember function calls
          if (op == SpvOpFunctionCall) call_in_func_ = true;
        } break;
      }
    }
  }
  // See if current function is an entry point
  for (auto& ei : get_module()->entry_points()) {
    if (ei.GetSingleWordInOperand(kEntryPointFunctionIdInIdx) ==
        func->result_id()) {
      func_is_entry_point_ = true;
      break;
    }
  }
  // If the current function is an entry point and has no function calls,
  // we can optimize private variables as locals
  private_like_local_ = func_is_entry_point_ && !call_in_func_;
  // If privates are not like local, add their stores to worklist
  if (!private_like_local_)
    for (auto& ps : private_stores_) AddToWorklist(ps);
  // Perform closure on live instruction set.
  while (!worklist_.empty()) {
    Instruction* liveInst = worklist_.front();
    // Add all operand instructions if not already live
    liveInst->ForEachInId([&liveInst, this](const uint32_t* iid) {
      Instruction* inInst = get_def_use_mgr()->GetDef(*iid);
      // Do not add label if an operand of a branch. This is not needed
      // as part of live code discovery and can create false live code,
      // for example, the branch to a header of a loop.
      if (inInst->opcode() == SpvOpLabel && liveInst->IsBranch()) return;
      AddToWorklist(inInst);
    });
    if (liveInst->type_id() != 0) {
      AddToWorklist(get_def_use_mgr()->GetDef(liveInst->type_id()));
    }
    // If in a structured if or loop construct, add the controlling
    // conditional branch and its merge.
    BasicBlock* blk = context()->get_instr_block(liveInst);
    Instruction* branchInst = block2headerBranch_[blk];
    if (branchInst != nullptr) {
      AddToWorklist(branchInst);
      Instruction* mergeInst = branch2merge_[branchInst];
      AddToWorklist(mergeInst);
    }
    // If the block is a header, add the next outermost controlling
    // conditional branch and its merge.
    Instruction* nextBranchInst = header2nextHeaderBranch_[blk];
    if (nextBranchInst != nullptr) {
      AddToWorklist(nextBranchInst);
      Instruction* mergeInst = branch2merge_[nextBranchInst];
      AddToWorklist(mergeInst);
    }
    // If this is a load from a local variable, add all of the variable's
    // stores if the variable is not already live.
    if (liveInst->opcode() == SpvOpLoad || liveInst->IsAtomicWithLoad()) {
      uint32_t varId;
      (void)GetPtr(liveInst, &varId);
      if (varId != 0) {
        ProcessLoad(varId);
      }
      // Process memory copies like loads
    } else if (liveInst->opcode() == SpvOpCopyMemory ||
               liveInst->opcode() == SpvOpCopyMemorySized) {
      uint32_t varId;
      (void)GetPtr(liveInst->GetSingleWordInOperand(kCopyMemorySourceAddrInIdx),
                   &varId);
      if (varId != 0) {
        ProcessLoad(varId);
      }
      // If merge, add other branches that are part of its control structure
    } else if (liveInst->opcode() == SpvOpLoopMerge ||
               liveInst->opcode() == SpvOpSelectionMerge) {
      AddBreaksAndContinuesToWorklist(liveInst);
      // If function call, treat as if it loads from all pointer arguments
    } else if (liveInst->opcode() == SpvOpFunctionCall) {
      liveInst->ForEachInId([this](const uint32_t* iid) {
        // Skip non-ptr args
        if (!IsPtr(*iid)) return;
        uint32_t varId;
        (void)GetPtr(*iid, &varId);
        ProcessLoad(varId);
      });
      // If function parameter, treat as if its result id is loaded from
    } else if (liveInst->opcode() == SpvOpFunctionParameter) {
      ProcessLoad(liveInst->result_id());
      // We treat an OpImageTexelPointer as a load of the pointer, and
      // that value is manipulated to get the result.
    } else if (liveInst->opcode() == SpvOpImageTexelPointer) {
      uint32_t varId;
      (void)GetPtr(liveInst, &varId);
      if (varId != 0) {
        ProcessLoad(varId);
      }
    }

    // Add OpDecorateId instructions that apply to this instruction to the work
    // list. We use the decoration manager to look through the group
    // decorations to get to the OpDecorate* instructions themselves.
    auto decorations =
        get_decoration_mgr()->GetDecorationsFor(liveInst->result_id(), false);
    for (Instruction* dec : decorations) {
      // We only care about OpDecorateId instructions because they are the only
      // decorations that will reference an id that will have to be kept live
      // because of that use.
      if (dec->opcode() != SpvOpDecorateId) {
        continue;
      }
      if (dec->GetSingleWordInOperand(1) ==
          SpvDecorationHlslCounterBufferGOOGLE) {
        // These decorations should not force the use id to be live. It will be
        // removed if either the target or the in operand is dead.
        continue;
      }
      AddToWorklist(dec);
    }

    worklist_.pop();
  }

  // Kill dead instructions and remember dead blocks
  for (auto bi = structuredOrder.begin(); bi != structuredOrder.end();) {
    uint32_t mergeBlockId = 0;
    (*bi)->ForEachInst([this, &modified, &mergeBlockId](Instruction* inst) {
      if (!IsDead(inst)) return;
      if (inst->opcode() == SpvOpLabel) return;
      // If dead instruction is selection merge, remember merge block
      // for new branch at end of block
      if (inst->opcode() == SpvOpSelectionMerge ||
          inst->opcode() == SpvOpLoopMerge)
        mergeBlockId = inst->GetSingleWordInOperand(0);
      to_kill_.push_back(inst);
      modified = true;
    });
    // If a structured if or loop was deleted, add a branch to its merge
    // block, and traverse to the merge block and continue processing there.
    // We know the block still exists because the label is not deleted.
    if (mergeBlockId != 0) {
      AddBranch(mergeBlockId, *bi);
      for (++bi; (*bi)->id() != mergeBlockId; ++bi) {
      }

      auto merge_terminator = (*bi)->terminator();
      if (merge_terminator->opcode() == SpvOpUnreachable) {
        // The merge was unreachable. This is undefined behaviour so just
        // return (or return an undef). Then mark the new return as live.
        auto func_ret_type_inst = get_def_use_mgr()->GetDef(func->type_id());
        if (func_ret_type_inst->opcode() == SpvOpTypeVoid) {
          merge_terminator->SetOpcode(SpvOpReturn);
        } else {
          // Find an undef for the return value and make sure it gets kept by
          // the pass.
          auto undef_id = Type2Undef(func->type_id());
          auto undef = get_def_use_mgr()->GetDef(undef_id);
          live_insts_.Set(undef->unique_id());
          merge_terminator->SetOpcode(SpvOpReturnValue);
          merge_terminator->SetInOperands({{SPV_OPERAND_TYPE_ID, {undef_id}}});
          get_def_use_mgr()->AnalyzeInstUse(merge_terminator);
        }
        live_insts_.Set(merge_terminator->unique_id());
      }
    } else {
      ++bi;
    }
  }

  return modified;
}

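// Seeds the worklist with module-scope instructions that must always be
// treated as live: execution modes, entry points (with special handling of
// the interface list for SPIR-V 1.4 and later), and selected decorations.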
void AggressiveDCEPass::InitializeModuleScopeLiveInstructions() {
  // Keep all execution modes.
  for (auto& exec : get_module()->execution_modes()) {
    AddToWorklist(&exec);
  }
  // Keep all entry points.
  for (auto& entry : get_module()->entry_points()) {
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // In SPIR-V 1.4 and later, entry points must list all global variables
      // used. DCE can still remove non-input/output variables and update the
      // interface list. Mark the entry point as live and inputs and outputs as
      // live, but defer decisions on all other interfaces.
      live_insts_.Set(entry.unique_id());
      // The actual function is always live.
      AddToWorklist(
          get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(1u)));
      for (uint32_t i = 3; i < entry.NumInOperands(); ++i) {
        auto* var = get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(i));
        auto storage_class = var->GetSingleWordInOperand(0u);
        if (storage_class == SpvStorageClassInput ||
            storage_class == SpvStorageClassOutput) {
          AddToWorklist(var);
        }
      }
    } else {
      AddToWorklist(&entry);
    }
  }
  for (auto& anno : get_module()->annotations()) {
    if (anno.opcode() == SpvOpDecorate) {
      // Keep workgroup size.
      if (anno.GetSingleWordInOperand(1u) == SpvDecorationBuiltIn &&
          anno.GetSingleWordInOperand(2u) == SpvBuiltInWorkgroupSize) {
        AddToWorklist(&anno);
      }

      if (context()->preserve_bindings()) {
        // Keep all bindings.
        if ((anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet) ||
            (anno.GetSingleWordInOperand(1u) == SpvDecorationBinding)) {
          AddToWorklist(&anno);
        }
      }

      if (context()->preserve_spec_constants()) {
        // Keep all specialization constant instructions.
        if (anno.GetSingleWordInOperand(1u) == SpvDecorationSpecId) {
          AddToWorklist(&anno);
        }
      }
    }
  }
}

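// Top-level driver for the pass: bails out on modules this pass cannot
// handle, then runs dead-function elimination, the per-function mark phase,
// and the final sweep of dead instructions and global values.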
Pass::Status AggressiveDCEPass::ProcessImpl() {
  // Current functionality assumes shader capability
  // TODO(greg-lunarg): Handle additional capabilities
  if (!context()->get_feature_mgr()->HasCapability(SpvCapabilityShader))
    return Status::SuccessWithoutChange;

  // Current functionality assumes relaxed logical addressing (see
  // instruction.h)
  // TODO(greg-lunarg): Handle non-logical addressing
  if (context()->get_feature_mgr()->HasCapability(SpvCapabilityAddresses))
    return Status::SuccessWithoutChange;

  // The variable pointer extension is no longer needed to use the capability,
  // so we have to look for the capability.
  if (context()->get_feature_mgr()->HasCapability(
          SpvCapabilityVariablePointersStorageBuffer))
    return Status::SuccessWithoutChange;

  // If any extensions in the module are not explicitly supported,
  // return unmodified.
  if (!AllExtensionsSupported()) return Status::SuccessWithoutChange;

  // Eliminate dead functions.
  bool modified = EliminateDeadFunctions();

  InitializeModuleScopeLiveInstructions();

  // Process all entry point functions.
  ProcessFunction pfn = [this](Function* fp) { return AggressiveDCE(fp); };
  modified |= context()->ProcessEntryPointCallTree(pfn);

  // If the decoration manager is kept live then the context will try to keep
  // it up to date. ADCE deals with group decorations by changing the operands
  // of an |OpGroupDecorate| instruction directly without informing the
  // decoration manager. This can put it in an invalid state which will cause
  // an error when the context tries to update it. To avoid this problem,
  // invalidate the decoration manager upfront.
  //
  // We kill it now because it is used when processing the entry point
  // functions.
  context()->InvalidateAnalyses(IRContext::Analysis::kAnalysisDecorations);

  // Process module-level instructions. Now that all live instructions have
  // been marked, it is safe to remove dead global values.
  modified |= ProcessGlobalValues();

  // Sanity check.
  assert(to_kill_.size() == 0 || modified);

  // Kill all dead instructions.
  for (auto inst : to_kill_) {
    context()->KillInst(inst);
  }

  // Clean up the CFG, including all unreachable blocks.
  ProcessFunction cleanup = [this](Function* f) { return CFGCleanup(f); };
  modified |= context()->ProcessEntryPointCallTree(cleanup);

  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}

bool AggressiveDCEPass::EliminateDeadFunctions() {
  // Identify live functions first. Those that are not live
  // are dead. ADCE is disabled for non-shaders so we do not check for exported
  // functions here.
  std::unordered_set<const Function*> live_function_set;
  ProcessFunction mark_live = [&live_function_set](Function* fp) {
    live_function_set.insert(fp);
    return false;
  };
  context()->ProcessEntryPointCallTree(mark_live);

  bool modified = false;
  for (auto funcIter = get_module()->begin();
       funcIter != get_module()->end();) {
    if (live_function_set.count(&*funcIter) == 0) {
      modified = true;
      EliminateFunction(&*funcIter);
      funcIter = funcIter.Erase();
    } else {
      ++funcIter;
    }
  }

  return modified;
}

void AggressiveDCEPass::EliminateFunction(Function* func) {
  // Remove all of the instructions in the function body.
  func->ForEachInst([this](Instruction* inst) { context()->KillInst(inst); },
                    true);
}

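// Sweeps module-scope values: removes OpName and decoration instructions
// whose targets are dead, prunes dead members from decoration groups, queues
// dead types, constants and variables for removal, and (for SPIR-V 1.4 and
// later) drops dead ids from entry-point interface lists.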
bool AggressiveDCEPass::ProcessGlobalValues() {
  // Remove debug and annotation statements referencing dead instructions.
  // This must be done before killing the instructions, otherwise there are
  // dead objects in the def/use database.
  bool modified = false;
  Instruction* instruction = &*get_module()->debug2_begin();
  while (instruction) {
    if (instruction->opcode() != SpvOpName) {
      instruction = instruction->NextNode();
      continue;
    }

    if (IsTargetDead(instruction)) {
      instruction = context()->KillInst(instruction);
      modified = true;
    } else {
      instruction = instruction->NextNode();
    }
  }

  // This code removes all unnecessary decorations safely (see #1174). It also
  // does so in a more efficient manner than deleting them only as the targets
  // are deleted.
  std::vector<Instruction*> annotations;
  for (auto& inst : get_module()->annotations()) annotations.push_back(&inst);
  std::sort(annotations.begin(), annotations.end(), DecorationLess());
  for (auto annotation : annotations) {
    switch (annotation->opcode()) {
      case SpvOpDecorate:
      case SpvOpMemberDecorate:
      case SpvOpDecorateStringGOOGLE:
      case SpvOpMemberDecorateStringGOOGLE:
        if (IsTargetDead(annotation)) {
          context()->KillInst(annotation);
          modified = true;
        }
        break;
      case SpvOpDecorateId:
        if (IsTargetDead(annotation)) {
          context()->KillInst(annotation);
          modified = true;
        } else {
          if (annotation->GetSingleWordInOperand(1) ==
              SpvDecorationHlslCounterBufferGOOGLE) {
            // HlslCounterBuffer will reference an id other than the target.
            // If that id is dead, then the decoration can be removed as well.
            uint32_t counter_buffer_id = annotation->GetSingleWordInOperand(2);
            Instruction* counter_buffer_inst =
                get_def_use_mgr()->GetDef(counter_buffer_id);
            if (IsDead(counter_buffer_inst)) {
              context()->KillInst(annotation);
              modified = true;
            }
          }
        }
        break;
      case SpvOpGroupDecorate: {
        // Go through the targets of this group decorate. Remove each dead
        // target. If all targets are dead, remove this decoration.
        bool dead = true;
        bool removed_operand = false;
        for (uint32_t i = 1; i < annotation->NumOperands();) {
          Instruction* opInst =
              get_def_use_mgr()->GetDef(annotation->GetSingleWordOperand(i));
          if (IsDead(opInst)) {
            // Don't increment |i|.
            annotation->RemoveOperand(i);
            modified = true;
            removed_operand = true;
          } else {
            i++;
            dead = false;
          }
        }
        if (dead) {
          context()->KillInst(annotation);
          modified = true;
        } else if (removed_operand) {
          context()->UpdateDefUse(annotation);
        }
        break;
      }
      case SpvOpGroupMemberDecorate: {
        // Go through the targets of this group member decorate. Remove each
        // dead target (and member index). If all targets are dead, remove this
        // decoration.
        bool dead = true;
        bool removed_operand = false;
        for (uint32_t i = 1; i < annotation->NumOperands();) {
          Instruction* opInst =
              get_def_use_mgr()->GetDef(annotation->GetSingleWordOperand(i));
          if (IsDead(opInst)) {
            // Don't increment |i|.
            annotation->RemoveOperand(i + 1);
            annotation->RemoveOperand(i);
            modified = true;
            removed_operand = true;
          } else {
            i += 2;
            dead = false;
          }
        }
        if (dead) {
          context()->KillInst(annotation);
          modified = true;
        } else if (removed_operand) {
          context()->UpdateDefUse(annotation);
        }
        break;
      }
      case SpvOpDecorationGroup:
        // By the time we hit decoration groups we've checked everything that
        // can target them. So if they have no uses they must be dead.
        if (get_def_use_mgr()->NumUsers(annotation) == 0) {
          context()->KillInst(annotation);
          modified = true;
        }
        break;
      default:
        assert(false);
        break;
    }
  }

  // Since ADCE is disabled for non-shaders, we don't check for export linkage
  // attributes here.
  for (auto& val : get_module()->types_values()) {
    if (IsDead(&val)) {
      // Save forwarded pointer if pointer is live since closure does not mark
      // this live as it does not have a result id. This is a little too
      // conservative since it is not known if the structure type that needed
      // it is still live. TODO(greg-lunarg): Only save if needed.
      if (val.opcode() == SpvOpTypeForwardPointer) {
        uint32_t ptr_ty_id = val.GetSingleWordInOperand(0);
        Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
        if (!IsDead(ptr_ty_inst)) continue;
      }
      to_kill_.push_back(&val);
      modified = true;
    }
  }

  if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
    // Remove the dead interface variables from the entry point interface list.
    for (auto& entry : get_module()->entry_points()) {
      std::vector<Operand> new_operands;
      for (uint32_t i = 0; i < entry.NumInOperands(); ++i) {
        if (i < 3) {
          // Execution model, function id and name are always valid.
          new_operands.push_back(entry.GetInOperand(i));
        } else {
          auto* var =
              get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(i));
          if (!IsDead(var)) {
            new_operands.push_back(entry.GetInOperand(i));
          }
        }
      }
      if (new_operands.size() != entry.NumInOperands()) {
        entry.SetInOperands(std::move(new_operands));
        get_def_use_mgr()->UpdateDefUse(&entry);
      }
    }
  }

  return modified;
}

AggressiveDCEPass::AggressiveDCEPass() = default;

Pass::Status AggressiveDCEPass::Process() {
  // Initialize extensions whitelist
  InitExtensions();
  return ProcessImpl();
}

void AggressiveDCEPass::InitExtensions() {
  extensions_whitelist_.clear();
  extensions_whitelist_.insert({
      "SPV_AMD_shader_explicit_vertex_parameter",
      "SPV_AMD_shader_trinary_minmax",
      "SPV_AMD_gcn_shader",
      "SPV_KHR_shader_ballot",
      "SPV_AMD_shader_ballot",
      "SPV_AMD_gpu_shader_half_float",
      "SPV_KHR_shader_draw_parameters",
      "SPV_KHR_subgroup_vote",
      "SPV_KHR_16bit_storage",
      "SPV_KHR_device_group",
      "SPV_KHR_multiview",
      "SPV_NVX_multiview_per_view_attributes",
      "SPV_NV_viewport_array2",
      "SPV_NV_stereo_view_rendering",
      "SPV_NV_sample_mask_override_coverage",
      "SPV_NV_geometry_shader_passthrough",
      "SPV_AMD_texture_gather_bias_lod",
      "SPV_KHR_storage_buffer_storage_class",
      // SPV_KHR_variable_pointers
      //   Currently do not support extended pointer expressions
      "SPV_AMD_gpu_shader_int16",
      "SPV_KHR_post_depth_coverage",
      "SPV_KHR_shader_atomic_counter_ops",
      "SPV_EXT_shader_stencil_export",
      "SPV_EXT_shader_viewport_index_layer",
      "SPV_AMD_shader_image_load_store_lod",
      "SPV_AMD_shader_fragment_mask",
      "SPV_EXT_fragment_fully_covered",
      "SPV_AMD_gpu_shader_half_float_fetch",
      "SPV_GOOGLE_decorate_string",
      "SPV_GOOGLE_hlsl_functionality1",
      "SPV_GOOGLE_user_type",
      "SPV_NV_shader_subgroup_partitioned",
      "SPV_EXT_descriptor_indexing",
      "SPV_NV_fragment_shader_barycentric",
      "SPV_NV_compute_shader_derivatives",
      "SPV_NV_shader_image_footprint",
      "SPV_NV_shading_rate",
      "SPV_NV_mesh_shader",
      "SPV_NV_ray_tracing",
      "SPV_EXT_fragment_invocation_density",
      "SPV_EXT_physical_storage_buffer",
  });
}

}  // namespace opt
}  // namespace spvtools
|