/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/sksl/SkSLCompiler.h"

#include <memory>
#include <unordered_set>

#include "src/sksl/SkSLAnalysis.h"
#include "src/sksl/SkSLByteCodeGenerator.h"
#include "src/sksl/SkSLCFGGenerator.h"
#include "src/sksl/SkSLCPPCodeGenerator.h"
#include "src/sksl/SkSLGLSLCodeGenerator.h"
#include "src/sksl/SkSLHCodeGenerator.h"
#include "src/sksl/SkSLIRGenerator.h"
#include "src/sksl/SkSLMetalCodeGenerator.h"
#include "src/sksl/SkSLPipelineStageCodeGenerator.h"
#include "src/sksl/SkSLRehydrator.h"
#include "src/sksl/SkSLSPIRVCodeGenerator.h"
#include "src/sksl/SkSLSPIRVtoHLSL.h"
#include "src/sksl/ir/SkSLEnum.h"
#include "src/sksl/ir/SkSLExpression.h"
#include "src/sksl/ir/SkSLExpressionStatement.h"
#include "src/sksl/ir/SkSLFunctionCall.h"
#include "src/sksl/ir/SkSLIntLiteral.h"
#include "src/sksl/ir/SkSLModifiersDeclaration.h"
#include "src/sksl/ir/SkSLNop.h"
#include "src/sksl/ir/SkSLSymbolTable.h"
#include "src/sksl/ir/SkSLTernaryExpression.h"
#include "src/sksl/ir/SkSLUnresolvedFunction.h"
#include "src/sksl/ir/SkSLVarDeclarations.h"

#include <fstream>

#if !defined(SKSL_STANDALONE) && SK_SUPPORT_GPU
#include "include/gpu/GrContextOptions.h"
#include "src/gpu/GrShaderCaps.h"
#endif

#ifdef SK_ENABLE_SPIRV_VALIDATION
#include "spirv-tools/libspirv.hpp"
#endif
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>

#if !SKSL_STANDALONE

#include "src/sksl/generated/sksl_fp.dehydrated.sksl"
#include "src/sksl/generated/sksl_frag.dehydrated.sksl"
#include "src/sksl/generated/sksl_geom.dehydrated.sksl"
#include "src/sksl/generated/sksl_gpu.dehydrated.sksl"
#include "src/sksl/generated/sksl_interp.dehydrated.sksl"
#include "src/sksl/generated/sksl_pipeline.dehydrated.sksl"
#include "src/sksl/generated/sksl_vert.dehydrated.sksl"

#else
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>

// GN generates or copies all of these files to the skslc executable directory
static const char SKSL_GPU_INCLUDE[] = "sksl_gpu.sksl";
static const char SKSL_INTERP_INCLUDE[] = "sksl_interp.sksl";
static const char SKSL_VERT_INCLUDE[] = "sksl_vert.sksl";
static const char SKSL_FRAG_INCLUDE[] = "sksl_frag.sksl";
static const char SKSL_GEOM_INCLUDE[] = "sksl_geom.sksl";
static const char SKSL_FP_INCLUDE[] = "sksl_fp.sksl";
static const char SKSL_PIPELINE_INCLUDE[] = "sksl_pipeline.sksl";

#endif

namespace SkSL {

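// Moves every function definition and enum in `src` into `target`, keyed by the function's
// description or the enum's type name; all other kinds of program elements are left in `src`.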
static void grab_intrinsics(std::vector<std::unique_ptr<ProgramElement>>* src,
                            IRIntrinsicMap* target) {
    for (auto iter = src->begin(); iter != src->end(); ) {
        std::unique_ptr<ProgramElement>& element = *iter;
        switch (element->kind()) {
            case ProgramElement::Kind::kFunction: {
                FunctionDefinition& f = element->as<FunctionDefinition>();
                SkASSERT(f.fDeclaration.fBuiltin);
                target->insertOrDie(f.fDeclaration.description(), std::move(element));
                iter = src->erase(iter);
                break;
            }
            case ProgramElement::Kind::kEnum: {
                Enum& e = element->as<Enum>();
                target->insertOrDie(e.fTypeName, std::move(element));
                iter = src->erase(iter);
                break;
            }
            default:
                // Unsupported element, leave it in the list.
                ++iter;
                break;
        }
    }
}

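// Resets the call count of every function definition in `src` back to zero.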
static void reset_call_counts(std::vector<std::unique_ptr<ProgramElement>>* src) {
    for (std::unique_ptr<ProgramElement>& element : *src) {
        if (element->is<FunctionDefinition>()) {
            const FunctionDeclaration& fnDecl = element->as<FunctionDefinition>().fDeclaration;
            fnDecl.fCallCount = 0;
        }
    }
}

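// The Compiler constructor registers all of the built-in types with the root symbol table, then
// loads the GPU, vertex, and fragment pre-includes (rehydrated from dehydrated data in normal
// builds, or parsed from the .sksl text files in standalone builds).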
Compiler::Compiler(Flags flags)
        : fGPUIntrinsics(std::make_unique<IRIntrinsicMap>(/*parent=*/nullptr))
        , fInterpreterIntrinsics(std::make_unique<IRIntrinsicMap>(/*parent=*/nullptr))
        , fFlags(flags)
        , fContext(std::make_shared<Context>())
        , fErrorCount(0) {
    fRootSymbolTable = std::make_shared<SymbolTable>(this);
    fIRGenerator =
            std::make_unique<IRGenerator>(fContext.get(), &fInliner, fRootSymbolTable, *this);
#define ADD_TYPE(t) fRootSymbolTable->addWithoutOwnership(fContext->f ## t ## _Type->fName, \
                                                          fContext->f ## t ## _Type.get())
    ADD_TYPE(Void);
    ADD_TYPE(Float);
    ADD_TYPE(Float2);
    ADD_TYPE(Float3);
    ADD_TYPE(Float4);
    ADD_TYPE(Half);
    ADD_TYPE(Half2);
    ADD_TYPE(Half3);
    ADD_TYPE(Half4);
    ADD_TYPE(Int);
    ADD_TYPE(Int2);
    ADD_TYPE(Int3);
    ADD_TYPE(Int4);
    ADD_TYPE(UInt);
    ADD_TYPE(UInt2);
    ADD_TYPE(UInt3);
    ADD_TYPE(UInt4);
    ADD_TYPE(Short);
    ADD_TYPE(Short2);
    ADD_TYPE(Short3);
    ADD_TYPE(Short4);
    ADD_TYPE(UShort);
    ADD_TYPE(UShort2);
    ADD_TYPE(UShort3);
    ADD_TYPE(UShort4);
    ADD_TYPE(Byte);
    ADD_TYPE(Byte2);
    ADD_TYPE(Byte3);
    ADD_TYPE(Byte4);
    ADD_TYPE(UByte);
    ADD_TYPE(UByte2);
    ADD_TYPE(UByte3);
    ADD_TYPE(UByte4);
    ADD_TYPE(Bool);
    ADD_TYPE(Bool2);
    ADD_TYPE(Bool3);
    ADD_TYPE(Bool4);
    ADD_TYPE(Float2x2);
    ADD_TYPE(Float2x3);
    ADD_TYPE(Float2x4);
    ADD_TYPE(Float3x2);
    ADD_TYPE(Float3x3);
    ADD_TYPE(Float3x4);
    ADD_TYPE(Float4x2);
    ADD_TYPE(Float4x3);
    ADD_TYPE(Float4x4);
    ADD_TYPE(Half2x2);
    ADD_TYPE(Half2x3);
    ADD_TYPE(Half2x4);
    ADD_TYPE(Half3x2);
    ADD_TYPE(Half3x3);
    ADD_TYPE(Half3x4);
    ADD_TYPE(Half4x2);
    ADD_TYPE(Half4x3);
    ADD_TYPE(Half4x4);
    ADD_TYPE(GenType);
    ADD_TYPE(GenHType);
    ADD_TYPE(GenIType);
    ADD_TYPE(GenUType);
    ADD_TYPE(GenBType);
    ADD_TYPE(Mat);
    ADD_TYPE(Vec);
    ADD_TYPE(GVec);
    ADD_TYPE(GVec2);
    ADD_TYPE(GVec3);
    ADD_TYPE(GVec4);
    ADD_TYPE(HVec);
    ADD_TYPE(IVec);
    ADD_TYPE(UVec);
    ADD_TYPE(SVec);
    ADD_TYPE(USVec);
    ADD_TYPE(ByteVec);
    ADD_TYPE(UByteVec);
    ADD_TYPE(BVec);

    ADD_TYPE(Sampler1D);
    ADD_TYPE(Sampler2D);
    ADD_TYPE(Sampler3D);
    ADD_TYPE(SamplerExternalOES);
    ADD_TYPE(SamplerCube);
    ADD_TYPE(Sampler2DRect);
    ADD_TYPE(Sampler1DArray);
    ADD_TYPE(Sampler2DArray);
    ADD_TYPE(SamplerCubeArray);
    ADD_TYPE(SamplerBuffer);
    ADD_TYPE(Sampler2DMS);
    ADD_TYPE(Sampler2DMSArray);

    ADD_TYPE(ISampler2D);

    ADD_TYPE(Image2D);
    ADD_TYPE(IImage2D);

    ADD_TYPE(SubpassInput);
    ADD_TYPE(SubpassInputMS);

    ADD_TYPE(GSampler1D);
    ADD_TYPE(GSampler2D);
    ADD_TYPE(GSampler3D);
    ADD_TYPE(GSamplerCube);
    ADD_TYPE(GSampler2DRect);
    ADD_TYPE(GSampler1DArray);
    ADD_TYPE(GSampler2DArray);
    ADD_TYPE(GSamplerCubeArray);
    ADD_TYPE(GSamplerBuffer);
    ADD_TYPE(GSampler2DMS);
    ADD_TYPE(GSampler2DMSArray);

    ADD_TYPE(Sampler1DShadow);
    ADD_TYPE(Sampler2DShadow);
    ADD_TYPE(SamplerCubeShadow);
    ADD_TYPE(Sampler2DRectShadow);
    ADD_TYPE(Sampler1DArrayShadow);
    ADD_TYPE(Sampler2DArrayShadow);
    ADD_TYPE(SamplerCubeArrayShadow);
    ADD_TYPE(GSampler2DArrayShadow);
    ADD_TYPE(GSamplerCubeArrayShadow);
    ADD_TYPE(FragmentProcessor);
    ADD_TYPE(Sampler);
    ADD_TYPE(Texture2D);

    StringFragment fpAliasName("shader");
    fRootSymbolTable->addWithoutOwnership(fpAliasName, fContext->fFragmentProcessor_Type.get());

    StringFragment skCapsName("sk_Caps");
    fRootSymbolTable->add(
            skCapsName,
            std::make_unique<Variable>(/*offset=*/-1, Modifiers(), skCapsName,
                                       fContext->fSkCaps_Type.get(), Variable::kGlobal_Storage));

    fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
    std::vector<std::unique_ptr<ProgramElement>> gpuIntrinsics;
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>
#if SKSL_STANDALONE
    this->processIncludeFile(Program::kFragment_Kind, SKSL_GPU_INCLUDE, fRootSymbolTable,
                             &gpuIntrinsics, &fGpuSymbolTable);
    this->processIncludeFile(Program::kVertex_Kind, SKSL_VERT_INCLUDE, fGpuSymbolTable,
                             &fVertexInclude, &fVertexSymbolTable);
    this->processIncludeFile(Program::kFragment_Kind, SKSL_FRAG_INCLUDE, fGpuSymbolTable,
                             &fFragmentInclude, &fFragmentSymbolTable);
#else
    {
        Rehydrator rehydrator(fContext.get(), fRootSymbolTable, this, SKSL_INCLUDE_sksl_gpu,
                              SKSL_INCLUDE_sksl_gpu_LENGTH);
        fGpuSymbolTable = rehydrator.symbolTable();
        gpuIntrinsics = rehydrator.elements();
    }
    {
        Rehydrator rehydrator(fContext.get(), fGpuSymbolTable, this, SKSL_INCLUDE_sksl_vert,
                              SKSL_INCLUDE_sksl_vert_LENGTH);
        fVertexSymbolTable = rehydrator.symbolTable();
        fVertexInclude = rehydrator.elements();
    }
    {
        Rehydrator rehydrator(fContext.get(), fGpuSymbolTable, this, SKSL_INCLUDE_sksl_frag,
                              SKSL_INCLUDE_sksl_frag_LENGTH);
        fFragmentSymbolTable = rehydrator.symbolTable();
        fFragmentInclude = rehydrator.elements();
    }
#endif
    // Call counts are used to track dead-stripping and inlinability within the program being
    // currently compiled, and always should start at zero for a new program. Zero out any call
    // counts that were registered during the assembly of the intrinsics/include data. (If we
    // actually use calls from inside the intrinsics, we will clone them into the program and they
    // will get new call counts.)
    reset_call_counts(&gpuIntrinsics);
    reset_call_counts(&fVertexInclude);
    reset_call_counts(&fFragmentInclude);

    grab_intrinsics(&gpuIntrinsics, fGPUIntrinsics.get());
    SkASSERT(gpuIntrinsics.empty());
}

Compiler::~Compiler() {}

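// Lazily loads the geometry pre-include the first time it is needed (rehydrated in normal builds,
// parsed from SKSL_GEOM_INCLUDE in standalone builds).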
void Compiler::loadGeometryIntrinsics() {
    if (fGeometrySymbolTable) {
        return;
    }
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>
#if !SKSL_STANDALONE
    {
        Rehydrator rehydrator(fContext.get(), fGpuSymbolTable, this, SKSL_INCLUDE_sksl_geom,
                              SKSL_INCLUDE_sksl_geom_LENGTH);
        fGeometrySymbolTable = rehydrator.symbolTable();
        fGeometryInclude = rehydrator.elements();
    }
#else
    this->processIncludeFile(Program::kGeometry_Kind, SKSL_GEOM_INCLUDE, fGpuSymbolTable,
                             &fGeometryInclude, &fGeometrySymbolTable);
#endif
}

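// Lazily loads the pipeline-stage pre-include the first time it is needed.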
void Compiler::loadPipelineIntrinsics() {
    if (fPipelineSymbolTable) {
        return;
    }
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>
#if !SKSL_STANDALONE
    {
        Rehydrator rehydrator(fContext.get(), fGpuSymbolTable, this,
                              SKSL_INCLUDE_sksl_pipeline,
                              SKSL_INCLUDE_sksl_pipeline_LENGTH);
        fPipelineSymbolTable = rehydrator.symbolTable();
        fPipelineInclude = rehydrator.elements();
    }
#else
    this->processIncludeFile(Program::kPipelineStage_Kind, SKSL_PIPELINE_INCLUDE,
                             fGpuSymbolTable, &fPipelineInclude, &fPipelineSymbolTable);
#endif
}

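// Lazily loads the interpreter pre-include and transfers its functions and enums into
// fInterpreterIntrinsics.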
void Compiler::loadInterpreterIntrinsics() {
    if (fInterpreterSymbolTable) {
        return;
    }
    std::vector<std::unique_ptr<ProgramElement>> interpIntrinsics;
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>
#if !SKSL_STANDALONE
    {
        Rehydrator rehydrator(fContext.get(), fRootSymbolTable, this,
                              SKSL_INCLUDE_sksl_interp,
                              SKSL_INCLUDE_sksl_interp_LENGTH);
        fInterpreterSymbolTable = rehydrator.symbolTable();
        interpIntrinsics = rehydrator.elements();
    }
#else
    this->processIncludeFile(Program::kGeneric_Kind, SKSL_INTERP_INCLUDE,
                             fIRGenerator->fSymbolTable, &interpIntrinsics,
                             &fInterpreterSymbolTable);
#endif
    grab_intrinsics(&interpIntrinsics, fInterpreterIntrinsics.get());
    SkASSERT(interpIntrinsics.empty());
}

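// Parses a pre-include file from disk into a list of IR elements and a symbol table derived from
// `base`, returning both through the out-parameters.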
void Compiler::processIncludeFile(Program::Kind kind, const char* path,
                                  std::shared_ptr<SymbolTable> base,
                                  std::vector<std::unique_ptr<ProgramElement>>* outElements,
                                  std::shared_ptr<SymbolTable>* outSymbolTable) {
    std::ifstream in(path);
    std::unique_ptr<String> text = std::make_unique<String>(std::istreambuf_iterator<char>(in),
                                                            std::istreambuf_iterator<char>());
    if (in.rdstate()) {
        printf("error reading %s\n", path);
        abort();
    }
    const String* source = fRootSymbolTable->takeOwnershipOfString(std::move(text));
    fSource = source;
    Program::Settings settings;
#if !defined(SKSL_STANDALONE) && SK_SUPPORT_GPU
    GrContextOptions opts;
    GrShaderCaps caps(opts);
    settings.fCaps = &caps;
#endif
    SkASSERT(fIRGenerator->fCanInline);
    fIRGenerator->fCanInline = false;
    fIRGenerator->start(&settings, base ? base : fRootSymbolTable, nullptr, true);
    fIRGenerator->convertProgram(kind, source->c_str(), source->length(), outElements);
    fIRGenerator->fCanInline = true;
    if (this->fErrorCount) {
        printf("Unexpected errors: %s\n", this->fErrorText.c_str());
    }
    SkASSERT(!fErrorCount);
    *outSymbolTable = fIRGenerator->fSymbolTable;
#ifdef SK_DEBUG
    fSource = nullptr;
#endif
    fIRGenerator->finish();
}

// add the definition created by assigning to the lvalue to the definition set
void Compiler::addDefinition(const Expression* lvalue, std::unique_ptr<Expression>* expr,
                             DefinitionMap* definitions) {
    switch (lvalue->kind()) {
        case Expression::Kind::kVariableReference: {
            const Variable& var = *lvalue->as<VariableReference>().fVariable;
            if (var.fStorage == Variable::kLocal_Storage) {
                (*definitions)[&var] = expr;
            }
            break;
        }
        case Expression::Kind::kSwizzle:
            // We consider the variable written to as long as at least some of its components have
            // been written to. This will lead to some false negatives (we won't catch it if you
            // write to foo.x and then read foo.y), but being stricter could lead to false positives
            // (we write to foo.x, and then pass foo to a function which happens to only read foo.x,
            // but since we pass foo as a whole it is flagged as an error) unless we perform a much
            // more complicated whole-program analysis. This is probably good enough.
            this->addDefinition(lvalue->as<Swizzle>().fBase.get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
            break;
        case Expression::Kind::kIndex:
            // see comments in Swizzle
            this->addDefinition(lvalue->as<IndexExpression>().fBase.get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
            break;
        case Expression::Kind::kFieldAccess:
            // see comments in Swizzle
            this->addDefinition(lvalue->as<FieldAccess>().fBase.get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
            break;
        case Expression::Kind::kTernary:
            // To simplify analysis, we just pretend that we write to both sides of the ternary.
            // This allows for false positives (meaning we fail to detect that a variable might not
            // have been assigned), but is preferable to false negatives.
            this->addDefinition(lvalue->as<TernaryExpression>().fIfTrue.get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
            this->addDefinition(lvalue->as<TernaryExpression>().fIfFalse.get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
            break;
        case Expression::Kind::kExternalValue:
            break;
        default:
            // not an lvalue, can't happen
            SkASSERT(false);
    }
}

// add local variables defined by this node to the set
void Compiler::addDefinitions(const BasicBlock::Node& node,
                              DefinitionMap* definitions) {
    if (node.isExpression()) {
        Expression* expr = node.expression()->get();
        switch (expr->kind()) {
            case Expression::Kind::kBinary: {
                BinaryExpression* b = &expr->as<BinaryExpression>();
                if (b->getOperator() == Token::Kind::TK_EQ) {
                    this->addDefinition(&b->left(), &b->rightPointer(), definitions);
                } else if (Compiler::IsAssignment(b->getOperator())) {
                    this->addDefinition(
                            &b->left(),
                            (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                            definitions);
                }
                break;
            }
            case Expression::Kind::kFunctionCall: {
                const FunctionCall& c = expr->as<FunctionCall>();
                for (size_t i = 0; i < c.fFunction.fParameters.size(); ++i) {
                    if (c.fFunction.fParameters[i]->fModifiers.fFlags & Modifiers::kOut_Flag) {
                        this->addDefinition(
                                c.fArguments[i].get(),
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                                definitions);
                    }
                }
                break;
            }
            case Expression::Kind::kPrefix: {
                const PrefixExpression* p = &expr->as<PrefixExpression>();
                if (p->fOperator == Token::Kind::TK_MINUSMINUS ||
                    p->fOperator == Token::Kind::TK_PLUSPLUS) {
                    this->addDefinition(
                            p->fOperand.get(),
                            (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                            definitions);
                }
                break;
            }
            case Expression::Kind::kPostfix: {
                const PostfixExpression* p = &expr->as<PostfixExpression>();
                if (p->fOperator == Token::Kind::TK_MINUSMINUS ||
                    p->fOperator == Token::Kind::TK_PLUSPLUS) {
                    this->addDefinition(
                            p->fOperand.get(),
                            (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                            definitions);
                }
                break;
            }
            case Expression::Kind::kVariableReference: {
                const VariableReference* v = &expr->as<VariableReference>();
                if (v->fRefKind != VariableReference::kRead_RefKind) {
                    this->addDefinition(
                            v,
                            (std::unique_ptr<Expression>*) &fContext->fDefined_Expression,
                            definitions);
                }
                break;
            }
            default:
                break;
        }
    } else if (node.isStatement()) {
        Statement* stmt = node.statement()->get();
        if (stmt->is<VarDeclaration>()) {
            VarDeclaration& vd = stmt->as<VarDeclaration>();
            if (vd.fValue) {
                (*definitions)[vd.fVar] = &vd.fValue;
            }
        }
    }
}

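// Recomputes the definitions produced by block `blockId` and propagates them into the "before"
// maps of its exit blocks, adding any exit whose map changed back onto the worklist.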
void Compiler::scanCFG(CFG* cfg, BlockId blockId, std::set<BlockId>* workList) {
    BasicBlock& block = cfg->fBlocks[blockId];

    // compute definitions after this block
    DefinitionMap after = block.fBefore;
    for (const BasicBlock::Node& n : block.fNodes) {
        this->addDefinitions(n, &after);
    }

    // propagate definitions to exits
    for (BlockId exitId : block.fExits) {
        if (exitId == blockId) {
            continue;
        }
        BasicBlock& exit = cfg->fBlocks[exitId];
        for (const auto& pair : after) {
            std::unique_ptr<Expression>* e1 = pair.second;
            auto found = exit.fBefore.find(pair.first);
            if (found == exit.fBefore.end()) {
                // exit has no definition for it, just copy it
                workList->insert(exitId);
                exit.fBefore[pair.first] = e1;
            } else {
                // exit has a (possibly different) value already defined
                std::unique_ptr<Expression>* e2 = exit.fBefore[pair.first];
                if (e1 != e2) {
                    // definition has changed, merge and add exit block to worklist
                    workList->insert(exitId);
                    if (e1 && e2) {
                        exit.fBefore[pair.first] =
                                (std::unique_ptr<Expression>*) &fContext->fDefined_Expression;
                    } else {
                        exit.fBefore[pair.first] = nullptr;
                    }
                }
            }
        }
    }
}

// returns a map which maps all local variables in the function to null, indicating that their
// value is initially unknown
static DefinitionMap compute_start_state(const CFG& cfg) {
    DefinitionMap result;
    for (const auto& block : cfg.fBlocks) {
        for (const auto& node : block.fNodes) {
            if (node.isStatement()) {
                const Statement* s = node.statement()->get();
                if (s->is<VarDeclarationsStatement>()) {
                    const VarDeclarationsStatement* vd = &s->as<VarDeclarationsStatement>();
                    for (const auto& decl : vd->fDeclaration->fVars) {
                        if (decl->kind() == Statement::Kind::kVarDeclaration) {
                            result[decl->as<VarDeclaration>().fVar] = nullptr;
                        }
                    }
                }
            }
        }
    }
    return result;
}

/**
 * Returns true if assigning to this lvalue has no effect.
 */
static bool is_dead(const Expression& lvalue) {
    switch (lvalue.kind()) {
        case Expression::Kind::kVariableReference:
            return lvalue.as<VariableReference>().fVariable->dead();
        case Expression::Kind::kSwizzle:
            return is_dead(*lvalue.as<Swizzle>().fBase);
        case Expression::Kind::kFieldAccess:
            return is_dead(*lvalue.as<FieldAccess>().fBase);
        case Expression::Kind::kIndex: {
            const IndexExpression& idx = lvalue.as<IndexExpression>();
            return is_dead(*idx.fBase) &&
                   !idx.fIndex->hasProperty(Expression::Property::kSideEffects);
        }
        case Expression::Kind::kTernary: {
            const TernaryExpression& t = lvalue.as<TernaryExpression>();
            return !t.fTest->hasSideEffects() && is_dead(*t.fIfTrue) && is_dead(*t.fIfFalse);
        }
        case Expression::Kind::kExternalValue:
            return false;
        default:
#ifdef SK_DEBUG
            ABORT("invalid lvalue: %s\n", lvalue.description().c_str());
#endif
            return false;
    }
}

/**
|
|
|
|
 * Returns true if this is an assignment that can be collapsed down to just the right-hand side due
|
|
|
|
 * to a dead target and a lack of side effects on the left-hand side.
|
|
|
|
*/
|
|
|
|
static bool dead_assignment(const BinaryExpression& b) {
|
2020-09-22 19:05:37 +00:00
|
|
|
if (!Compiler::IsAssignment(b.getOperator())) {
|
2017-04-20 23:31:52 +00:00
|
|
|
return false;
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
return is_dead(b.left());
|
2017-04-20 23:31:52 +00:00
|
|
|
}
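// For illustration only (assuming 'tmp' is a local variable that is never read afterwards):
//
//     tmp = someCall();
//
// is a dead assignment: is_dead() reports the left-hand side as dead, so the whole binary
// expression can later be collapsed to just 'someCall()', keeping any side effects of the
// right-hand side. 'someCall' is a placeholder name.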
|
2016-10-13 20:25:34 +00:00
|
|
|
|
2017-04-20 23:31:52 +00:00
|
|
|
void Compiler::computeDataFlow(CFG* cfg) {
|
|
|
|
cfg->fBlocks[cfg->fStart].fBefore = compute_start_state(*cfg);
|
2016-10-13 20:25:34 +00:00
|
|
|
std::set<BlockId> workList;
|
2017-04-20 23:31:52 +00:00
|
|
|
for (BlockId i = 0; i < cfg->fBlocks.size(); i++) {
|
2016-10-13 20:25:34 +00:00
|
|
|
workList.insert(i);
|
|
|
|
}
|
|
|
|
while (workList.size()) {
|
|
|
|
BlockId next = *workList.begin();
|
|
|
|
workList.erase(workList.begin());
|
2017-04-20 23:31:52 +00:00
|
|
|
this->scanCFG(cfg, next, &workList);
|
|
|
|
}
|
|
|
|
}
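// For illustration only: iterating to a fixed point matters once the CFG contains a cycle.
// In a loop such as
//
//     int x = 0;
//     for (int i = 0; i < 4; ++i) {
//         x = x + 1;
//     }
//
// the definition of 'x' that flows around the back edge changes the loop header's entry state,
// so scanCFG() re-inserts the header into the work list and rescans it until its fBefore map
// stops changing.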
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Attempts to replace the expression pointed to by iter with a new one (in both the CFG and the
|
|
|
|
* IR). If the expression can be cleanly removed, returns true and updates the iterator to point to
|
|
|
|
* the newly-inserted element. Otherwise updates only the IR and returns false (and the CFG will
|
|
|
|
* need to be regenerated).
|
|
|
|
*/
|
2020-08-18 14:08:21 +00:00
|
|
|
static bool try_replace_expression(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
std::unique_ptr<Expression>* newExpression) {
|
2017-04-20 23:31:52 +00:00
|
|
|
std::unique_ptr<Expression>* target = (*iter)->expression();
|
|
|
|
if (!b->tryRemoveExpression(iter)) {
|
|
|
|
*target = std::move(*newExpression);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
*target = std::move(*newExpression);
|
|
|
|
return b->tryInsertExpression(iter, target);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2017-04-27 20:24:51 +00:00
|
|
|
* Returns true if the expression is a constant numeric literal with the specified value, or a
|
|
|
|
* constant vector with all elements equal to the specified value.
|
2017-04-20 23:31:52 +00:00
|
|
|
*/
|
2020-08-19 13:56:49 +00:00
|
|
|
template <typename T = double>
|
|
|
|
static bool is_constant(const Expression& expr, T value) {
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (expr.kind()) {
|
|
|
|
case Expression::Kind::kIntLiteral:
|
2020-08-18 13:24:00 +00:00
|
|
|
return expr.as<IntLiteral>().fValue == value;
|
2020-08-19 13:56:49 +00:00
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kFloatLiteral:
|
2020-08-18 13:24:00 +00:00
|
|
|
return expr.as<FloatLiteral>().fValue == value;
|
2020-08-19 13:56:49 +00:00
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kConstructor: {
|
2020-08-19 13:56:49 +00:00
|
|
|
const Constructor& constructor = expr.as<Constructor>();
|
|
|
|
if (constructor.isCompileTimeConstant()) {
|
2020-09-11 16:27:26 +00:00
|
|
|
const Type& constructorType = constructor.type();
|
|
|
|
bool isFloat = constructorType.columns() > 1
|
|
|
|
? constructorType.componentType().isFloat()
|
|
|
|
: constructorType.isFloat();
|
|
|
|
switch (constructorType.typeKind()) {
|
2020-09-08 14:22:09 +00:00
|
|
|
case Type::TypeKind::kVector:
|
2020-09-11 16:27:26 +00:00
|
|
|
for (int i = 0; i < constructorType.columns(); ++i) {
|
2020-08-19 13:56:49 +00:00
|
|
|
if (isFloat) {
|
|
|
|
if (constructor.getFVecComponent(i) != value) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (constructor.getIVecComponent(i) != value) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
2019-06-10 19:55:38 +00:00
|
|
|
}
|
2020-08-19 13:56:49 +00:00
|
|
|
return true;
|
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Type::TypeKind::kScalar:
|
2020-08-19 13:56:49 +00:00
|
|
|
SkASSERT(constructor.fArguments.size() == 1);
|
|
|
|
return is_constant<T>(*constructor.fArguments[0], value);
|
|
|
|
|
|
|
|
default:
|
2017-04-27 20:24:51 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
default:
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
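// For illustration only: is_constant(expr, 1) is true for the literals '1' and '1.0', and for
// compile-time-constant constructors whose components are all 1, e.g.
//
//     half4(1)            // true: the single scalar argument splats to (1, 1, 1, 1)
//     float2(1.0, 1.0)    // true
//     float2(1.0, 0.0)    // false: not every component equals 1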
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Collapses the binary expression pointed to by iter down to just the right side (in both the IR
|
|
|
|
* and CFG structures).
|
|
|
|
*/
|
2020-08-18 14:08:21 +00:00
|
|
|
static void delete_left(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2017-04-20 23:31:52 +00:00
|
|
|
*outUpdated = true;
|
2017-05-05 14:04:06 +00:00
|
|
|
std::unique_ptr<Expression>* target = (*iter)->expression();
|
2020-08-18 15:19:07 +00:00
|
|
|
BinaryExpression& bin = (*target)->as<BinaryExpression>();
|
2020-09-22 19:05:37 +00:00
|
|
|
Expression& left = bin.left();
|
|
|
|
std::unique_ptr<Expression>& rightPointer = bin.rightPointer();
|
|
|
|
SkASSERT(!left.hasSideEffects());
|
2017-05-05 14:04:06 +00:00
|
|
|
bool result;
|
2020-09-22 19:05:37 +00:00
|
|
|
if (bin.getOperator() == Token::Kind::TK_EQ) {
|
|
|
|
result = b->tryRemoveLValueBefore(iter, &left);
|
2017-05-05 14:04:06 +00:00
|
|
|
} else {
|
2020-09-22 19:05:37 +00:00
|
|
|
result = b->tryRemoveExpressionBefore(iter, &left);
|
2017-05-05 14:04:06 +00:00
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
*target = std::move(rightPointer);
|
2017-05-05 14:04:06 +00:00
|
|
|
if (!result) {
|
2017-05-17 14:52:55 +00:00
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (*iter == b->fNodes.begin()) {
|
2017-04-20 23:31:52 +00:00
|
|
|
*outNeedsRescan = true;
|
2017-05-05 14:04:06 +00:00
|
|
|
return;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
2017-05-05 14:04:06 +00:00
|
|
|
--(*iter);
|
2020-09-28 20:08:58 +00:00
|
|
|
if (!(*iter)->isExpression() || (*iter)->expression() != &rightPointer) {
|
2017-05-17 14:52:55 +00:00
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
2017-05-05 14:04:06 +00:00
|
|
|
*iter = b->fNodes.erase(*iter);
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT((*iter)->expression() == target);
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Collapses the binary expression pointed to by iter down to just the left side (in both the IR and
|
|
|
|
* CFG structures).
|
|
|
|
*/
|
2020-08-18 14:08:21 +00:00
|
|
|
static void delete_right(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2017-04-20 23:31:52 +00:00
|
|
|
*outUpdated = true;
|
2017-05-05 14:04:06 +00:00
|
|
|
std::unique_ptr<Expression>* target = (*iter)->expression();
|
2020-08-18 15:19:07 +00:00
|
|
|
BinaryExpression& bin = (*target)->as<BinaryExpression>();
|
2020-09-22 19:05:37 +00:00
|
|
|
std::unique_ptr<Expression>& leftPointer = bin.leftPointer();
|
|
|
|
Expression& right = bin.right();
|
|
|
|
SkASSERT(!right.hasSideEffects());
|
|
|
|
if (!b->tryRemoveExpressionBefore(iter, &right)) {
|
|
|
|
*target = std::move(leftPointer);
|
2017-04-20 23:31:52 +00:00
|
|
|
*outNeedsRescan = true;
|
2017-05-05 14:04:06 +00:00
|
|
|
return;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
*target = std::move(leftPointer);
|
2017-05-17 14:52:55 +00:00
|
|
|
if (*iter == b->fNodes.begin()) {
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
2017-05-05 14:04:06 +00:00
|
|
|
--(*iter);
|
2020-09-28 20:08:58 +00:00
|
|
|
if ((!(*iter)->isExpression() || (*iter)->expression() != &leftPointer)) {
|
2017-05-17 14:52:55 +00:00
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
2017-05-05 14:04:06 +00:00
|
|
|
*iter = b->fNodes.erase(*iter);
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT((*iter)->expression() == target);
|
2017-04-20 23:31:52 +00:00
|
|
|
}
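// For illustration only: together these two helpers implement rewrites such as
//
//     0 + x   ->  x    // delete_left drops the useless left operand
//     x * 1   ->  x    // delete_right drops the useless right operand
//
// The extra iterator bookkeeping exists so the CFG stays in sync with the IR; when the node
// list can't be patched in place, the helpers request a full rescan instead.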
|
|
|
|
|
2017-04-27 20:24:51 +00:00
|
|
|
/**
|
|
|
|
* Constructs the specified type using a single argument.
|
|
|
|
*/
|
2020-09-11 16:27:26 +00:00
|
|
|
static std::unique_ptr<Expression> construct(const Type* type, std::unique_ptr<Expression> v) {
|
2017-04-27 20:24:51 +00:00
|
|
|
std::vector<std::unique_ptr<Expression>> args;
|
|
|
|
args.push_back(std::move(v));
|
2020-09-08 14:22:09 +00:00
|
|
|
std::unique_ptr<Expression> result = std::make_unique<Constructor>(-1, type, std::move(args));
|
2017-04-27 20:24:51 +00:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Used in the implementations of vectorize_left and vectorize_right. Given a vector type and an
|
|
|
|
* expression x, deletes the expression pointed to by iter and replaces it with <type>(x).
|
|
|
|
*/
|
|
|
|
static void vectorize(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
const Type& type,
|
|
|
|
std::unique_ptr<Expression>* otherExpression,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2020-09-08 14:22:09 +00:00
|
|
|
SkASSERT((*(*iter)->expression())->kind() == Expression::Kind::kBinary);
|
|
|
|
SkASSERT(type.typeKind() == Type::TypeKind::kVector);
|
2020-09-11 16:27:26 +00:00
|
|
|
SkASSERT((*otherExpression)->type().typeKind() == Type::TypeKind::kScalar);
|
2017-04-27 20:24:51 +00:00
|
|
|
*outUpdated = true;
|
|
|
|
std::unique_ptr<Expression>* target = (*iter)->expression();
|
|
|
|
if (!b->tryRemoveExpression(iter)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
*target = construct(&type, std::move(*otherExpression));
|
2017-04-27 20:24:51 +00:00
|
|
|
*outNeedsRescan = true;
|
|
|
|
} else {
|
2020-09-11 16:27:26 +00:00
|
|
|
*target = construct(&type, std::move(*otherExpression));
|
2017-04-27 20:24:51 +00:00
|
|
|
if (!b->tryInsertExpression(iter, target)) {
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Given a binary expression of the form x <op> vec<n>(y), deletes the right side and vectorizes the
|
|
|
|
* left to yield vec<n>(x).
|
|
|
|
*/
|
|
|
|
static void vectorize_left(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2020-08-18 15:19:07 +00:00
|
|
|
BinaryExpression& bin = (*(*iter)->expression())->as<BinaryExpression>();
|
2020-09-22 19:05:37 +00:00
|
|
|
vectorize(b, iter, bin.right().type(), &bin.leftPointer(), outUpdated, outNeedsRescan);
|
2017-04-27 20:24:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Given a binary expression of the form vec<n>(x) <op> y, deletes the left side and vectorizes the
|
|
|
|
* right to yield vec<n>(y).
|
|
|
|
*/
|
|
|
|
static void vectorize_right(BasicBlock* b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2020-08-18 15:19:07 +00:00
|
|
|
BinaryExpression& bin = (*(*iter)->expression())->as<BinaryExpression>();
|
2020-09-22 19:05:37 +00:00
|
|
|
vectorize(b, iter, bin.left().type(), &bin.rightPointer(), outUpdated, outNeedsRescan);
|
2017-04-27 20:24:51 +00:00
|
|
|
}
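// For illustration only: the vectorize helpers implement mixed scalar/vector rewrites such as
//
//     x * float4(1)  ->  float4(x)    // vectorize_left keeps x, wrapped in the vector type
//     float4(1) * x  ->  float4(x)    // vectorize_right keeps x, wrapped in the vector type
//
// i.e. the surviving scalar operand is wrapped in a one-argument constructor of the other
// operand's vector type, built by construct() above.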
|
|
|
|
|
|
|
|
// Mark an expression that we were writing to as no longer being written to
|
2020-08-18 15:19:07 +00:00
|
|
|
static void clear_write(Expression& expr) {
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (expr.kind()) {
|
|
|
|
case Expression::Kind::kVariableReference: {
|
2020-08-18 15:19:07 +00:00
|
|
|
expr.as<VariableReference>().setRefKind(VariableReference::kRead_RefKind);
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kFieldAccess:
|
2020-08-18 15:19:07 +00:00
|
|
|
clear_write(*expr.as<FieldAccess>().fBase);
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kSwizzle:
|
2020-08-18 15:19:07 +00:00
|
|
|
clear_write(*expr.as<Swizzle>().fBase);
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kIndex:
|
2020-08-18 15:19:07 +00:00
|
|
|
clear_write(*expr.as<IndexExpression>().fBase);
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
ABORT("shouldn't be writing to this kind of expression\n");
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-04-20 23:31:52 +00:00
|
|
|
void Compiler::simplifyExpression(DefinitionMap& definitions,
|
|
|
|
BasicBlock& b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
std::unordered_set<const Variable*>* undefinedVariables,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
|
|
|
Expression* expr = (*iter)->expression()->get();
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT(expr);
|
2017-04-20 23:31:52 +00:00
|
|
|
if ((*iter)->fConstantPropagation) {
|
|
|
|
std::unique_ptr<Expression> optimized = expr->constantPropagate(*fIRGenerator, definitions);
|
|
|
|
if (optimized) {
|
2017-05-17 14:52:55 +00:00
|
|
|
*outUpdated = true;
|
2020-09-11 16:27:26 +00:00
|
|
|
optimized = fIRGenerator->coerce(std::move(optimized), expr->type());
|
2020-09-11 13:32:54 +00:00
|
|
|
SkASSERT(optimized);
|
2017-04-20 23:31:52 +00:00
|
|
|
if (!try_replace_expression(&b, iter, &optimized)) {
|
|
|
|
*outNeedsRescan = true;
|
2017-05-17 14:52:55 +00:00
|
|
|
return;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
2020-09-28 20:08:58 +00:00
|
|
|
SkASSERT((*iter)->isExpression());
|
2017-04-20 23:31:52 +00:00
|
|
|
expr = (*iter)->expression()->get();
|
|
|
|
}
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (expr->kind()) {
|
|
|
|
case Expression::Kind::kVariableReference: {
|
2020-08-18 15:19:07 +00:00
|
|
|
const VariableReference& ref = expr->as<VariableReference>();
|
2020-09-24 19:01:27 +00:00
|
|
|
const Variable* var = ref.fVariable;
|
2018-03-26 18:24:27 +00:00
|
|
|
if (ref.refKind() != VariableReference::kWrite_RefKind &&
|
|
|
|
ref.refKind() != VariableReference::kPointer_RefKind &&
|
2020-09-24 19:01:27 +00:00
|
|
|
var->fStorage == Variable::kLocal_Storage && !definitions[var] &&
|
|
|
|
(*undefinedVariables).find(var) == (*undefinedVariables).end()) {
|
|
|
|
(*undefinedVariables).insert(var);
|
2017-11-07 14:42:10 +00:00
|
|
|
this->error(expr->fOffset,
|
2020-09-24 19:01:27 +00:00
|
|
|
"'" + var->fName + "' has not been assigned");
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kTernary: {
|
2020-08-20 16:11:48 +00:00
|
|
|
TernaryExpression* t = &expr->as<TernaryExpression>();
|
2020-09-08 14:22:09 +00:00
|
|
|
if (t->fTest->kind() == Expression::Kind::kBoolLiteral) {
|
2017-04-20 23:31:52 +00:00
|
|
|
// ternary has a constant test, replace it with either the true or
|
|
|
|
// false branch
|
2020-09-28 13:18:15 +00:00
|
|
|
if (t->fTest->as<BoolLiteral>().value()) {
|
2017-04-20 23:31:52 +00:00
|
|
|
(*iter)->setExpression(std::move(t->fIfTrue));
|
|
|
|
} else {
|
|
|
|
(*iter)->setExpression(std::move(t->fIfFalse));
|
|
|
|
}
|
|
|
|
*outUpdated = true;
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kBinary: {
|
2020-08-20 16:11:48 +00:00
|
|
|
BinaryExpression* bin = &expr->as<BinaryExpression>();
|
2017-05-05 14:04:06 +00:00
|
|
|
if (dead_assignment(*bin)) {
|
|
|
|
delete_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
break;
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
Expression& left = bin->left();
|
|
|
|
Expression& right = bin->right();
|
|
|
|
const Type& leftType = left.type();
|
|
|
|
const Type& rightType = right.type();
|
2017-05-05 14:04:06 +00:00
|
|
|
// collapse useless expressions like x * 1 or x + 0
|
2020-09-11 16:27:26 +00:00
|
|
|
if (((leftType.typeKind() != Type::TypeKind::kScalar) &&
|
|
|
|
(leftType.typeKind() != Type::TypeKind::kVector)) ||
|
|
|
|
((rightType.typeKind() != Type::TypeKind::kScalar) &&
|
|
|
|
(rightType.typeKind() != Type::TypeKind::kVector))) {
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
switch (bin->getOperator()) {
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_STAR:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(left, 1)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kVector &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kScalar) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(1) * x -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// 1 * x -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// 1 * float4(x) -> float4(x)
|
|
|
|
// float4(1) * float4(x) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
else if (is_constant(left, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector &&
|
2020-09-22 19:05:37 +00:00
|
|
|
!right.hasSideEffects()) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// 0 * float4(x) -> float4(0)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// 0 * x -> 0
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(0) * x -> float4(0)
|
|
|
|
// float4(0) * float4(x) -> float4(0)
|
2020-09-22 19:05:37 +00:00
|
|
|
if (!right.hasSideEffects()) {
|
2017-12-11 17:34:33 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2017-04-27 20:24:51 +00:00
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
else if (is_constant(right, 1)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// x * float4(1) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// x * 1 -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(x) * 1 -> float4(x)
|
|
|
|
// float4(x) * float4(1) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
else if (is_constant(right, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kVector &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kScalar &&
|
2020-09-22 19:05:37 +00:00
|
|
|
!left.hasSideEffects()) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(x) * 0 -> float4(0)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// x * 0 -> 0
|
2017-07-28 19:19:46 +00:00
|
|
|
// x * float4(0) -> float4(0)
|
|
|
|
// float4(x) * float4(0) -> float4(0)
|
2020-09-22 19:05:37 +00:00
|
|
|
if (!left.hasSideEffects()) {
|
2017-12-11 17:34:33 +00:00
|
|
|
delete_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2017-04-27 20:24:51 +00:00
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_PLUS:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(left, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kVector &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kScalar) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(0) + x -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// 0 + x -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// 0 + float4(x) -> float4(x)
|
|
|
|
// float4(0) + float4(x) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
} else if (is_constant(right, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// x + float4(0) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// x + 0 -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(x) + 0 -> float4(x)
|
|
|
|
// float4(x) + float4(0) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
2017-04-27 20:24:51 +00:00
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_MINUS:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// x - float4(0) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// x - 0 -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(x) - 0 -> float4(x)
|
|
|
|
// float4(x) - float4(0) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_SLASH:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 1)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// x / float4(1) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// x / 1 -> x
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(x) / 1 -> float4(x)
|
|
|
|
// float4(x) / float4(1) -> float4(x)
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2020-09-22 19:05:37 +00:00
|
|
|
} else if (is_constant(left, 0)) {
|
2020-09-11 16:27:26 +00:00
|
|
|
if (leftType.typeKind() == Type::TypeKind::kScalar &&
|
|
|
|
rightType.typeKind() == Type::TypeKind::kVector &&
|
2020-09-22 19:05:37 +00:00
|
|
|
!right.hasSideEffects()) {
|
2017-07-28 19:19:46 +00:00
|
|
|
// 0 / float4(x) -> float4(0)
|
2017-04-27 20:24:51 +00:00
|
|
|
vectorize_left(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
} else {
|
|
|
|
// 0 / x -> 0
|
2017-07-28 19:19:46 +00:00
|
|
|
// float4(0) / x -> float4(0)
|
|
|
|
// float4(0) / float4(x) -> float4(0)
|
2020-09-22 19:05:37 +00:00
|
|
|
if (!right.hasSideEffects()) {
|
2017-12-11 17:34:33 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
2017-04-27 20:24:51 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_PLUSEQ:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 0)) {
|
|
|
|
clear_write(left);
|
2017-04-21 14:23:37 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_MINUSEQ:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 0)) {
|
|
|
|
clear_write(left);
|
2017-04-20 23:31:52 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_STAREQ:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 1)) {
|
|
|
|
clear_write(left);
|
2017-04-27 20:24:51 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
break;
|
2020-04-17 16:45:51 +00:00
|
|
|
case Token::Kind::TK_SLASHEQ:
|
2020-09-22 19:05:37 +00:00
|
|
|
if (is_constant(right, 1)) {
|
|
|
|
clear_write(left);
|
2017-04-20 23:31:52 +00:00
|
|
|
delete_right(&b, iter, outUpdated, outNeedsRescan);
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
2019-09-17 16:34:39 +00:00
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Expression::Kind::kSwizzle: {
|
2020-08-20 16:11:48 +00:00
|
|
|
Swizzle& s = expr->as<Swizzle>();
|
2019-09-17 16:34:39 +00:00
|
|
|
// detect identity swizzles like foo.rgba
|
2020-09-11 16:27:26 +00:00
|
|
|
if ((int) s.fComponents.size() == s.fBase->type().columns()) {
|
2019-09-17 16:34:39 +00:00
|
|
|
bool identity = true;
|
|
|
|
for (int i = 0; i < (int) s.fComponents.size(); ++i) {
|
|
|
|
if (s.fComponents[i] != i) {
|
|
|
|
identity = false;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (identity) {
|
|
|
|
*outUpdated = true;
|
|
|
|
if (!try_replace_expression(&b, iter, &s.fBase)) {
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
2020-09-28 20:08:58 +00:00
|
|
|
SkASSERT((*iter)->isExpression());
|
2019-09-17 16:34:39 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// detect swizzles of swizzles, e.g. replace foo.argb.r000 with foo.a000
|
2020-09-08 14:22:09 +00:00
|
|
|
if (s.fBase->kind() == Expression::Kind::kSwizzle) {
|
2020-08-18 15:19:07 +00:00
|
|
|
Swizzle& base = s.fBase->as<Swizzle>();
|
2019-09-17 16:34:39 +00:00
|
|
|
std::vector<int> final;
|
|
|
|
for (int c : s.fComponents) {
|
2020-09-15 19:16:56 +00:00
|
|
|
final.push_back(base.fComponents[c]);
|
2019-09-17 16:34:39 +00:00
|
|
|
}
|
|
|
|
*outUpdated = true;
|
|
|
|
std::unique_ptr<Expression> replacement(new Swizzle(*fContext, base.fBase->clone(),
|
|
|
|
std::move(final)));
|
|
|
|
if (!try_replace_expression(&b, iter, &replacement)) {
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
return;
|
|
|
|
}
|
2020-09-28 20:08:58 +00:00
|
|
|
SkASSERT((*iter)->isExpression());
|
2019-09-17 16:34:39 +00:00
|
|
|
}
|
2020-06-11 21:55:07 +00:00
|
|
|
break;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
default:
|
|
|
|
break;
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
}
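// For illustration only, a few of the expression-level rewrites performed above:
//
//     true ? a : b    ->  a       // ternary with a constant test
//     v.xyzw          ->  v       // identity swizzle of a float4
//     v.argb.r        ->  v.a     // swizzle of a swizzle composes into one swizzle
//     x + 0           ->  x       // arithmetic identities, handled per operator above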
|
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
// Returns true if this statement could potentially execute a break at the current level. We ignore
|
|
|
|
// nested loops and switches, since any breaks inside of them will merely break the loop / switch.
|
|
|
|
static bool contains_conditional_break(Statement& stmt) {
|
|
|
|
class ContainsConditionalBreak : public ProgramVisitor {
|
|
|
|
public:
|
|
|
|
bool visitStatement(const Statement& stmt) override {
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (stmt.kind()) {
|
|
|
|
case Statement::Kind::kBlock:
|
2020-08-31 22:09:01 +00:00
|
|
|
return this->INHERITED::visitStatement(stmt);
|
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kBreak:
|
2020-08-31 22:09:01 +00:00
|
|
|
return fInConditional > 0;
|
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kIf: {
|
2020-08-31 22:09:01 +00:00
|
|
|
++fInConditional;
|
|
|
|
bool result = this->INHERITED::visitStatement(stmt);
|
|
|
|
--fInConditional;
|
|
|
|
return result;
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
default:
|
|
|
|
return false;
|
|
|
|
}
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
int fInConditional = 0;
|
|
|
|
using INHERITED = ProgramVisitor;
|
|
|
|
};
|
2017-05-10 19:06:17 +00:00
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
return ContainsConditionalBreak{}.visitStatement(stmt);
|
2020-06-15 16:32:24 +00:00
|
|
|
}
|
|
|
|
|
2018-08-24 17:06:27 +00:00
|
|
|
// Returns true if this statement definitely executes a break at the current level. We ignore
|
|
|
|
// nested loops and switches, since any breaks inside of them will merely break the loop / switch.
|
2020-08-31 22:09:01 +00:00
|
|
|
static bool contains_unconditional_break(Statement& stmt) {
|
|
|
|
class ContainsUnconditionalBreak : public ProgramVisitor {
|
|
|
|
public:
|
|
|
|
bool visitStatement(const Statement& stmt) override {
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (stmt.kind()) {
|
|
|
|
case Statement::Kind::kBlock:
|
2020-08-31 22:09:01 +00:00
|
|
|
return this->INHERITED::visitStatement(stmt);
|
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kBreak:
|
2018-08-24 17:06:27 +00:00
|
|
|
return true;
|
2020-08-31 22:09:01 +00:00
|
|
|
|
|
|
|
default:
|
|
|
|
return false;
|
2018-08-24 17:06:27 +00:00
|
|
|
}
|
2020-08-31 22:09:01 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
using INHERITED = ProgramVisitor;
|
|
|
|
};
|
2020-06-15 16:32:24 +00:00
|
|
|
|
2020-08-31 22:09:01 +00:00
|
|
|
return ContainsUnconditionalBreak{}.visitStatement(stmt);
|
2018-08-24 17:06:27 +00:00
|
|
|
}
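// For illustration only (placeholder names): in switch-case bodies such as
//
//     case 0:
//         if (flag) break;    // contains_conditional_break() is true for this statement
//     case 1:
//         doWork();
//         break;              // contains_unconditional_break() is true for this statement
//
// a conditional break prevents the switch simplification below, while an unconditional break
// marks where block_for_case() can stop collecting statements.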
|
|
|
|
|
2020-06-15 16:32:24 +00:00
|
|
|
static void move_all_but_break(std::unique_ptr<Statement>& stmt,
|
|
|
|
std::vector<std::unique_ptr<Statement>>* target) {
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (stmt->kind()) {
|
|
|
|
case Statement::Kind::kBlock: {
|
2020-06-15 16:32:24 +00:00
|
|
|
// Recurse into the block.
|
|
|
|
Block& block = static_cast<Block&>(*stmt);
|
|
|
|
|
|
|
|
std::vector<std::unique_ptr<Statement>> blockStmts;
|
2020-09-25 18:31:59 +00:00
|
|
|
blockStmts.reserve(block.children().size());
|
|
|
|
for (std::unique_ptr<Statement>& stmt : block.children()) {
|
|
|
|
move_all_but_break(stmt, &blockStmts);
|
2020-06-11 16:16:14 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
|
|
|
target->push_back(std::make_unique<Block>(block.fOffset, std::move(blockStmts),
|
2020-09-25 18:31:59 +00:00
|
|
|
block.symbolTable(), block.isScope()));
|
2020-06-11 16:16:14 +00:00
|
|
|
break;
|
2020-06-15 16:32:24 +00:00
|
|
|
}
|
|
|
|
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kBreak:
|
2020-06-15 16:32:24 +00:00
|
|
|
// Do not append a break to the target.
|
|
|
|
break;
|
|
|
|
|
2020-06-11 16:16:14 +00:00
|
|
|
default:
|
2020-06-15 16:32:24 +00:00
|
|
|
// Append normal statements to the target.
|
|
|
|
target->push_back(std::move(stmt));
|
|
|
|
break;
|
2020-06-11 16:16:14 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-05-10 19:06:17 +00:00
|
|
|
// Returns a block containing all of the statements that will be run if the given case matches
|
|
|
|
// (which, owing to the statements being owned by unique_ptrs, means the switch itself will be
|
|
|
|
// broken by this call and must then be discarded).
|
|
|
|
// Returns null (and leaves the switch unmodified) if no such simple reduction is possible, such as
|
|
|
|
// when break statements appear inside conditionals.
|
2020-06-15 16:32:24 +00:00
|
|
|
static std::unique_ptr<Statement> block_for_case(SwitchStatement* switchStatement,
|
|
|
|
SwitchCase* caseToCapture) {
|
|
|
|
// We have to be careful to not move any of the pointers until after we're sure we're going to
|
|
|
|
// succeed, so before we make any changes at all, we check the switch-cases to decide on a plan
|
|
|
|
// of action. First, find the switch-case we are interested in.
|
|
|
|
auto iter = switchStatement->fCases.begin();
|
|
|
|
for (; iter != switchStatement->fCases.end(); ++iter) {
|
|
|
|
if (iter->get() == caseToCapture) {
|
|
|
|
break;
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Next, walk forward through the rest of the switch. If we find a conditional break, we're
|
|
|
|
// stuck and can't simplify at all. If we find an unconditional break, we have a range of
|
|
|
|
// statements that we can use for simplification.
|
|
|
|
auto startIter = iter;
|
|
|
|
Statement* unconditionalBreakStmt = nullptr;
|
|
|
|
for (; iter != switchStatement->fCases.end(); ++iter) {
|
|
|
|
for (std::unique_ptr<Statement>& stmt : (*iter)->fStatements) {
|
|
|
|
if (contains_conditional_break(*stmt)) {
|
|
|
|
// We can't reduce switch-cases to a block when they have conditional breaks.
|
|
|
|
return nullptr;
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
|
|
|
if (contains_unconditional_break(*stmt)) {
|
|
|
|
// We found an unconditional break. We can use this block, but we need to strip
|
|
|
|
// out the break statement.
|
|
|
|
unconditionalBreakStmt = stmt.get();
|
2017-05-10 19:06:17 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
|
|
|
if (unconditionalBreakStmt != nullptr) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// We fell off the bottom of the switch or encountered a break. We know the range of statements
|
|
|
|
// that we need to move over, and we know it's safe to do so.
|
|
|
|
std::vector<std::unique_ptr<Statement>> caseStmts;
|
|
|
|
|
|
|
|
// We can move over most of the statements as-is.
|
|
|
|
while (startIter != iter) {
|
|
|
|
for (std::unique_ptr<Statement>& stmt : (*startIter)->fStatements) {
|
|
|
|
caseStmts.push_back(std::move(stmt));
|
|
|
|
}
|
|
|
|
++startIter;
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
|
|
|
// If we found an unconditional break at the end, we need to move what we can while avoiding
|
|
|
|
// that break.
|
|
|
|
if (unconditionalBreakStmt != nullptr) {
|
|
|
|
for (std::unique_ptr<Statement>& stmt : (*startIter)->fStatements) {
|
|
|
|
if (stmt.get() == unconditionalBreakStmt) {
|
|
|
|
move_all_but_break(stmt, &caseStmts);
|
|
|
|
unconditionalBreakStmt = nullptr;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
caseStmts.push_back(std::move(stmt));
|
|
|
|
}
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2020-06-15 16:32:24 +00:00
|
|
|
|
|
|
|
SkASSERT(unconditionalBreakStmt == nullptr); // Verify that we fixed the unconditional break.
|
|
|
|
|
|
|
|
// Return our newly-synthesized block.
|
|
|
|
return std::make_unique<Block>(/*offset=*/-1, std::move(caseStmts), switchStatement->fSymbols);
|
2017-05-10 19:06:17 +00:00
|
|
|
}
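// For illustration only (placeholder names): given
//
//     switch (2) {
//         case 1: a(); break;
//         case 2: b();
//         case 3: c(); break;
//         default: d();
//     }
//
// block_for_case() for 'case 2' produces a block equivalent to '{ b(); c(); }': fall-through
// statements are collected up to, but not including, the first unconditional break.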
|
|
|
|
|
2017-04-20 23:31:52 +00:00
|
|
|
void Compiler::simplifyStatement(DefinitionMap& definitions,
|
2017-05-10 19:06:17 +00:00
|
|
|
BasicBlock& b,
|
|
|
|
std::vector<BasicBlock::Node>::iterator* iter,
|
|
|
|
std::unordered_set<const Variable*>* undefinedVariables,
|
|
|
|
bool* outUpdated,
|
|
|
|
bool* outNeedsRescan) {
|
2017-04-20 23:31:52 +00:00
|
|
|
Statement* stmt = (*iter)->statement()->get();
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (stmt->kind()) {
|
|
|
|
case Statement::Kind::kVarDeclaration: {
|
2020-08-18 15:19:07 +00:00
|
|
|
const auto& varDecl = stmt->as<VarDeclaration>();
|
2017-11-07 14:42:10 +00:00
|
|
|
if (varDecl.fVar->dead() &&
|
|
|
|
(!varDecl.fValue ||
|
|
|
|
!varDecl.fValue->hasSideEffects())) {
|
|
|
|
if (varDecl.fValue) {
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT((*iter)->statement()->get() == stmt);
|
2017-11-07 14:42:10 +00:00
|
|
|
if (!b.tryRemoveExpressionBefore(iter, varDecl.fValue.get())) {
|
|
|
|
*outNeedsRescan = true;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(new Nop()));
|
2017-11-07 14:42:10 +00:00
|
|
|
*outUpdated = true;
|
2017-04-20 23:31:52 +00:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kIf: {
|
2020-08-18 15:19:07 +00:00
|
|
|
IfStatement& i = stmt->as<IfStatement>();
|
2020-09-08 14:22:09 +00:00
|
|
|
if (i.fTest->kind() == Expression::Kind::kBoolLiteral) {
|
2017-05-10 19:06:17 +00:00
|
|
|
// constant if, collapse down to a single branch
|
2020-09-28 13:18:15 +00:00
|
|
|
if (i.fTest->as<BoolLiteral>().value()) {
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT(i.fIfTrue);
|
2017-05-10 19:06:17 +00:00
|
|
|
(*iter)->setStatement(std::move(i.fIfTrue));
|
|
|
|
} else {
|
|
|
|
if (i.fIfFalse) {
|
|
|
|
(*iter)->setStatement(std::move(i.fIfFalse));
|
|
|
|
} else {
|
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(new Nop()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
*outUpdated = true;
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
break;
|
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
if (i.fIfFalse && i.fIfFalse->isEmpty()) {
|
|
|
|
// else block doesn't do anything, remove it
|
|
|
|
i.fIfFalse.reset();
|
|
|
|
*outUpdated = true;
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
|
|
|
if (!i.fIfFalse && i.fIfTrue->isEmpty()) {
|
|
|
|
// if block doesn't do anything, no else block
|
|
|
|
if (i.fTest->hasSideEffects()) {
|
|
|
|
// test has side effects, keep it
|
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(
|
|
|
|
new ExpressionStatement(std::move(i.fTest))));
|
|
|
|
} else {
|
|
|
|
// no if, no else, no test side effects, kill the whole if
|
|
|
|
// statement
|
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(new Nop()));
|
|
|
|
}
|
|
|
|
*outUpdated = true;
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kSwitch: {
|
2020-08-18 15:19:07 +00:00
|
|
|
SwitchStatement& s = stmt->as<SwitchStatement>();
|
2020-09-14 15:33:47 +00:00
|
|
|
int64_t switchValue;
|
|
|
|
if (fIRGenerator->getConstantInt(*s.fValue, &switchValue)) {
|
2017-05-10 19:06:17 +00:00
|
|
|
// switch is constant, replace it with the case that matches
|
|
|
|
bool found = false;
|
|
|
|
SwitchCase* defaultCase = nullptr;
|
2020-08-19 13:56:49 +00:00
|
|
|
for (const std::unique_ptr<SwitchCase>& c : s.fCases) {
|
2017-05-10 19:06:17 +00:00
|
|
|
if (!c->fValue) {
|
|
|
|
defaultCase = c.get();
|
|
|
|
continue;
|
|
|
|
}
|
2020-09-14 15:33:47 +00:00
|
|
|
int64_t caseValue;
|
|
|
|
SkAssertResult(fIRGenerator->getConstantInt(*c->fValue, &caseValue));
|
|
|
|
if (caseValue == switchValue) {
|
2017-05-10 19:06:17 +00:00
|
|
|
std::unique_ptr<Statement> newBlock = block_for_case(&s, c.get());
|
|
|
|
if (newBlock) {
|
|
|
|
(*iter)->setStatement(std::move(newBlock));
|
2020-08-19 13:56:49 +00:00
|
|
|
found = true;
|
2017-05-10 19:06:17 +00:00
|
|
|
break;
|
|
|
|
} else {
|
2017-07-14 14:12:15 +00:00
|
|
|
if (s.fIsStatic && !(fFlags & kPermitInvalidStaticTests_Flag)) {
|
2017-09-11 20:50:14 +00:00
|
|
|
this->error(s.fOffset,
|
2017-05-10 19:06:17 +00:00
|
|
|
"static switch contains non-static conditional break");
|
|
|
|
s.fIsStatic = false;
|
|
|
|
}
|
|
|
|
return; // can't simplify
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (!found) {
|
|
|
|
// no matching case. use default if it exists, or kill the whole thing
|
|
|
|
if (defaultCase) {
|
|
|
|
std::unique_ptr<Statement> newBlock = block_for_case(&s, defaultCase);
|
|
|
|
if (newBlock) {
|
|
|
|
(*iter)->setStatement(std::move(newBlock));
|
|
|
|
} else {
|
2017-07-14 14:12:15 +00:00
|
|
|
if (s.fIsStatic && !(fFlags & kPermitInvalidStaticTests_Flag)) {
|
2017-09-11 20:50:14 +00:00
|
|
|
this->error(s.fOffset,
|
2017-05-10 19:06:17 +00:00
|
|
|
"static switch contains non-static conditional break");
|
|
|
|
s.fIsStatic = false;
|
|
|
|
}
|
|
|
|
return; // can't simplify
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(new Nop()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
*outUpdated = true;
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kExpression: {
|
2020-08-18 15:19:07 +00:00
|
|
|
ExpressionStatement& e = stmt->as<ExpressionStatement>();
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT((*iter)->statement()->get() == &e);
|
2017-04-20 23:31:52 +00:00
|
|
|
if (!e.fExpression->hasSideEffects()) {
|
|
|
|
// Expression statement with no side effects, kill it
|
|
|
|
if (!b.tryRemoveExpressionBefore(iter, e.fExpression.get())) {
|
|
|
|
*outNeedsRescan = true;
|
|
|
|
}
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT((*iter)->statement()->get() == stmt);
|
2017-04-20 23:31:52 +00:00
|
|
|
(*iter)->setStatement(std::unique_ptr<Statement>(new Nop()));
|
|
|
|
*outUpdated = true;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
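// For illustration only, a few of the statement-level rewrites performed above:
//
//     if (true) { a(); } else { b(); }  ->  a();   // constant test keeps a single branch
//     if (x > 0) { }                    ->  Nop    // empty if with a side-effect-free test
//     int unused = 3;                   ->  Nop    // declaration of a dead variable
//     switch (2) { case 2: b(); ... }   ->  b();   // constant switch, reduced via block_for_case()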
|
|
|
|
|
2020-09-09 13:39:34 +00:00
|
|
|
bool Compiler::scanCFG(FunctionDefinition& f) {
|
|
|
|
bool madeChanges = false;
|
|
|
|
|
2017-04-20 23:31:52 +00:00
|
|
|
CFG cfg = CFGGenerator().getCFG(f);
|
|
|
|
this->computeDataFlow(&cfg);
|
2016-10-13 20:25:34 +00:00
|
|
|
|
|
|
|
// check for unreachable code
|
|
|
|
for (size_t i = 0; i < cfg.fBlocks.size(); i++) {
|
2020-09-09 13:39:34 +00:00
|
|
|
const BasicBlock& block = cfg.fBlocks[i];
|
|
|
|
if (i != cfg.fStart && !block.fEntrances.size() && block.fNodes.size()) {
|
2017-09-11 20:50:14 +00:00
|
|
|
int offset;
|
2020-09-09 13:39:34 +00:00
|
|
|
const BasicBlock::Node& node = block.fNodes[0];
|
2020-09-28 20:08:58 +00:00
|
|
|
if (node.isStatement()) {
|
|
|
|
offset = (*node.statement())->fOffset;
|
|
|
|
} else {
|
|
|
|
offset = (*node.expression())->fOffset;
|
|
|
|
if ((*node.expression())->is<BoolLiteral>()) {
|
|
|
|
// Function inlining can generate do { ... } while(false) loops which always
|
|
|
|
// break, so the boolean condition is considered unreachable. Since not being
|
|
|
|
// able to reach a literal is a non-issue in the first place, we don't report an
|
|
|
|
// error in this case.
|
|
|
|
continue;
|
|
|
|
}
|
2017-01-19 18:32:00 +00:00
|
|
|
}
|
2017-09-11 20:50:14 +00:00
|
|
|
this->error(offset, String("unreachable"));
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if (fErrorCount) {
|
2020-09-09 13:39:34 +00:00
|
|
|
return madeChanges;
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
|
|
|
|
2017-04-20 23:31:52 +00:00
|
|
|
// check for dead code & undefined variables, perform constant propagation
|
|
|
|
std::unordered_set<const Variable*> undefinedVariables;
|
|
|
|
bool updated;
|
|
|
|
bool needsRescan = false;
|
|
|
|
do {
|
|
|
|
if (needsRescan) {
|
|
|
|
cfg = CFGGenerator().getCFG(f);
|
|
|
|
this->computeDataFlow(&cfg);
|
|
|
|
needsRescan = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
updated = false;
|
2020-06-19 19:32:49 +00:00
|
|
|
bool first = true;
|
2017-04-20 23:31:52 +00:00
|
|
|
for (BasicBlock& b : cfg.fBlocks) {
|
2020-06-19 19:32:49 +00:00
|
|
|
if (!first && b.fEntrances.empty()) {
|
|
|
|
// Block was reachable before optimization, but has since become unreachable. In
|
|
|
|
// addition to being dead code, it's broken - since control flow can't reach it, no
|
|
|
|
                // prior variable definitions can reach it, and therefore variables might appear never to
|
|
|
|
                // have been properly assigned. Kill it.
|
2020-09-09 19:15:06 +00:00
|
|
|
|
|
|
|
// We need to do this in two steps. For any variable declarations, the node list
|
|
|
|
// will contain statement nodes for each VarDeclaration, and then a statement for
|
|
|
|
// the VarDeclarationsStatement. When we replace the VDS with a Nop, we delete the
|
|
|
|
// storage of the unique_ptr that the VD nodes are pointing to. So we remove those
|
|
|
|
// from the node list entirely, first.
|
2020-09-28 20:08:58 +00:00
|
|
|
b.fNodes.erase(std::remove_if(b.fNodes.begin(), b.fNodes.end(),
|
|
|
|
[](const BasicBlock::Node& node) {
|
|
|
|
return node.isStatement() &&
|
|
|
|
(*node.statement())->is<VarDeclaration>();
|
|
|
|
}),
|
|
|
|
b.fNodes.end());
|
2020-09-09 19:15:06 +00:00
|
|
|
|
|
|
|
// Now replace any remaining statements in the block with Nops.
|
2020-06-19 19:32:49 +00:00
|
|
|
for (BasicBlock::Node& node : b.fNodes) {
|
2020-09-28 20:08:58 +00:00
|
|
|
if (node.isStatement() && !(*node.statement())->is<Nop>()) {
|
2020-09-09 13:39:34 +00:00
|
|
|
node.setStatement(std::make_unique<Nop>());
|
|
|
|
madeChanges = true;
|
2020-06-19 19:32:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
first = false;
|
2017-04-20 23:31:52 +00:00
|
|
|
DefinitionMap definitions = b.fBefore;
|
|
|
|
|
|
|
|
for (auto iter = b.fNodes.begin(); iter != b.fNodes.end() && !needsRescan; ++iter) {
|
2020-09-28 20:08:58 +00:00
|
|
|
if (iter->isExpression()) {
|
2017-04-20 23:31:52 +00:00
|
|
|
this->simplifyExpression(definitions, b, &iter, &undefinedVariables, &updated,
|
|
|
|
&needsRescan);
|
|
|
|
} else {
|
|
|
|
this->simplifyStatement(definitions, b, &iter, &undefinedVariables, &updated,
|
2020-09-09 13:39:34 +00:00
|
|
|
&needsRescan);
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
2017-05-17 14:52:55 +00:00
|
|
|
if (needsRescan) {
|
|
|
|
break;
|
|
|
|
}
|
2017-04-20 23:31:52 +00:00
|
|
|
this->addDefinitions(*iter, &definitions);
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
2020-09-14 15:32:49 +00:00
|
|
|
|
|
|
|
if (needsRescan) {
|
|
|
|
break;
|
|
|
|
}
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
2020-09-09 13:39:34 +00:00
|
|
|
madeChanges |= updated;
|
2017-04-20 23:31:52 +00:00
|
|
|
} while (updated);
|
2018-06-12 15:05:59 +00:00
|
|
|
SkASSERT(!needsRescan);
|
2016-10-13 20:25:34 +00:00
|
|
|
|
2017-06-22 15:24:38 +00:00
|
|
|
// verify static ifs & switches, clean up dead variable decls
|
2017-05-10 19:06:17 +00:00
|
|
|
for (BasicBlock& b : cfg.fBlocks) {
|
|
|
|
DefinitionMap definitions = b.fBefore;
|
|
|
|
|
2017-06-22 15:24:38 +00:00
|
|
|
for (auto iter = b.fNodes.begin(); iter != b.fNodes.end() && !needsRescan;) {
|
2020-09-28 20:08:58 +00:00
|
|
|
if (iter->isStatement()) {
|
2017-05-10 19:06:17 +00:00
|
|
|
const Statement& s = **iter->statement();
|
2020-09-08 14:22:09 +00:00
|
|
|
switch (s.kind()) {
|
|
|
|
case Statement::Kind::kIf:
|
2020-08-18 15:19:07 +00:00
|
|
|
if (s.as<IfStatement>().fIsStatic &&
|
2017-07-14 14:12:15 +00:00
|
|
|
!(fFlags & kPermitInvalidStaticTests_Flag)) {
|
2017-09-11 20:50:14 +00:00
|
|
|
this->error(s.fOffset, "static if has non-static test");
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2017-06-22 15:24:38 +00:00
|
|
|
++iter;
|
2017-05-10 19:06:17 +00:00
|
|
|
break;
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kSwitch:
|
2020-08-18 15:19:07 +00:00
|
|
|
if (s.as<SwitchStatement>().fIsStatic &&
|
2020-09-09 13:39:34 +00:00
|
|
|
!(fFlags & kPermitInvalidStaticTests_Flag)) {
|
2017-09-11 20:50:14 +00:00
|
|
|
this->error(s.fOffset, "static switch has non-static test");
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
2017-06-22 15:24:38 +00:00
|
|
|
++iter;
|
|
|
|
break;
|
2020-09-08 14:22:09 +00:00
|
|
|
case Statement::Kind::kVarDeclarations: {
|
2020-08-18 15:19:07 +00:00
|
|
|
VarDeclarations& decls = *s.as<VarDeclarationsStatement>().fDeclaration;
|
2020-09-09 13:39:34 +00:00
|
|
|
decls.fVars.erase(
|
|
|
|
std::remove_if(decls.fVars.begin(), decls.fVars.end(),
|
|
|
|
[&](const std::unique_ptr<Statement>& var) {
|
|
|
|
bool nop = var->is<Nop>();
|
|
|
|
madeChanges |= nop;
|
|
|
|
return nop;
|
|
|
|
}),
|
|
|
|
decls.fVars.end());
|
|
|
|
if (decls.fVars.empty()) {
|
2017-11-07 14:42:10 +00:00
|
|
|
iter = b.fNodes.erase(iter);
|
|
|
|
} else {
|
|
|
|
++iter;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2017-05-10 19:06:17 +00:00
|
|
|
default:
|
2017-06-22 15:24:38 +00:00
|
|
|
++iter;
|
2017-05-10 19:06:17 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-06-22 15:24:38 +00:00
|
|
|
} else {
|
|
|
|
++iter;
|
2017-05-10 19:06:17 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-10-13 20:25:34 +00:00
|
|
|
// check for missing return
|
2018-03-27 18:10:52 +00:00
|
|
|
if (f.fDeclaration.fReturnType != *fContext->fVoid_Type) {
|
2016-10-13 20:25:34 +00:00
|
|
|
if (cfg.fBlocks[cfg.fExit].fEntrances.size()) {
|
2019-11-22 19:06:12 +00:00
|
|
|
this->error(f.fOffset, String("function '" + String(f.fDeclaration.fName) +
|
|
|
|
"' can exit without returning a value"));
|
2016-10-13 20:25:34 +00:00
|
|
|
}
|
|
|
|
}
|
2020-09-09 13:39:34 +00:00
|
|
|
|
|
|
|
return madeChanges;
|
2016-10-13 20:25:34 +00:00
|
|
|
}
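// For illustration only: the unreachable-code check above reports statements no control flow
// path can reach, e.g.
//
//     return x;
//     y = 2;      // reported as "unreachable"
//
// and the optimization loop keeps re-running simplifyExpression()/simplifyStatement(),
// rebuilding the CFG whenever a rewrite invalidates it, until a pass makes no further changes.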
|
|
|
|
|
2020-09-23 17:55:20 +00:00
|
|
|
std::unique_ptr<Program> Compiler::convertProgram(
|
|
|
|
Program::Kind kind,
|
|
|
|
String text,
|
|
|
|
const Program::Settings& settings,
|
|
|
|
const std::vector<std::unique_ptr<ExternalValue>>* externalValues) {
|
|
|
|
SkASSERT(!externalValues || (kind == Program::kGeneric_Kind));
|
2019-05-15 19:29:54 +00:00
|
|
|
|
2016-07-01 15:22:01 +00:00
|
|
|
fErrorText = "";
|
|
|
|
fErrorCount = 0;
|
2020-08-31 21:29:21 +00:00
|
|
|
fInliner.reset(context(), settings);
|
2018-04-24 17:06:09 +00:00
|
|
|
std::vector<std::unique_ptr<ProgramElement>>* inherited;
|
2016-07-25 17:08:54 +00:00
|
|
|
std::vector<std::unique_ptr<ProgramElement>> elements;
|
2016-07-01 15:22:01 +00:00
|
|
|
switch (kind) {
|
|
|
|
case Program::kVertex_Kind:
|
2018-04-24 17:06:09 +00:00
|
|
|
inherited = &fVertexInclude;
|
2020-08-26 23:46:27 +00:00
|
|
|
fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
|
2020-09-23 18:42:11 +00:00
|
|
|
fIRGenerator->start(&settings, fVertexSymbolTable, inherited);
|
2016-07-01 15:22:01 +00:00
|
|
|
break;
|
|
|
|
case Program::kFragment_Kind:
|
2018-04-24 17:06:09 +00:00
|
|
|
inherited = &fFragmentInclude;
|
2020-08-26 23:46:27 +00:00
|
|
|
fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
|
2020-09-23 18:42:11 +00:00
|
|
|
fIRGenerator->start(&settings, fFragmentSymbolTable, inherited);
|
2016-07-01 15:22:01 +00:00
|
|
|
break;
|
2017-02-16 21:37:32 +00:00
|
|
|
case Program::kGeometry_Kind:
|
2020-07-28 18:46:53 +00:00
|
|
|
this->loadGeometryIntrinsics();
|
2018-04-24 17:06:09 +00:00
|
|
|
inherited = &fGeometryInclude;
|
2020-08-26 23:46:27 +00:00
|
|
|
fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
|
2020-09-23 18:42:11 +00:00
|
|
|
fIRGenerator->start(&settings, fGeometrySymbolTable, inherited);
|
2017-02-16 21:37:32 +00:00
|
|
|
break;
|
2020-08-06 17:00:19 +00:00
|
|
|
case Program::kFragmentProcessor_Kind: {
|
Reland "Untangle dependency cycle in sksl dehydration"
Explanation: The sksl standalone compiler is used to convert the raw
(text) SkSL pre-includes into a "dehydrated" binary format. It also
(previously) depended on those files, as they were #included and used,
unless a special #define was changed. This created a dependency cycle
that we hid from GN (by lying about the outputs of the dehydrate step).
As a result, builds would never reach steady-state, because the compiler
would be rebuilt (due to the newer dehydrated files), and then the
dehydrated files would be rebuilt (due to the newer compiler).
This CL changes the logic so that the standalone compiler always uses
the textual pre-includes, and no longer depends on the dehydrated binary
files. Thus, to make any kind of change to the dehydrated files (whether
due to pre-include changes, or the encoding format itself), you just
need skia_compile_processors enabled. The dependencies are now honestly
communicated to GN, and we reach steady state after one build.
The NOTE above is because GN/ninja cache the dependencies of each
target, and will still think that the SkSLCompiler.obj linked into the
standalone compiler depends on the dehydrated files, at least until one
successful build, when it will realize that's no longer true.
Reland notes:
The bots originally rejected this CL, because SkSLCompiler was
hard-coded to load the text files from a relative path that assumed the
executable was in "<skia_checkout>/out/<some_dir>". That's not true for
bots, and it was fragile, even for users. Now, we use GN to directly
generate sksl_fp.sksl, and copy all of the other pre-includes to the
root out dir (working directory when running skslc). This means we
no longer need to generate the sksl_fp.sksl file into the src tree, and
the compiler can more safely assume that the files will be in the
working directory.
Bug: skia:10571
Change-Id: Id7837a9aba7ee0c3f7fa82eb84f7761e24b9c705
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/308896
Reviewed-by: Mike Klein <mtklein@google.com>
Reviewed-by: John Stiles <johnstiles@google.com>
Reviewed-by: Michael Ludwig <michaelludwig@google.com>
Commit-Queue: Brian Osman <brianosman@google.com>
2020-08-08 12:17:18 +00:00
|
|
|
#if !SKSL_STANDALONE
|
2020-07-28 18:46:53 +00:00
|
|
|
{
|
|
|
|
Rehydrator rehydrator(fContext.get(), fGpuSymbolTable, this,
|
|
|
|
SKSL_INCLUDE_sksl_fp,
|
|
|
|
SKSL_INCLUDE_sksl_fp_LENGTH);
|
|
|
|
fFPSymbolTable = rehydrator.symbolTable();
|
|
|
|
fFPInclude = rehydrator.elements();
|
|
|
|
}
|
2020-09-21 15:32:10 +00:00
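            // A minimal sketch of the chaining idea described above. Names and the payload type
            // are hypothetical (the real IRIntrinsicMap stores program elements and lives with
            // the IR generator); the point is only that a lookup which misses in a child map
            // falls through to its parent, so the FP-level map holds FP-specific entries and
            // defers everything else to the GPU-level map. Assumes <string> and <unordered_map>.
#if 0
            struct ChainedIntrinsicMapSketch {
                ChainedIntrinsicMapSketch* fParent = nullptr;   // e.g. the GPU-level map
                std::unordered_map<std::string, int> fEntries;  // payload type is illustrative

                const int* find(const std::string& name) const {
                    auto found = fEntries.find(name);
                    if (found != fEntries.end()) {
                        return &found->second;
                    }
                    // Fall through to the parent map, exactly like a nested SymbolTable.
                    return fParent ? fParent->find(name) : nullptr;
                }
            };
#endif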
            fFPIntrinsics = std::make_unique<IRIntrinsicMap>(fGPUIntrinsics.get());
            grab_intrinsics(&fFPInclude, fFPIntrinsics.get());
            inherited = &fFPInclude;
            fIRGenerator->fIntrinsics = fFPIntrinsics.get();
            /*
             * Various cleanup related to symbol tables:
             *
             * - A spurious symbol table inserted by convertProgram has been removed. start()
             *   already pushes a symbol table, and convertProgram was pushing a second one, which
             *   didn't seem necessary. (The Parser can inject symbols for types it discovers, but
             *   there is no clear reason those need to live in a different table than the rest of
             *   the program elements.)
             * - The table pushed by convertProgram had a comment claiming it was popped by the
             *   Compiler. That wasn't true, so removing it gets us one step closer to balance.
             * - The table pushed by start() is meant to be balanced by a pop in finish(), but no
             *   one ever called finish(). That call has been added, and the base symbol table is
             *   now a parameter to start() rather than being set directly on the IR generator.
             *   (There's more of this pattern around, but the scope here is deliberately limited.)
             * - When dehydrating the include files, we had logic to work around the extra symbol
             *   table (absorbing its symbols); that is no longer needed.
             * - processIncludeFile has been simplified (no need to make so many string copies),
             *   and incoming include-file strings always go into the root table; it's largely
             *   irrelevant where they live.
             */
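            // A minimal sketch of the start()/finish() balance described above. Types are
            // hypothetical (the real IRGenerator manages its own SymbolTable chain); the point is
            // that start() pushes exactly one scope for the program being converted and finish()
            // pops it again, so the two calls must stay paired. Assumes <memory> and <vector>.
#if 0
            struct ScopedGeneratorSketch {
                std::vector<std::shared_ptr<SymbolTable>> fScopes;

                void start(std::shared_ptr<SymbolTable> base) {
                    fScopes.push_back(std::move(base));  // push the base table for this program
                }
                void finish() {
                    fScopes.pop_back();                  // must mirror the push in start()
                }
            };
#endif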
            fIRGenerator->start(&settings, fFPSymbolTable, inherited);
            break;
#else
            inherited = nullptr;
            fIRGenerator->start(&settings, fGpuSymbolTable, /*inherited=*/nullptr,
                                /*builtin=*/true);
            fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
            std::ifstream in(SKSL_FP_INCLUDE);
            std::string stdText{std::istreambuf_iterator<char>(in),
                                std::istreambuf_iterator<char>()};
            if (in.rdstate()) {
                printf("error reading %s\n", SKSL_FP_INCLUDE);
                abort();
            }
            const String* source = fRootSymbolTable->takeOwnershipOfString(
                    std::make_unique<String>(stdText.c_str()));
            fIRGenerator->convertProgram(kind, source->c_str(), source->length(), &elements);
            fIRGenerator->fIsBuiltinCode = false;
            break;
#endif
        }
        case Program::kPipelineStage_Kind:
            this->loadPipelineIntrinsics();
            inherited = &fPipelineInclude;
            fIRGenerator->fIntrinsics = fGPUIntrinsics.get();
            fIRGenerator->start(&settings, fPipelineSymbolTable, inherited);
            break;
        case Program::kGeneric_Kind:
            this->loadInterpreterIntrinsics();
            inherited = nullptr;
            fIRGenerator->fIntrinsics = fInterpreterIntrinsics.get();
            fIRGenerator->start(&settings, fInterpreterSymbolTable, /*inherited=*/nullptr);
            break;
    }
    if (externalValues) {
        // Add any external values to the symbol table. IRGenerator::start() has pushed a table,
        // so we're only making these visible to the current Program.
        for (const auto& ev : *externalValues) {
            fIRGenerator->fSymbolTable->addWithoutOwnership(ev->fName, ev.get());
        }
    }
    std::unique_ptr<String> textPtr(new String(std::move(text)));
    fSource = textPtr.get();
    fIRGenerator->convertProgram(kind, textPtr->c_str(), textPtr->size(), &elements);
    auto result = std::make_unique<Program>(kind,
                                            std::move(textPtr),
                                            settings,
                                            fContext,
                                            inherited,
                                            std::move(elements),
                                            fIRGenerator->fSymbolTable,
                                            fIRGenerator->fInputs);
    fIRGenerator->finish();
    if (fErrorCount) {
        return nullptr;
    }
    if (settings.fOptimize && !this->optimize(*result)) {
        return nullptr;
    }
    return result;
}
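
// A minimal sketch of how a caller drives convertProgram() and one of the backends above.
// The SkSL source string is hypothetical and the signatures are abbreviated; see SkSLCompiler.h
// for the real declarations.
#if 0
static void compileToGLSLSketch() {
    SkSL::Compiler compiler;
    SkSL::Program::Settings settings;
    std::unique_ptr<SkSL::Program> program = compiler.convertProgram(
            SkSL::Program::kFragment_Kind,
            SkSL::String("void main() { sk_FragColor = half4(1); }"),
            settings);
    SkSL::String glsl;
    if (!program || !compiler.toGLSL(*program, &glsl)) {
        // errorText() drains the accumulated error messages (see the error helpers below).
        printf("%s", compiler.errorText().c_str());
    }
}
#endif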

bool Compiler::optimize(Program& program) {
    SkASSERT(!fErrorCount);
    fIRGenerator->fKind = program.fKind;
    fIRGenerator->fSettings = &program.fSettings;

    while (fErrorCount == 0) {
        bool madeChanges = false;

        // Scan and optimize based on the control-flow graph for each function.
        for (ProgramElement& element : program) {
            if (element.is<FunctionDefinition>()) {
                madeChanges |= this->scanCFG(element.as<FunctionDefinition>());
            }
        }

        // Perform inline-candidate analysis and inline any functions deemed suitable.
        madeChanges |= fInliner.analyze(program);

        // Remove dead functions. We wait until after analysis so that we still report errors,
        // even in unused code.
        if (program.fSettings.fRemoveDeadFunctions) {
            program.fElements.erase(
                    std::remove_if(program.fElements.begin(),
                                   program.fElements.end(),
                                   [&](const std::unique_ptr<ProgramElement>& element) {
                                       if (!element->is<FunctionDefinition>()) {
                                           return false;
                                       }
                                       const auto& fn = element->as<FunctionDefinition>();
                                       bool dead = fn.fDeclaration.fCallCount == 0 &&
                                                   fn.fDeclaration.fName != "main";
                                       madeChanges |= dead;
                                       return dead;
                                   }),
                    program.fElements.end());
        }

        if (program.fKind != Program::kFragmentProcessor_Kind) {
            // Remove dead variables.
            for (ProgramElement& element : program) {
                if (!element.is<VarDeclarations>()) {
                    continue;
                }
                VarDeclarations& vars = element.as<VarDeclarations>();
                vars.fVars.erase(
                        std::remove_if(vars.fVars.begin(), vars.fVars.end(),
                                       [&](const std::unique_ptr<Statement>& stmt) {
                                           bool dead = stmt->as<VarDeclaration>().fVar->dead();
                                           madeChanges |= dead;
                                           return dead;
                                       }),
                        vars.fVars.end());
            }

            // Remove empty variable declarations with no variables left inside of them.
            program.fElements.erase(
                    std::remove_if(program.fElements.begin(), program.fElements.end(),
                                   [&](const std::unique_ptr<ProgramElement>& element) {
                                       if (!element->is<VarDeclarations>()) {
                                           return false;
                                       }
                                       bool dead = element->as<VarDeclarations>().fVars.empty();
                                       madeChanges |= dead;
                                       return dead;
                                   }),
                    program.fElements.end());
        }

        if (!madeChanges) {
            break;
        }
    }
    return fErrorCount == 0;
}
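
// The dead-code passes above all use the standard erase/remove_if idiom, and optimize() runs its
// passes to a fixed point: it keeps looping until a full pass makes no changes. A standalone
// illustration on hypothetical data (assumes <algorithm> and <vector>):
#if 0
static void removeDeadEntriesSketch(std::vector<int>* callCounts) {
    for (;;) {
        bool madeChanges = false;
        // "Dead" here is any entry with a call count of zero, mirroring the fCallCount test above.
        callCounts->erase(std::remove_if(callCounts->begin(), callCounts->end(),
                                         [&](int count) {
                                             bool dead = (count == 0);
                                             madeChanges |= dead;
                                             return dead;
                                         }),
                          callCounts->end());
        if (!madeChanges) {
            break;  // fixed point reached; nothing was removed on this pass
        }
    }
}
#endif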

#if defined(SKSL_STANDALONE) || SK_SUPPORT_GPU

bool Compiler::toSPIRV(Program& program, OutputStream& out) {
#ifdef SK_ENABLE_SPIRV_VALIDATION
    StringStream buffer;
    fSource = program.fSource.get();
    SPIRVCodeGenerator cg(fContext.get(), &program, this, &buffer);
    bool result = cg.generateCode();
    fSource = nullptr;
    if (result) {
        spvtools::SpirvTools tools(SPV_ENV_VULKAN_1_0);
        const String& data = buffer.str();
        SkASSERT(0 == data.size() % 4);
        auto dumpmsg = [](spv_message_level_t, const char*, const spv_position_t&, const char* m) {
            SkDebugf("SPIR-V validation error: %s\n", m);
        };
        tools.SetMessageConsumer(dumpmsg);
        // Verify that the SPIR-V we produced is valid. If this SkASSERT fails, check the logs
        // prior to the failure to see the validation errors.
        SkAssertResult(tools.Validate((const uint32_t*) data.c_str(), data.size() / 4));
        out.write(data.c_str(), data.size());
    }
#else
    fSource = program.fSource.get();
    SPIRVCodeGenerator cg(fContext.get(), &program, this, &out);
    bool result = cg.generateCode();
    fSource = nullptr;
#endif
    return result;
}

bool Compiler::toSPIRV(Program& program, String* out) {
    StringStream buffer;
    bool result = this->toSPIRV(program, buffer);
    if (result) {
        *out = buffer.str();
    }
    return result;
}

bool Compiler::toGLSL(Program& program, OutputStream& out) {
    fSource = program.fSource.get();
    GLSLCodeGenerator cg(fContext.get(), &program, this, &out);
    bool result = cg.generateCode();
    fSource = nullptr;
    return result;
}

bool Compiler::toGLSL(Program& program, String* out) {
    StringStream buffer;
    bool result = this->toGLSL(program, buffer);
    if (result) {
        *out = buffer.str();
    }
    return result;
}

bool Compiler::toHLSL(Program& program, String* out) {
    String spirv;
    if (!this->toSPIRV(program, &spirv)) {
        return false;
    }
    return SPIRVtoHLSL(spirv, out);
}

bool Compiler::toMetal(Program& program, OutputStream& out) {
    MetalCodeGenerator cg(fContext.get(), &program, this, &out);
    bool result = cg.generateCode();
    return result;
}

bool Compiler::toMetal(Program& program, String* out) {
    StringStream buffer;
    bool result = this->toMetal(program, buffer);
    if (result) {
        *out = buffer.str();
    }
    return result;
}

#if defined(SKSL_STANDALONE) || GR_TEST_UTILS

bool Compiler::toCPP(Program& program, String name, OutputStream& out) {
    fSource = program.fSource.get();
    CPPCodeGenerator cg(fContext.get(), &program, this, name, &out);
    bool result = cg.generateCode();
    fSource = nullptr;
    return result;
}

bool Compiler::toH(Program& program, String name, OutputStream& out) {
    fSource = program.fSource.get();
    HCodeGenerator cg(fContext.get(), &program, this, name, &out);
    bool result = cg.generateCode();
    fSource = nullptr;
    return result;
}

#endif // defined(SKSL_STANDALONE) || GR_TEST_UTILS

#endif // defined(SKSL_STANDALONE) || SK_SUPPORT_GPU

#if !defined(SKSL_STANDALONE) && SK_SUPPORT_GPU

/*
 * Remove 'in' variables from SkRuntimeEffect.
 *
 * Runtime effects previously allowed two kinds of global input variables: 'in' variables could be
 * bool, int, or float; 'uniform' variables could be float, vector, or matrix. Uniform variables
 * worked like you'd expect, but 'in' variables were baked into the program statically. There was
 * a large amount of machinery to make this work, and it meant that 'in' variables needed to have
 * values before we could make decisions about program caching, and before we could catch some
 * errors. It was also essentially syntactic sugar over the client inserting the value into their
 * SkSL as a string. Finally: no one was using the feature.
 *
 * To simplify the mental model and make the API much more predictable, 'in' variables have been
 * removed entirely. We no longer need to "specialize" runtime effect programs, which means we can
 * catch more errors up front (those not detected until optimization). All of the API that
 * referred to "inputs" (the previous term that unified 'in' and 'uniform') now just refers to
 * "uniforms".
 */
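
// A sketch of what the change described above means for client SkSL. The effect source is
// hypothetical and the runtime-effect entry-point signature has varied across Skia versions;
// the point is only that values which used to be declared 'in' are now ordinary uniforms,
// supplied at draw time rather than baked into the program.
#if 0
constexpr char kRuntimeEffectSkSL[] =
        "uniform half4 color;"                                   // was: "in half4 color;"
        "void main(inout half4 outColor) { outColor = color; }";
#endif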

bool Compiler::toPipelineStage(Program& program, PipelineStageArgs* outArgs) {
    fSource = program.fSource.get();
    StringStream buffer;
    PipelineStageCodeGenerator cg(fContext.get(), &program, this, &buffer, outArgs);
    bool result = cg.generateCode();
    fSource = nullptr;
    if (result) {
        outArgs->fCode = buffer.str();
    }
    return result;
}
#endif

std::unique_ptr<ByteCode> Compiler::toByteCode(Program& program) {
#if defined(SK_ENABLE_SKSL_INTERPRETER)
    fSource = program.fSource.get();
    std::unique_ptr<ByteCode> result(new ByteCode());
    ByteCodeGenerator cg(fContext.get(), &program, this, result.get());
    bool success = cg.generateCode();
    fSource = nullptr;
    if (success) {
        return result;
    }
#else
    ABORT("ByteCode interpreter not enabled");
#endif
    return nullptr;
}

const char* Compiler::OperatorName(Token::Kind op) {
    switch (op) {
        case Token::Kind::TK_PLUS:         return "+";
        case Token::Kind::TK_MINUS:        return "-";
        case Token::Kind::TK_STAR:         return "*";
        case Token::Kind::TK_SLASH:        return "/";
        case Token::Kind::TK_PERCENT:      return "%";
        case Token::Kind::TK_SHL:          return "<<";
        case Token::Kind::TK_SHR:          return ">>";
        case Token::Kind::TK_LOGICALNOT:   return "!";
        case Token::Kind::TK_LOGICALAND:   return "&&";
        case Token::Kind::TK_LOGICALOR:    return "||";
        case Token::Kind::TK_LOGICALXOR:   return "^^";
        case Token::Kind::TK_BITWISENOT:   return "~";
        case Token::Kind::TK_BITWISEAND:   return "&";
        case Token::Kind::TK_BITWISEOR:    return "|";
        case Token::Kind::TK_BITWISEXOR:   return "^";
        case Token::Kind::TK_EQ:           return "=";
        case Token::Kind::TK_EQEQ:         return "==";
        case Token::Kind::TK_NEQ:          return "!=";
        case Token::Kind::TK_LT:           return "<";
        case Token::Kind::TK_GT:           return ">";
        case Token::Kind::TK_LTEQ:         return "<=";
        case Token::Kind::TK_GTEQ:         return ">=";
        case Token::Kind::TK_PLUSEQ:       return "+=";
        case Token::Kind::TK_MINUSEQ:      return "-=";
        case Token::Kind::TK_STAREQ:       return "*=";
        case Token::Kind::TK_SLASHEQ:      return "/=";
        case Token::Kind::TK_PERCENTEQ:    return "%=";
        case Token::Kind::TK_SHLEQ:        return "<<=";
        case Token::Kind::TK_SHREQ:        return ">>=";
        case Token::Kind::TK_LOGICALANDEQ: return "&&=";
        case Token::Kind::TK_LOGICALOREQ:  return "||=";
        case Token::Kind::TK_LOGICALXOREQ: return "^^=";
        case Token::Kind::TK_BITWISEANDEQ: return "&=";
        case Token::Kind::TK_BITWISEOREQ:  return "|=";
        case Token::Kind::TK_BITWISEXOREQ: return "^=";
        case Token::Kind::TK_PLUSPLUS:     return "++";
        case Token::Kind::TK_MINUSMINUS:   return "--";
        case Token::Kind::TK_COMMA:        return ",";
        default:
            ABORT("unsupported operator: %d\n", (int) op);
    }
}

bool Compiler::IsAssignment(Token::Kind op) {
    switch (op) {
        case Token::Kind::TK_EQ:           // fall through
        case Token::Kind::TK_PLUSEQ:       // fall through
        case Token::Kind::TK_MINUSEQ:      // fall through
        case Token::Kind::TK_STAREQ:       // fall through
        case Token::Kind::TK_SLASHEQ:      // fall through
        case Token::Kind::TK_PERCENTEQ:    // fall through
        case Token::Kind::TK_SHLEQ:        // fall through
        case Token::Kind::TK_SHREQ:        // fall through
        case Token::Kind::TK_BITWISEOREQ:  // fall through
        case Token::Kind::TK_BITWISEXOREQ: // fall through
        case Token::Kind::TK_BITWISEANDEQ: // fall through
        case Token::Kind::TK_LOGICALOREQ:  // fall through
        case Token::Kind::TK_LOGICALXOREQ: // fall through
        case Token::Kind::TK_LOGICALANDEQ:
            return true;
        default:
            return false;
    }
}

Token::Kind Compiler::RemoveAssignment(Token::Kind op) {
    switch (op) {
        case Token::Kind::TK_PLUSEQ:       return Token::Kind::TK_PLUS;
        case Token::Kind::TK_MINUSEQ:      return Token::Kind::TK_MINUS;
        case Token::Kind::TK_STAREQ:       return Token::Kind::TK_STAR;
        case Token::Kind::TK_SLASHEQ:      return Token::Kind::TK_SLASH;
        case Token::Kind::TK_PERCENTEQ:    return Token::Kind::TK_PERCENT;
        case Token::Kind::TK_SHLEQ:        return Token::Kind::TK_SHL;
        case Token::Kind::TK_SHREQ:        return Token::Kind::TK_SHR;
        case Token::Kind::TK_BITWISEOREQ:  return Token::Kind::TK_BITWISEOR;
        case Token::Kind::TK_BITWISEXOREQ: return Token::Kind::TK_BITWISEXOR;
        case Token::Kind::TK_BITWISEANDEQ: return Token::Kind::TK_BITWISEAND;
        case Token::Kind::TK_LOGICALOREQ:  return Token::Kind::TK_LOGICALOR;
        case Token::Kind::TK_LOGICALXOREQ: return Token::Kind::TK_LOGICALXOR;
        case Token::Kind::TK_LOGICALANDEQ: return Token::Kind::TK_LOGICALAND;
        default: return op;
    }
}
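
// A worked example tying the three operator helpers above together; the values follow directly
// from the tables they implement (assumes <cstring> for strcmp).
#if 0
static void operatorHelperExampleSketch() {
    SkASSERT(0 == strcmp(Compiler::OperatorName(Token::Kind::TK_SHLEQ), "<<="));
    SkASSERT(Compiler::IsAssignment(Token::Kind::TK_SHLEQ));
    SkASSERT(Compiler::RemoveAssignment(Token::Kind::TK_SHLEQ) == Token::Kind::TK_SHL);
}
#endif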

Position Compiler::position(int offset) {
    SkASSERT(fSource);
    int line = 1;
    int column = 1;
    for (int i = 0; i < offset; i++) {
        if ((*fSource)[i] == '\n') {
            ++line;
            column = 1;
        }
        else {
            ++column;
        }
    }
    return Position(line, column);
}
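
// Worked example of the offset-to-position mapping above: with *fSource == "abc\nde" and
// offset == 5, the loop scans 'a', 'b', 'c', '\n', 'd' and returns Position(2, 2).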

void Compiler::error(int offset, String msg) {
    fErrorCount++;
    Position pos = this->position(offset);
    fErrorText += "error: " + to_string(pos.fLine) + ": " + msg.c_str() + "\n";
}

String Compiler::errorText() {
    this->writeErrorCount();
    fErrorCount = 0;
    String result = fErrorText;
    return result;
}

void Compiler::writeErrorCount() {
    if (fErrorCount) {
        fErrorText += to_string(fErrorCount) + " error";
        if (fErrorCount > 1) {
            fErrorText += "s";
        }
        fErrorText += "\n";
    }
}

} // namespace SkSL