2019-03-25 15:54:59 +00:00
|
|
|
/*
|
|
|
|
* Copyright 2014 Google Inc.
|
|
|
|
*
|
|
|
|
* Use of this source code is governed by a BSD-style license that can be
|
|
|
|
* found in the LICENSE file.
|
|
|
|
*/
|
|
|
|
|
2019-04-23 17:05:21 +00:00
|
|
|
#include "include/core/SkExecutor.h"
|
|
|
|
#include "include/gpu/GrContextOptions.h"
|
|
|
|
#include "tools/flags/CommonFlags.h"
|
2019-03-25 15:54:59 +00:00
|
|
|
|
|
|
|
DEFINE_int(gpuThreads,
|
|
|
|
2,
|
|
|
|
"Create this many extra threads to assist with GPU work, "
|
|
|
|
"including software path rendering. Defaults to two.");
|
|
|
|
|
|
|
|
static DEFINE_bool(cachePathMasks, true,
|
|
|
|
"Allows path mask textures to be cached in GPU configs.");
|
|
|
|
|
|
|
|
static DEFINE_bool(noGS, false, "Disables support for geometry shaders.");
|
|
|
|
|
2019-03-30 06:31:23 +00:00
|
|
|
static DEFINE_bool(cc, false, "Allow coverage counting shortcuts to render paths?");
|
|
|
|
|
2019-03-25 15:54:59 +00:00
|
|
|
static DEFINE_string(pr, "",
|
|
|
|
"Set of enabled gpu path renderers. Defined as a list of: "
|
|
|
|
"[~]none [~]dashline [~]nvpr [~]ccpr [~]aahairline [~]aaconvex [~]aalinearizing "
|
|
|
|
"[~]small [~]tess] [~]all");
|
|
|
|
|
2019-12-27 20:47:25 +00:00
|
|
|
static DEFINE_int(internalSamples, 4,
|
|
|
|
"Number of samples for internal draws that use MSAA or mixed samples.");
|
|
|
|
|
2019-03-25 15:54:59 +00:00
|
|
|
static DEFINE_bool(disableDriverCorrectnessWorkarounds, false,
|
|
|
|
"Disables all GPU driver correctness workarounds");
|
|
|
|
|
2019-08-22 20:19:24 +00:00
|
|
|
static DEFINE_bool(reduceOpsTaskSplitting, false, "Improve opsTask sorting");
|
|
|
|
static DEFINE_bool(dontReduceOpsTaskSplitting, false, "Allow more opsTask splitting");
|
2019-03-25 15:54:59 +00:00
|
|
|
|
|
|
|
// Maps a single --pr token (e.g. "ccpr", "default") to its GpuPathRenderers
// flag value. Aborts with a diagnostic if the token is not recognized.
static GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
    struct NamedFlag {
        const char*      fName;
        GpuPathRenderers fFlag;
    };
    // Lookup table of every token the --pr flag understands.
    static const NamedFlag kNamedFlags[] = {
            {"none",          GpuPathRenderers::kNone},
            {"dashline",      GpuPathRenderers::kDashLine},
            {"gtess",         GpuPathRenderers::kGpuTessellation},
            {"nvpr",          GpuPathRenderers::kStencilAndCover},
            {"ccpr",          GpuPathRenderers::kCoverageCounting},
            {"aahairline",    GpuPathRenderers::kAAHairline},
            {"aaconvex",      GpuPathRenderers::kAAConvex},
            {"aalinearizing", GpuPathRenderers::kAALinearizing},
            {"small",         GpuPathRenderers::kSmall},
            {"tess",          GpuPathRenderers::kTessellating},
            {"default",       GpuPathRenderers::kDefault},
    };
    for (const NamedFlag& entry : kNamedFlags) {
        if (0 == strcmp(name, entry.fName)) {
            return entry.fFlag;
        }
    }
    SK_ABORT(SkStringPrintf("error: unknown named path renderer \"%s\"\n", name).c_str());
}
|
|
|
|
|
|
|
|
// Builds the GpuPathRenderers bitfield from the --pr flag. Tokens add
// renderers; "~token" removes them. If the very first token is subtractive,
// we start from the default set instead of the empty set.
static GpuPathRenderers collect_gpu_path_renderers_from_flags() {
    if (FLAGS_pr.isEmpty()) {
        // Flag not given: use the default renderer chain.
        return GpuPathRenderers::kDefault;
    }

    const bool startFromDefaults = ('~' == FLAGS_pr[0][0]);
    GpuPathRenderers renderers = startFromDefaults ? GpuPathRenderers::kDefault
                                                   : GpuPathRenderers::kNone;

    for (int i = 0; i < FLAGS_pr.count(); ++i) {
        const char* token = FLAGS_pr[i];
        if ('~' == token[0]) {
            renderers &= ~get_named_pathrenderers_flags(token + 1);
        } else {
            renderers |= get_named_pathrenderers_flags(token);
        }
    }
    return renderers;
}
|
|
|
|
|
|
|
|
// Applies the common GPU command-line flags to a GrContextOptions struct.
void SetCtxOptionsFromCommonFlags(GrContextOptions* ctxOptions) {
    // One executor shared by every context configured through this function;
    // constructed on first call and kept alive for the rest of the process.
    static std::unique_ptr<SkExecutor> gGpuExecutor =
            FLAGS_gpuThreads ? SkExecutor::MakeFIFOThreadPool(FLAGS_gpuThreads)
                             : nullptr;

    ctxOptions->fExecutor                            = gGpuExecutor.get();
    ctxOptions->fDisableCoverageCountingPaths        = !FLAGS_cc;
    ctxOptions->fAllowPathMaskCaching                = FLAGS_cachePathMasks;
    ctxOptions->fSuppressGeometryShaders             = FLAGS_noGS;
    ctxOptions->fGpuPathRenderers                    = collect_gpu_path_renderers_from_flags();
    ctxOptions->fInternalMultisampleCount            = FLAGS_internalSamples;
    ctxOptions->fDisableDriverCorrectnessWorkarounds = FLAGS_disableDriverCorrectnessWorkarounds;

    // The two opsTask flags are mutually exclusive; when neither is set the
    // context keeps its built-in default.
    if (FLAGS_reduceOpsTaskSplitting) {
        SkASSERT(!FLAGS_dontReduceOpsTaskSplitting);
        ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kYes;
    } else if (FLAGS_dontReduceOpsTaskSplitting) {
        ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kNo;
    }
}
|