Revert r8535

git-svn-id: http://skia.googlecode.com/svn/trunk@8536 2bbb7eff-a529-9590-31e7-b0007b416f81

parent da8db9837e
commit 67caaf8792
@@ -26,8 +26,7 @@ namespace skiagm {
         // or off (as long as the number of these errors is 0).
         kNoGpuContext_ErrorType,

-        kRenderModeMismatch_ErrorType,
-        kExpectationsMismatch_ErrorType,
+        kImageMismatch_ErrorType,
         kMissingExpectations_ErrorType,
         kWritingReferenceImage_ErrorType,
         kLast_ErrorType = kWritingReferenceImage_ErrorType
@@ -40,10 +39,8 @@ namespace skiagm {
         switch(type) {
             case kNoGpuContext_ErrorType:
                 return "NoGpuContext";
-            case kRenderModeMismatch_ErrorType:
-                return "RenderModeMismatch";
-            case kExpectationsMismatch_ErrorType:
-                return "ExpectationsMismatch";
+            case kImageMismatch_ErrorType:
+                return "ImageMismatch";
             case kMissingExpectations_ErrorType:
                 return "MissingExpectations";
             case kWritingReferenceImage_ErrorType:
@@ -272,8 +272,7 @@ public:
         }

         // Things to do only if there is some error condition.
-        SkString fullName = name;
-        fullName.append(renderModeDescriptor);
+        SkString fullName = make_name(name.c_str(), renderModeDescriptor);
         for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
             ErrorType type = static_cast<ErrorType>(typeInt);
             if (errorCombination.includes(type)) {
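
For orientation, here is a minimal, self-contained sketch of the pattern the three hunks above rely on: iterate every ErrorType up to kLast_ErrorType, test membership in an ErrorCombination, and map each member to its display name. The ErrorCombination stand-in and its bitfield layout are assumptions for illustration, not Skia's actual implementation.

    #include <cstdio>

    // Post-revert error types, as restored by this commit.
    enum ErrorType {
        kNoGpuContext_ErrorType,
        kImageMismatch_ErrorType,
        kMissingExpectations_ErrorType,
        kWritingReferenceImage_ErrorType,
        kLast_ErrorType = kWritingReferenceImage_ErrorType
    };

    static const char* getErrorTypeName(ErrorType type) {
        switch (type) {
            case kNoGpuContext_ErrorType:          return "NoGpuContext";
            case kImageMismatch_ErrorType:         return "ImageMismatch";
            case kMissingExpectations_ErrorType:   return "MissingExpectations";
            case kWritingReferenceImage_ErrorType: return "WritingReferenceImage";
        }
        return "Unknown";  // unreachable if every enumerator is handled
    }

    // Hypothetical stand-in for gm's ErrorCombination: one bit per ErrorType.
    struct ErrorCombination {
        unsigned bits = 0;
        void add(ErrorType t)            { bits |= 1u << t; }
        bool includes(ErrorType t) const { return (bits & (1u << t)) != 0; }
    };

    int main() {
        ErrorCombination errors;
        errors.add(kImageMismatch_ErrorType);
        // Same loop shape as the hunk above: walk every type, report members.
        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
            ErrorType type = static_cast<ErrorType>(typeInt);
            if (errors.includes(type)) {
                printf("error: %s\n", getErrorTypeName(type));
            }
        }
        return 0;
    }
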
@@ -684,17 +683,18 @@ public:
      * @param baseNameString name of test without renderModeDescriptor added
      * @param renderModeDescriptor e.g., "-rtree", "-deferred"
      * @param addToJsonSummary whether to add these results (both actual and
-     *        expected) to the JSON summary. Regardless of this setting, if
-     *        we find an image mismatch in this test, we will write these
-     *        results to the JSON summary. (This is so that we will always
-     *        report errors across rendering modes, such as pipe vs tiled.
-     *        See https://codereview.chromium.org/13650002/ )
+     *        expected) to the JSON summary
      *
      * TODO: For now, addToJsonSummary is only set to true within
      *       compare_test_results_to_stored_expectations(), so results of our
      *       in-memory comparisons (Rtree vs regular, etc.) are not written to the
      *       JSON summary. We may wish to change that.
      */
     ErrorCombination compare_to_expectations(Expectations expectations,
                                              const SkBitmap& actualBitmap,
                                              const SkString& baseNameString,
                                              const char renderModeDescriptor[],
-                                             bool addToJsonSummary) {
+                                             bool addToJsonSummary=false) {
         ErrorCombination errors;
         Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
         SkString completeNameString = baseNameString;
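
The revert restores the default argument on addToJsonSummary, which is why the cross-mode call site further down can drop its trailing `false`. A tiny sketch of that C++ default-argument behavior, with hypothetical names standing in for the real methods:

    #include <cstdio>

    // Hypothetical function mirroring the restored signature: callers may
    // omit the defaulted parameter.
    static void compare(const char* caller, bool addToJsonSummary = false) {
        printf("%s: addToJsonSummary=%s\n",
               caller, addToJsonSummary ? "true" : "false");
    }

    int main() {
        compare("stored-expectations", true);  // explicit, as in compare_test_results_to_stored_expectations()
        compare("cross-mode");                 // omitted => false, as in the cross-mode hunk below
        return 0;
    }
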
@@ -704,14 +704,7 @@ public:
         if (expectations.empty()) {
             errors.add(kMissingExpectations_ErrorType);
         } else if (!expectations.match(actualChecksum)) {
-            addToJsonSummary = true;
-            // The error mode we record depends on whether this was running
-            // in a non-standard renderMode.
-            if ('\0' == *renderModeDescriptor) {
-                errors.add(kExpectationsMismatch_ErrorType);
-            } else {
-                errors.add(kRenderModeMismatch_ErrorType);
-            }
+            errors.add(kImageMismatch_ErrorType);

             // Write out the "actuals" for any mismatches, if we have
             // been directed to do so.
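
The branch removed above keys off whether renderModeDescriptor is the empty string. A quick standalone sketch of that C-string idiom, with made-up values (the "-pipe" descriptor appears later in this diff):

    #include <cstdio>

    // '\0' == *s dereferences the first character: true only for "".
    static bool isStandardMode(const char renderModeDescriptor[]) {
        return '\0' == *renderModeDescriptor;
    }

    int main() {
        printf("%d\n", isStandardMode(""));       // 1: standard render mode
        printf("%d\n", isStandardMode("-pipe"));  // 0: non-standard mode
        return 0;
    }
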
@@ -760,7 +753,7 @@ public:
                 // (where we can set ignore-failure to either true or
                 // false), add test cases that exercise ignored
                 // failures (both for kMissingExpectations_ErrorType
-                // and kExpectationsMismatch_ErrorType).
+                // and kImageMismatch_ErrorType).
                 this->fJsonActualResults_FailureIgnored[testName] =
                     actualResults;
             } else {
@@ -779,8 +772,7 @@ public:
                 this->fJsonActualResults_NoComparison[testName] =
                     actualResults;
             }
-            if (result.includes(kExpectationsMismatch_ErrorType) ||
-                result.includes(kRenderModeMismatch_ErrorType)) {
+            if (result.includes(kImageMismatch_ErrorType)) {
                 this->fJsonActualResults_Failed[testName] = actualResults;
             }
         }
@@ -874,11 +866,18 @@ public:
             GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
             SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {

+        // TODO(epoger): This method is run to compare results across
+        // different rendering modes (as opposed to
+        // compare_test_results_to_stored_expectations(), which
+        // compares results against expectations stored on disk). If
+        // we would like the GenerateGMs step to distinguish between
+        // those two types of mismatches, we should report image
+        // mismatches in here with a different ErrorType.
         SkASSERT(referenceBitmap);
         SkString name = make_name(gm->shortName(), gRec.fName);
         Expectations expectations(*referenceBitmap);
         return compare_to_expectations(expectations, actualBitmap,
-                                       name, renderModeDescriptor, false);
+                                       name, renderModeDescriptor);
     }

     static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -997,8 +996,9 @@ public:
         return kEmpty_ErrorCombination;
     }

-    ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec,
-                                        const SkBitmap& referenceBitmap, bool simulateFailure) {
+    ErrorCombination test_pipe_playback(GM* gm,
+                                        const ConfigData& gRec,
+                                        const SkBitmap& referenceBitmap) {
         ErrorCombination errors;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
@@ -1010,9 +1010,7 @@ public:
             SkGPipeWriter writer;
             SkCanvas* pipeCanvas = writer.startRecording(
                 &pipeController, gPipeWritingFlagCombos[i].flags);
-            if (!simulateFailure) {
-                invokeGM(gm, pipeCanvas, false, false);
-            }
+            invokeGM(gm, pipeCanvas, false, false);
             complete_bitmap(&bitmap);
             writer.endRecording();
             SkString string("-pipe");
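
Before the revert, --simulatePipePlaybackFailure skipped the invokeGM() call so the pipe-mode bitmap would deliberately mismatch the reference and register exactly one render-mode failure. A self-contained sketch of that mechanism; every type here is a stand-in for illustration, not Skia's:

    #include <cstdint>
    #include <cstdio>

    // Stand-in "bitmap": just a checksum of what was drawn.
    struct FakeBitmap { uint64_t checksum = 0; };

    static void drawGM(FakeBitmap* bitmap) { bitmap->checksum = 0xC0FFEE; }

    // Mirrors the removed logic: skipping the draw forces a mismatch.
    static bool testPipePlayback(const FakeBitmap& reference, bool simulateFailure) {
        FakeBitmap pipeBitmap;
        if (!simulateFailure) {
            drawGM(&pipeBitmap);  // analogous to invokeGM(gm, pipeCanvas, false, false)
        }
        return pipeBitmap.checksum == reference.checksum;  // false => mismatch recorded
    }

    int main() {
        FakeBitmap reference;
        drawGM(&reference);
        printf("normal: %s\n", testPipePlayback(reference, false) ? "match" : "MISMATCH");
        printf("forced: %s\n", testPipePlayback(reference, true)  ? "match" : "MISMATCH");
        return 0;
    }
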
@@ -1179,7 +1177,6 @@ DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass.");
 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources.");
 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass.");
 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass.");
-DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only.");
 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay.");
 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture.");
 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale "
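
These gflags-style macros pair each DEFINE_bool(name, ...) with a FLAGS_name variable that is read elsewhere in the file (FLAGS_pipe, FLAGS_rtree, and, before this revert, FLAGS_simulatePipePlaybackFailure). A minimal re-implementation of the pattern, not Skia's actual macro:

    #include <cstdio>

    // Toy version of the DEFINE_bool pattern: declares a FLAGS_<name> global.
    #define DEFINE_bool(name, defaultValue, helpString) \
        bool FLAGS_##name = (defaultValue)

    DEFINE_bool(pipe, true, "Exercise the SkGPipe replay test pass.");

    int main() {
        if (FLAGS_pipe) {
            printf("pipe test pass enabled\n");
        }
        return 0;
    }
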
@@ -1414,6 +1411,10 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
         errorsForAllModes.add(pictErrors);
     }

+    // TODO: add a test in which the RTree rendering results in a
+    // different bitmap than the standard rendering. It should
+    // show up as failed in the JSON summary, and should be listed
+    // in the stdout also.
     if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) {
         SkPicture* pict = gmmain.generate_new_picture(
             gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
@@ -1458,8 +1459,7 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
     ErrorCombination pipeErrors;

     if (FLAGS_pipe) {
-        pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
-                                                 FLAGS_simulatePipePlaybackFailure));
+        pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap));
     }

     if ((pipeErrors.isEmpty()) &&
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -4,10 +4,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -3,10 +3,9 @@ GM: Ran 1 GMs
 GM: ... over 2 configs ["8888", "565"]
 GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
@@ -1 +0,0 @@
-out/Debug/gm --simulatePipePlaybackFailure --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt
@@ -1,33 +0,0 @@
-{
-   "actual-results" : {
-      "failed" : {
-         "comparison/selftest1-pipe" : {
-            "checksum" : 4259036727585789440
-         }
-      },
-      "failure-ignored" : null,
-      "no-comparison" : null,
-      "succeeded" : {
-         "565/selftest1" : {
-            "checksum" : 9512553915271796906
-         },
-         "8888/selftest1" : {
-            "checksum" : 14022967492765711532
-         }
-      }
-   },
-   "expected-results" : {
-      "565/selftest1" : {
-         "checksums" : [ 9512553915271796906 ],
-         "ignore-failure" : false
-      },
-      "8888/selftest1" : {
-         "checksums" : [ 14022967492765711532 ],
-         "ignore-failure" : false
-      },
-      "comparison/selftest1-pipe" : {
-         "checksums" : [ 14022967492765711532 ],
-         "ignore-failure" : false
-      }
-   }
-}
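
The deleted summary above also documents the JSON schema gm emits: "actual-results" buckets each test into failed / failure-ignored / no-comparison / succeeded with its actual checksum, while "expected-results" lists the acceptable checksums plus an ignore-failure bit per test. A library-free sketch of that shape, using stand-in types (not gm's) and the checksums from the file:

    #include <cstdint>
    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    // Stand-in mirror of one "expected-results" entry.
    struct ExpectedResult {
        std::vector<uint64_t> checksums;  // matching any listed checksum is a pass
        bool ignoreFailure;               // if true, a mismatch is tolerated
    };

    int main() {
        std::map<std::string, uint64_t> failed;     // "actual-results" / "failed"
        std::map<std::string, uint64_t> succeeded;  // "actual-results" / "succeeded"
        std::map<std::string, ExpectedResult> expected;

        succeeded["8888/selftest1"] = 14022967492765711532ULL;
        expected["comparison/selftest1-pipe"] =
            ExpectedResult{{14022967492765711532ULL}, false};
        failed["comparison/selftest1-pipe"] = 4259036727585789440ULL;  // pipe drew nothing

        for (const auto& kv : failed) {
            printf("failed: %s (actual checksum %llu)\n",
                   kv.first.c_str(), (unsigned long long)kv.second);
        }
        return 0;
    }
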
@@ -1 +0,0 @@
-255
@@ -1 +0,0 @@
-GM: ---- comparison/selftest1-pipe: 60000 (of 60000) differing pixels, max per-channel mismatch R=135 G=246 B=135 A=0
@@ -1,13 +0,0 @@
-GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
-GM: drawing... selftest1 [300 200]
-GM: Ran 1 GMs
-GM: ... over 2 configs ["8888", "565"]
-GM: ... and 5 modes ["pipe", "replay", "rtree", "serialize", "tilegrid"]
-GM: ... so there should be a total of 7 tests.
-GM: Ran 7 tests: NoGpuContext=0 RenderModeMismatch=1 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
-GM: [*] 0 NoGpuContext:
-GM: [*] 1 RenderModeMismatch: comparison/selftest1-pipe
-GM: [*] 0 ExpectationsMismatch:
-GM: [ ] 0 MissingExpectations:
-GM: [*] 0 WritingReferenceImage:
-GM: (results marked with [*] will cause nonzero return value)
@@ -156,7 +156,4 @@ gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir"
 # section should be empty.
 gm_test "--hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"

-# Test what happens if a subset of the renderModes fail (e.g. pipe)
-gm_test "--simulatePipePlaybackFailure --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
-
 echo "All tests passed."