Optimize out redundant hasher lookups

Author: Martin Raszyk  2018-09-29 11:02:24 +02:00
parent 352b7a01e9
commit 626e0656ac

@@ -50,8 +50,13 @@ static BROTLI_NOINLINE void EXPORT_FN(CreateBackwardReferences)(
       const score_t cost_diff_lazy = 175;
       int delayed_backward_references_in_row = 0;
       --max_length;
-      for (; BackwardReferenceScoreUsingLastDistance(pos_end - (position + 1))
-          >= sr.score + cost_diff_lazy; --max_length) {
+      for (;; --max_length) {
+        if (BackwardReferenceScoreUsingLastDistance(pos_end - (position + 1))
+            < sr.score + cost_diff_lazy && sr.score > kMinScore
+            + BROTLI_LITERAL_BYTE_SCORE) {
+          FN(Store)(hasher, ringbuffer, ringbuffer_mask, position + 1);
+          break;
+        }
         HasherSearchResult sr2;
         sr2.len = params->quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH ?
             BROTLI_MIN(size_t, sr.len - 1, max_length) : 0;
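
In the new branch, BackwardReferenceScoreUsingLastDistance(pos_end - (position + 1)) acts as an optimistic bound on the score any match starting at position + 1 could still reach; if that bound cannot beat the current match by cost_diff_lazy (and the current match is already above kMinScore + BROTLI_LITERAL_BYTE_SCORE), the lazy lookup at position + 1 is skipped and only FN(Store) is called, presumably to preserve the hash-table insertion the full search would have done as a side effect. Below is a minimal, self-contained sketch of that pattern; upper_bound_score, expensive_search, hash_store, the TOY_* constants and the numbers in main are hypothetical stand-ins, not Brotli's actual API.

/* Minimal, self-contained sketch of the early-exit pattern in this patch.
   All helpers and constants below are toy stand-ins, not Brotli code. */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

typedef int32_t score_t;

#define TOY_LITERAL_BYTE_SCORE 540   /* per-byte score used for the bound */
#define TOY_COST_DIFF_LAZY 175       /* margin a lazy match must win by   */

static size_t g_searches = 0;     /* counts expensive hasher lookups */
static size_t g_hash_stores = 0;  /* counts plain hash insertions    */

/* Upper bound on any score reachable with `remaining` bytes left
   (plays the role of BackwardReferenceScoreUsingLastDistance). */
static score_t upper_bound_score(size_t remaining) {
  return (score_t)(TOY_LITERAL_BYTE_SCORE * (score_t)remaining);
}

/* Toy stand-in for the costly FindLongestMatch-style hasher lookup. */
static score_t expensive_search(size_t pos) {
  ++g_searches;
  return (score_t)(1000 + (score_t)(pos % 7) * 50);
}

/* Toy stand-in for FN(Store): insert a hash entry without searching. */
static void hash_store(size_t pos) {
  (void)pos;
  ++g_hash_stores;
}

/* Lazy-match loop with the early exit: when even a perfect match over the
   remaining bytes cannot beat the current score by the lazy margin, skip
   the lookup, store the hash for pos + 1, and stop. */
static size_t lazy_match(size_t pos, size_t pos_end, score_t best_score) {
  for (;;) {
    if (upper_bound_score(pos_end - (pos + 1)) <
        best_score + TOY_COST_DIFF_LAZY) {
      hash_store(pos + 1);  /* keep the table entry the lookup would add */
      break;
    }
    {
      score_t next = expensive_search(pos + 1);
      if (next < best_score + TOY_COST_DIFF_LAZY) break;
      best_score = next;  /* the match one byte ahead wins: delay the ref */
      ++pos;
    }
  }
  return pos;  /* position where the chosen match starts */
}

int main(void) {
  /* Plenty of input left: the bound is large, so real lookups happen. */
  lazy_match(0, 64, 800);
  /* Near the end of the block: the bound check skips the lookup. */
  lazy_match(62, 64, 800);
  printf("searches=%zu stores=%zu\n", g_searches, g_hash_stores);
  return 0;
}

As far as one can tell from the diff, the trade-off is that the skipped lookup would have inserted position + 1 into the hash table as a side effect, so the cheap FN(Store) call (hash_store above) keeps later positions able to find that offset while avoiding the full chain walk.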