diff --git a/c/enc/block_splitter_inc.h b/c/enc/block_splitter_inc.h
index aa40bfd..6668d5e 100644
--- a/c/enc/block_splitter_inc.h
+++ b/c/enc/block_splitter_inc.h
@@ -129,7 +129,7 @@ static size_t FN(FindBlocks)(const DataType* data, const size_t length,
     }
     /* More blocks for the beginning. */
     if (byte_ix < 2000) {
-      block_switch_cost *= 0.77 + 0.07 * (double)byte_ix / 2000;
+      block_switch_cost *= 0.77 + 0.000035 * (double)byte_ix;
     }
     for (k = 0; k < num_histograms; ++k) {
       cost[k] -= min_cost;
diff --git a/c/enc/encode.c b/c/enc/encode.c
index 8ea6eee..a2cc7cb 100644
--- a/c/enc/encode.c
+++ b/c/enc/encode.c
@@ -437,10 +437,11 @@ static BROTLI_BOOL ShouldCompress(
   if ((double)num_literals > 0.99 * (double)bytes) {
     uint32_t literal_histo[256] = { 0 };
     static const uint32_t kSampleRate = 13;
+    static const double kInvSampleRate = 1.0 / 13.0;
     static const double kMinEntropy = 7.92;
     const double bit_cost_threshold =
-        (double)bytes * kMinEntropy / kSampleRate;
+        (double)bytes * kMinEntropy * kInvSampleRate;
     size_t t = (bytes + kSampleRate - 1) / kSampleRate;
     uint32_t pos = (uint32_t)last_flush_pos;
     size_t i;
     for (i = 0; i < t; i++) {
diff --git a/c/enc/literal_cost.c b/c/enc/literal_cost.c
index 2ac847f..fafc7f6 100644
--- a/c/enc/literal_cost.c
+++ b/c/enc/literal_cost.c
@@ -121,7 +121,7 @@ static void EstimateBitCostsForLiteralsUTF8(size_t pos, size_t len, size_t mask,
        rapidly in the beginning of the file, perhaps because the beginning
        of the data is a statistical "anomaly". */
     if (i < 2000) {
-      lit_cost += 0.7 - ((double)(2000 - i) / 2000.0 * 0.35);
+      lit_cost += 0.7 - ((double)(2000 - i) * 0.000175);
     }
     cost[i] = (float)lit_cost;
   }
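
A minimal standalone sketch (not part of the patch; the helper names and the main() driver are hypothetical) of the strength reduction applied in these hunks: a floating-point division by a compile-time constant is replaced by multiplication with the pre-folded reciprocal (0.07 / 2000 = 0.000035, 0.35 / 2000 = 0.000175, 1.0 / 13.0 for kSampleRate). The integer ceiling division in ShouldCompress() is left as integer division, since truncating a reciprocal multiplication can undercount by one.

#include <assert.h>
#include <math.h>
#include <stddef.h>
#include <stdio.h>

/* Original form: one floating-point division per evaluation. */
static double ScaleWithDivision(double byte_ix) {
  return 0.77 + 0.07 * byte_ix / 2000;
}

/* Optimized form: the constant 0.07 / 2000 is folded to 0.000035. */
static double ScaleWithReciprocal(double byte_ix) {
  return 0.77 + 0.000035 * byte_ix;
}

int main(void) {
  size_t byte_ix;
  for (byte_ix = 0; byte_ix < 2000; ++byte_ix) {
    double a = ScaleWithDivision((double)byte_ix);
    double b = ScaleWithReciprocal((double)byte_ix);
    /* The two forms agree up to floating-point rounding error. */
    assert(fabs(a - b) < 1e-12);
  }
  {
    /* Integer ceiling division stays integer: the compiler already turns
       division by a constant into cheap integer arithmetic, and a double
       reciprocal could round the truncated count down by one. */
    const size_t kSampleRate = 13;
    const size_t bytes = 26;
    size_t t = (bytes + kSampleRate - 1) / kSampleRate;
    printf("samples for %u bytes: %u\n", (unsigned)bytes, (unsigned)t);
  }
  return 0;
}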