/* Copyright 2010 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

// Entropy encoding (Huffman) utilities.

#include "./entropy_encode.h"

#include <algorithm>
#include <cassert>   // assert
#include <cstdlib>
#include <cstring>   // memset
#include <limits>

#include "./histogram.h"
#include "./port.h"
#include "./types.h"

namespace brotli {

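// Walks the tree rooted at p and records, for each leaf, its distance from
// the root (i.e. its code length) into depth[symbol].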
void SetDepth(const HuffmanTree &p,
              HuffmanTree *pool,
              uint8_t *depth,
              uint8_t level) {
  if (p.index_left_ >= 0) {
    ++level;
    SetDepth(pool[p.index_left_], pool, depth, level);
    SetDepth(pool[p.index_right_or_value_], pool, depth, level);
  } else {
    depth[p.index_right_or_value_] = level;
  }
}

// Sort the root nodes, least popular first.
static inline bool SortHuffmanTree(const HuffmanTree& v0,
                                   const HuffmanTree& v1) {
  if (v0.total_count_ != v1.total_count_) {
    return v0.total_count_ < v1.total_count_;
  }
  return v0.index_right_or_value_ > v1.index_right_or_value_;
}

// This function will create a Huffman tree.
//
// The catch here is that the tree cannot be arbitrarily deep.
// Brotli specifies a maximum depth of 15 bits for "code trees"
// and 7 bits for "code length code trees."
//
// count_limit is the value that is faked as the minimum population count,
// and this minimum is doubled until the tree satisfies the maximum depth
// requirement.
//
// This algorithm does not perform well on very long data blocks, especially
// when the population counts are larger than 2**tree_limit, but we are not
// planning to use it with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
void CreateHuffmanTree(const uint32_t *data,
                       const size_t length,
                       const int tree_limit,
                       HuffmanTree* tree,
                       uint8_t *depth) {
  // For block sizes below 64 kB, we never need to do a second iteration
  // of this loop. Probably all of our block sizes will be smaller than
  // that, so this loop is mostly of academic interest. If we actually
  // needed this, we would be better off with the Katajainen algorithm.
  for (uint32_t count_limit = 1; ; count_limit *= 2) {
    size_t n = 0;
    for (size_t i = length; i != 0;) {
      --i;
      if (data[i]) {
        const uint32_t count = std::max(data[i], count_limit);
        tree[n++] = HuffmanTree(count, -1, static_cast<int16_t>(i));
      }
    }

    if (n == 1) {
      depth[tree[0].index_right_or_value_] = 1;  // Only one element.
      break;
    }

    std::sort(tree, tree + n, SortHuffmanTree);

    // The nodes are:
    // [0, n): the sorted leaf nodes that we start with.
    // [n]: we add a sentinel here.
    // [n + 1, 2n): new parent nodes are added here, starting from (n + 1).
    //              These are naturally in ascending order.
    // [2n]: we add a sentinel at the end as well.
    // There will be (2n + 1) elements at the end.
    const HuffmanTree sentinel(std::numeric_limits<uint32_t>::max(), -1, -1);
    tree[n] = sentinel;
    tree[n + 1] = sentinel;

    size_t i = 0;      // Points to the next leaf node.
    size_t j = n + 1;  // Points to the next non-leaf node.
    for (size_t k = n - 1; k != 0; --k) {
      size_t left, right;
      if (tree[i].total_count_ <= tree[j].total_count_) {
        left = i;
        ++i;
      } else {
        left = j;
        ++j;
      }
      if (tree[i].total_count_ <= tree[j].total_count_) {
        right = i;
        ++i;
      } else {
        right = j;
        ++j;
      }

      // The sentinel node becomes the parent node.
      size_t j_end = 2 * n - k;
      tree[j_end].total_count_ =
          tree[left].total_count_ + tree[right].total_count_;
      tree[j_end].index_left_ = static_cast<int16_t>(left);
      tree[j_end].index_right_or_value_ = static_cast<int16_t>(right);

      // Add back the last sentinel node.
      tree[j_end + 1] = sentinel;
    }
    SetDepth(tree[2 * n - 1], &tree[0], depth, 0);

    // We need to pack the Huffman tree in tree_limit bits.
    // If this was not successful, add fake entities to the lowest values
    // and retry.
    if (*std::max_element(&depth[0], &depth[length]) <= tree_limit) {
      break;
    }
  }
}

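// Illustrative usage of CreateHuffmanTree (a sketch, not taken from the
// callers in this repository; the names below are hypothetical). The scratch
// buffer must hold at least 2 * length + 1 nodes, since up to (2n + 1)
// elements are live at the end of a pass (see the comment above):
//
//   static const size_t kNumSymbols = 4;
//   uint32_t histogram[kNumSymbols] = { 10, 1, 1, 1 };
//   HuffmanTree scratch[2 * kNumSymbols + 1];
//   uint8_t depths[kNumSymbols] = { 0 };
//   CreateHuffmanTree(histogram, kNumSymbols, 15, scratch, depths);
//   // depths is now { 1, 2, 3, 3 }: the most frequent symbol gets the
//   // shortest code, and no code length exceeds tree_limit = 15.
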
static void Reverse(uint8_t* v, size_t start, size_t end) {
  --end;
  while (start < end) {
    uint8_t tmp = v[start];
    v[start] = v[end];
    v[end] = tmp;
    ++start;
    --end;
  }
}

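// Appends a run of `repetitions` copies of the non-zero code length `value`
// to the output arrays. If `value` differs from the previously written code
// length, one copy is emitted literally first so that the repeat code has
// the right length to repeat. Short runs are written out literally; longer
// runs use repeat code 16, which (per the Brotli format) repeats the
// previous non-zero code length 3-6 times using 2 extra bits.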
static void WriteHuffmanTreeRepetitions(
    const uint8_t previous_value,
    const uint8_t value,
    size_t repetitions,
    size_t* tree_size,
    uint8_t* tree,
    uint8_t* extra_bits_data) {
  assert(repetitions > 0);
  if (previous_value != value) {
    tree[*tree_size] = value;
    extra_bits_data[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions == 7) {
    tree[*tree_size] = value;
    extra_bits_data[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions < 3) {
    for (size_t i = 0; i < repetitions; ++i) {
      tree[*tree_size] = value;
      extra_bits_data[*tree_size] = 0;
      ++(*tree_size);
    }
  } else {
    repetitions -= 3;
    size_t start = *tree_size;
    while (true) {
      tree[*tree_size] = 16;
      extra_bits_data[*tree_size] = repetitions & 0x3;
      ++(*tree_size);
      repetitions >>= 2;
      if (repetitions == 0) {
        break;
      }
      --repetitions;
    }
    Reverse(tree, start, *tree_size);
    Reverse(extra_bits_data, start, *tree_size);
  }
}

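// Appends a run of `repetitions` zero code lengths to the output arrays.
// Short runs are written out as literal zeros; longer runs use repeat
// code 17, which (per the Brotli format) repeats a zero code length
// 3-10 times using 3 extra bits.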
static void WriteHuffmanTreeRepetitionsZeros(
    size_t repetitions,
    size_t* tree_size,
    uint8_t* tree,
    uint8_t* extra_bits_data) {
  if (repetitions == 11) {
    tree[*tree_size] = 0;
    extra_bits_data[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions < 3) {
    for (size_t i = 0; i < repetitions; ++i) {
      tree[*tree_size] = 0;
      extra_bits_data[*tree_size] = 0;
      ++(*tree_size);
    }
  } else {
    repetitions -= 3;
    size_t start = *tree_size;
    while (true) {
      tree[*tree_size] = 17;
      extra_bits_data[*tree_size] = repetitions & 0x7;
      ++(*tree_size);
      repetitions >>= 3;
      if (repetitions == 0) {
        break;
      }
      --repetitions;
    }
    Reverse(tree, start, *tree_size);
    Reverse(extra_bits_data, start, *tree_size);
  }
}

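// Heuristically rewrites the population counts in `counts` so that the code
// lengths computed from them form longer runs, which the repeat codes above
// can encode cheaply. `good_for_rle` is caller-provided scratch of `length`
// bytes. A sketch of the assumed call order (inferred from how the functions
// in this file fit together, not prescribed here): optimize the counts,
// build the code lengths with CreateHuffmanTree(), then serialize them with
// WriteHuffmanTree().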
void OptimizeHuffmanCountsForRle(size_t length, uint32_t* counts,
                                 uint8_t* good_for_rle) {
  size_t nonzero_count = 0;
  size_t stride;
  size_t limit;
  size_t sum;
  const size_t streak_limit = 1240;
  // 1) Let's make the Huffman code more compatible with rle encoding.
  size_t i;
  for (i = 0; i < length; i++) {
    if (counts[i]) {
      ++nonzero_count;
    }
  }
  if (nonzero_count < 16) {
    return;
  }
  while (length != 0 && counts[length - 1] == 0) {
    --length;
  }
  if (length == 0) {
    return;  // All zeros.
  }
  // Now counts[0..length - 1] does not have trailing zeros.
  {
    size_t nonzeros = 0;
    uint32_t smallest_nonzero = 1 << 30;
    for (i = 0; i < length; ++i) {
      if (counts[i] != 0) {
        ++nonzeros;
        if (smallest_nonzero > counts[i]) {
          smallest_nonzero = counts[i];
        }
      }
    }
    if (nonzeros < 5) {
      // Small histogram will model it well.
      return;
    }
    size_t zeros = length - nonzeros;
    if (smallest_nonzero < 4) {
      if (zeros < 6) {
        for (i = 1; i < length - 1; ++i) {
          if (counts[i - 1] != 0 && counts[i] == 0 && counts[i + 1] != 0) {
            counts[i] = 1;
          }
        }
      }
    }
    if (nonzeros < 28) {
      return;
    }
  }
  // 2) Let's mark all population counts that already can be encoded
  // with an rle code.
  memset(good_for_rle, 0, length);
  {
    // Let's not spoil any of the existing good rle codes.
    // Mark any seq of 0's of length 5 or more as good_for_rle.
    // Mark any seq of non-0's of length 7 or more as good_for_rle.
    uint32_t symbol = counts[0];
    size_t step = 0;
    for (i = 0; i <= length; ++i) {
      if (i == length || counts[i] != symbol) {
        if ((symbol == 0 && step >= 5) ||
            (symbol != 0 && step >= 7)) {
          size_t k;
          for (k = 0; k < step; ++k) {
            good_for_rle[i - k - 1] = 1;
          }
        }
        step = 1;
        if (i != length) {
          symbol = counts[i];
        }
      } else {
        ++step;
      }
    }
  }
  // 3) Let's replace those population counts that lead to more rle codes.
  // Math here is in 24.8 fixed point representation.
  stride = 0;
  limit = 256 * (counts[0] + counts[1] + counts[2]) / 3 + 420;
  sum = 0;
  for (i = 0; i <= length; ++i) {
    if (i == length || good_for_rle[i] ||
        (i != 0 && good_for_rle[i - 1]) ||
        (256 * counts[i] - limit + streak_limit) >= 2 * streak_limit) {
      if (stride >= 4 || (stride >= 3 && sum == 0)) {
        size_t k;
        // The stride must end, collapse what we have, if we have enough (4).
        size_t count = (sum + stride / 2) / stride;
        if (count == 0) {
          count = 1;
        }
        if (sum == 0) {
          // Don't let an all-zeros stride be upgraded to ones.
          count = 0;
        }
        for (k = 0; k < stride; ++k) {
          // We don't want to change the value at counts[i]: it already
          // belongs to the next stride. Thus - 1.
          counts[i - k - 1] = static_cast<uint32_t>(count);
        }
      }
      stride = 0;
      sum = 0;
      if (i < length - 2) {
        // All interesting strides have a count of at least 4,
        // at least when non-zero.
        limit = 256 * (counts[i] + counts[i + 1] + counts[i + 2]) / 3 + 420;
      } else if (i < length) {
        limit = 256 * counts[i];
      } else {
        limit = 0;
      }
    }
    ++stride;
    if (i != length) {
      sum += counts[i];
      if (stride >= 4) {
        limit = (256 * sum + stride / 2) / stride;
      }
      if (stride == 4) {
        limit += 120;
      }
    }
  }
}

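// Decides whether run-length coding is worthwhile for the zero and for the
// non-zero code lengths separately: RLE is enabled when the qualifying runs
// (length >= 3 for zeros, >= 4 for non-zeros) cover, roughly, more than
// twice as many symbols as there are such runs.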
static void DecideOverRleUse(const uint8_t* depth, const size_t length,
                             bool *use_rle_for_non_zero,
                             bool *use_rle_for_zero) {
  size_t total_reps_zero = 0;
  size_t total_reps_non_zero = 0;
  size_t count_reps_zero = 1;
  size_t count_reps_non_zero = 1;
  for (size_t i = 0; i < length;) {
    const uint8_t value = depth[i];
    size_t reps = 1;
    for (size_t k = i + 1; k < length && depth[k] == value; ++k) {
      ++reps;
    }
    if (reps >= 3 && value == 0) {
      total_reps_zero += reps;
      ++count_reps_zero;
    }
    if (reps >= 4 && value != 0) {
      total_reps_non_zero += reps;
      ++count_reps_non_zero;
    }
    i += reps;
  }
  *use_rle_for_non_zero = total_reps_non_zero > count_reps_non_zero * 2;
  *use_rle_for_zero = total_reps_zero > count_reps_zero * 2;
}

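// Converts the code length array depth[0..length) into the run-length coded
// form used when serializing a Huffman code in the Brotli format: `tree`
// receives the code length and repeat symbols, `extra_bits_data` the
// corresponding extra bits, and *tree_size the number of symbols written.
// Trailing zero lengths are dropped first.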
void WriteHuffmanTree(const uint8_t* depth,
                      size_t length,
                      size_t* tree_size,
                      uint8_t* tree,
                      uint8_t* extra_bits_data) {
  uint8_t previous_value = 8;

  // Throw away trailing zeros.
  size_t new_length = length;
  for (size_t i = 0; i < length; ++i) {
    if (depth[length - i - 1] == 0) {
      --new_length;
    } else {
      break;
    }
  }

  // First gather statistics on whether it is a good idea to do rle.
  bool use_rle_for_non_zero = false;
  bool use_rle_for_zero = false;
  if (length > 50) {
    // Find rle coding for longer codes.
    // Shorter codes seem not to benefit from rle.
    DecideOverRleUse(depth, new_length,
                     &use_rle_for_non_zero, &use_rle_for_zero);
  }

  // Actual rle coding.
  for (size_t i = 0; i < new_length;) {
    const uint8_t value = depth[i];
    size_t reps = 1;
    if ((value != 0 && use_rle_for_non_zero) ||
        (value == 0 && use_rle_for_zero)) {
      for (size_t k = i + 1; k < new_length && depth[k] == value; ++k) {
        ++reps;
      }
    }
    if (value == 0) {
      WriteHuffmanTreeRepetitionsZeros(reps, tree_size, tree, extra_bits_data);
    } else {
      WriteHuffmanTreeRepetitions(previous_value,
                                  value, reps, tree_size,
                                  tree, extra_bits_data);
      previous_value = value;
    }
    i += reps;
  }
}

namespace {

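// Returns the lowest num_bits bits of `bits` in reversed order, e.g.
// ReverseBits(3, 6) turns binary 110 into binary 011 (= 3). The codes below
// are stored pre-reversed, presumably so that a bit writer emitting
// least-significant bits first can output them directly.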
uint16_t ReverseBits(int num_bits, uint16_t bits) {
  static const size_t kLut[16] = {  // Pre-reversed 4-bit values.
    0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
    0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
  };
  size_t retval = kLut[bits & 0xf];
  for (int i = 4; i < num_bits; i += 4) {
    retval <<= 4;
    bits = static_cast<uint16_t>(bits >> 4);
    retval |= kLut[bits & 0xf];
  }
  retval >>= (-num_bits & 0x3);
  return static_cast<uint16_t>(retval);
}

}  // namespace

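// Builds canonical Huffman codes from the code lengths in `depth`, in the
// same way as DEFLATE (RFC 1951, section 3.2.2), and stores each code
// bit-reversed in `bits`. An illustrative example (a sketch, values computed
// by hand): for depths { 1, 2, 3, 3 } the canonical codes are 0, 10, 110 and
// 111, stored bit-reversed as bits = { 0x0, 0x1, 0x3, 0x7 }.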
void ConvertBitDepthsToSymbols(const uint8_t *depth,
                               size_t len,
                               uint16_t *bits) {
  // In Brotli, all bit depths are [1..15].
  // 0 bit depth means that the symbol does not exist.
  const int kMaxBits = 16;  // 0..15 are values for bits.
  uint16_t bl_count[kMaxBits] = { 0 };
  {
    for (size_t i = 0; i < len; ++i) {
      ++bl_count[depth[i]];
    }
    bl_count[0] = 0;
  }
  uint16_t next_code[kMaxBits];
  next_code[0] = 0;
  {
    int code = 0;
    for (int bits = 1; bits < kMaxBits; ++bits) {
      code = (code + bl_count[bits - 1]) << 1;
      next_code[bits] = static_cast<uint16_t>(code);
    }
  }
  for (size_t i = 0; i < len; ++i) {
    if (depth[i]) {
      bits[i] = ReverseBits(depth[i], next_code[depth[i]]++);
    }
  }
}

}  // namespace brotli