Merge pull request #87 from szabadka/master

Remove quality parameter from bitstream writing functions.
szabadka 2015-04-23 16:23:36 +02:00
commit 977bec5159
15 changed files with 139 additions and 152 deletions
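For orientation, the entropy-coding entry points read as follows after this change. This is a condensed sketch pieced together from the hunks below, not a verbatim header; the doc comments are paraphrased.

#include <cstddef>
#include <cstdint>

namespace brotli {

// Builds a depth-limited Huffman code for a histogram. The former quality
// argument is gone, so every caller shares one construction path.
void CreateHuffmanTree(const int* data, const int length,
                       const int tree_limit, uint8_t* depth);

// Writes a depth array to the bit stream in the brotli code-length format.
void StoreHuffmanTree(const uint8_t* depths, size_t num,
                      int* storage_ix, uint8_t* storage);

// Builds the code for a histogram, stores its description to the bit stream,
// and fills depth/bits for later symbol emission.
void BuildAndStoreHuffmanTree(const int* histogram, const int length,
                              uint8_t* depth, uint16_t* bits,
                              int* storage_ix, uint8_t* storage);

}  // namespace brotli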

View File

@@ -346,7 +346,7 @@ void CreateBackwardReferences(size_t num_bytes,
commands, num_commands);
break;
case 7:
-CreateBackwardReferences<Hashers::H7, true, false>(
+CreateBackwardReferences<Hashers::H7, false, false>(
num_bytes, position, ringbuffer, ringbuffer_mask,
literal_cost, literal_cost_mask, max_backward_limit, base_min_score,
quality, hashers->hash_h7.get(), dist_cache, last_insert_len,

View File

@@ -116,7 +116,7 @@ static inline int HuffmanBitCost(const uint8_t* depth, int length) {
// create huffman tree of huffman tree
uint8_t cost[kCodeLengthCodes] = { 0 };
-CreateHuffmanTree(histogram, kCodeLengthCodes, 7, 9, cost);
+CreateHuffmanTree(histogram, kCodeLengthCodes, 7, cost);
// account for rle extra bits
cost[16] += 2;
cost[17] += 3;
@@ -148,7 +148,7 @@ double PopulationCost(const Histogram<kSize>& histogram) {
return 20 + histogram.total_count_;
}
uint8_t depth[kSize] = { 0 };
-CreateHuffmanTree(&histogram.data_[0], kSize, 15, 9, depth);
+CreateHuffmanTree(&histogram.data_[0], kSize, 15, depth);
int bits = 0;
for (int i = 0; i < kSize; ++i) {
bits += histogram.data_[i] * depth[i];

View File

@@ -220,7 +220,6 @@ void StoreSimpleHuffmanTree(const uint8_t* depths,
// num = alphabet size
// depths = symbol depths
void StoreHuffmanTree(const uint8_t* depths, size_t num,
-int quality,
int *storage_ix, uint8_t *storage) {
// Write the Huffman tree into the brotli-representation.
std::vector<uint8_t> huffman_tree;
@@ -256,7 +255,7 @@ void StoreHuffmanTree(const uint8_t* depths, size_t num,
uint8_t code_length_bitdepth[kCodeLengthCodes] = { 0 };
std::vector<uint16_t> code_length_bitdepth_symbols(kCodeLengthCodes);
CreateHuffmanTree(&huffman_tree_histogram[0], kCodeLengthCodes,
-5, quality, &code_length_bitdepth[0]);
+5, &code_length_bitdepth[0]);
ConvertBitDepthsToSymbols(code_length_bitdepth, kCodeLengthCodes,
code_length_bitdepth_symbols.data());
@@ -278,7 +277,6 @@ void StoreHuffmanTree(const uint8_t* depths, size_t num,
void BuildAndStoreHuffmanTree(const int *histogram,
const int length,
-const int quality,
uint8_t* depth,
uint16_t* bits,
int* storage_ix,
@@ -289,7 +287,7 @@ void BuildAndStoreHuffmanTree(const int *histogram,
if (histogram[i]) {
if (count < 4) {
s4[count] = i;
-} else if (quality < 3 && count > 4) {
+} else if (count > 4) {
break;
}
count++;
@@ -309,20 +307,13 @@ void BuildAndStoreHuffmanTree(const int *histogram,
return;
}
-if (length >= 50 && count >= 16 && quality >= 3) {
-std::vector<int> counts(length);
-memcpy(&counts[0], histogram, sizeof(counts[0]) * length);
-OptimizeHuffmanCountsForRle(length, &counts[0]);
-CreateHuffmanTree(&counts[0], length, 15, quality, depth);
-} else {
-CreateHuffmanTree(histogram, length, 15, quality, depth);
-}
+CreateHuffmanTree(histogram, length, 15, depth);
ConvertBitDepthsToSymbols(depth, length, bits);
if (count <= 4) {
StoreSimpleHuffmanTree(depth, s4, count, max_bits, storage_ix, storage);
} else {
-StoreHuffmanTree(depth, length, quality, storage_ix, storage);
+StoreHuffmanTree(depth, length, storage_ix, storage);
}
}
@@ -462,7 +453,6 @@ void EncodeContextMap(const std::vector<int>& context_map,
memset(symbol_code.bits_, 0, sizeof(symbol_code.bits_));
BuildAndStoreHuffmanTree(symbol_histogram.data_,
num_clusters + max_run_length_prefix,
-9, // quality
symbol_code.depth_, symbol_code.bits_,
storage_ix, storage);
for (int i = 0; i < rle_symbols.size(); ++i) {
@@ -495,7 +485,6 @@ void StoreBlockSwitch(const BlockSplitCode& code,
void BuildAndStoreBlockSplitCode(const std::vector<int>& types,
const std::vector<int>& lengths,
const int num_types,
-const int quality,
BlockSplitCode* code,
int* storage_ix,
uint8_t* storage) {
@@ -529,10 +518,10 @@ void BuildAndStoreBlockSplitCode(const std::vector<int>& types,
}
StoreVarLenUint8(num_types - 1, storage_ix, storage);
if (num_types > 1) {
-BuildAndStoreHuffmanTree(&type_histo[0], num_types + 2, quality,
+BuildAndStoreHuffmanTree(&type_histo[0], num_types + 2,
&code->type_depths[0], &code->type_bits[0],
storage_ix, storage);
-BuildAndStoreHuffmanTree(&length_histo[0], 26, quality,
+BuildAndStoreHuffmanTree(&length_histo[0], 26,
&code->length_depths[0], &code->length_bits[0],
storage_ix, storage);
StoreBlockSwitch(*code, 0, storage_ix, storage);
@@ -559,7 +548,7 @@ void StoreTrivialContextMap(int num_types,
for (int i = context_bits; i < alphabet_size; ++i) {
histogram[i] = 1;
}
-BuildAndStoreHuffmanTree(&histogram[0], alphabet_size, 1,
+BuildAndStoreHuffmanTree(&histogram[0], alphabet_size,
&depths[0], &bits[0],
storage_ix, storage);
for (int i = 0; i < num_types; ++i) {
@@ -590,11 +579,10 @@ class BlockEncoder {
// Creates entropy codes of block lengths and block types and stores them
// to the bit stream.
-void BuildAndStoreBlockSwitchEntropyCodes(int quality,
-int* storage_ix, uint8_t* storage) {
+void BuildAndStoreBlockSwitchEntropyCodes(int* storage_ix, uint8_t* storage) {
BuildAndStoreBlockSplitCode(
block_types_, block_lengths_, num_block_types_,
-quality, &block_split_code_, storage_ix, storage);
+&block_split_code_, storage_ix, storage);
}
// Creates entropy codes for all block types and stores them to the bit
@@ -602,14 +590,12 @@ class BlockEncoder {
template<int kSize>
void BuildAndStoreEntropyCodes(
const std::vector<Histogram<kSize> >& histograms,
-int quality,
int* storage_ix, uint8_t* storage) {
depths_.resize(histograms.size() * alphabet_size_);
bits_.resize(histograms.size() * alphabet_size_);
for (int i = 0; i < histograms.size(); ++i) {
int ix = i * alphabet_size_;
BuildAndStoreHuffmanTree(&histograms[i].data_[0], alphabet_size_,
-quality,
&depths_[ix], &bits_[ix],
storage_ix, storage);
}
@@ -670,8 +656,9 @@ bool StoreMetaBlock(const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
bool is_last,
-int quality,
int num_direct_distance_codes,
int distance_postfix_bits,
int literal_context_mode,
@@ -707,12 +694,9 @@ bool StoreMetaBlock(const uint8_t* input,
mb.distance_split.types,
mb.distance_split.lengths);
-literal_enc.BuildAndStoreBlockSwitchEntropyCodes(
-quality, storage_ix, storage);
-command_enc.BuildAndStoreBlockSwitchEntropyCodes(
-quality, storage_ix, storage);
-distance_enc.BuildAndStoreBlockSwitchEntropyCodes(
-quality, storage_ix, storage);
+literal_enc.BuildAndStoreBlockSwitchEntropyCodes(storage_ix, storage);
+command_enc.BuildAndStoreBlockSwitchEntropyCodes(storage_ix, storage);
+distance_enc.BuildAndStoreBlockSwitchEntropyCodes(storage_ix, storage);
WriteBits(2, distance_postfix_bits, storage_ix, storage);
WriteBits(4, num_direct_distance_codes >> distance_postfix_bits,
@@ -737,11 +721,11 @@ bool StoreMetaBlock(const uint8_t* input,
storage_ix, storage);
}
-literal_enc.BuildAndStoreEntropyCodes(mb.literal_histograms, quality,
+literal_enc.BuildAndStoreEntropyCodes(mb.literal_histograms,
storage_ix, storage);
-command_enc.BuildAndStoreEntropyCodes(mb.command_histograms, quality,
+command_enc.BuildAndStoreEntropyCodes(mb.command_histograms,
storage_ix, storage);
-distance_enc.BuildAndStoreEntropyCodes(mb.distance_histograms, quality,
+distance_enc.BuildAndStoreEntropyCodes(mb.distance_histograms,
storage_ix, storage);
size_t pos = start_pos;
@@ -759,30 +743,34 @@ bool StoreMetaBlock(const uint8_t* input,
}
} else {
for (int j = 0; j < cmd.insert_len_; ++j) {
-uint8_t prev_byte = pos > 0 ? input[(pos - 1) & mask] : 0;
-uint8_t prev_byte2 = pos > 1 ? input[(pos - 2) & mask] : 0;
int context = Context(prev_byte, prev_byte2,
literal_context_mode);
int literal = input[pos & mask];
literal_enc.StoreSymbolWithContext<kLiteralContextBits>(
literal, context, mb.literal_context_map, storage_ix, storage);
+prev_byte2 = prev_byte;
+prev_byte = literal;
++pos;
}
}
-if (cmd.copy_len_ > 0 && cmd.cmd_prefix_ >= 128) {
-int dist_code = cmd.dist_prefix_;
-int distnumextra = cmd.dist_extra_ >> 24;
-int distextra = cmd.dist_extra_ & 0xffffff;
-if (mb.distance_context_map.empty()) {
-distance_enc.StoreSymbol(dist_code, storage_ix, storage);
-} else {
-int context = cmd.DistanceContext();
-distance_enc.StoreSymbolWithContext<kDistanceContextBits>(
-dist_code, context, mb.distance_context_map, storage_ix, storage);
-}
-brotli::WriteBits(distnumextra, distextra, storage_ix, storage);
-}
-pos += cmd.copy_len_;
+pos += cmd.copy_len_;
+if (cmd.copy_len_ > 0) {
+prev_byte2 = input[(pos - 2) & mask];
+prev_byte = input[(pos - 1) & mask];
+if (cmd.cmd_prefix_ >= 128) {
+int dist_code = cmd.dist_prefix_;
+int distnumextra = cmd.dist_extra_ >> 24;
+int distextra = cmd.dist_extra_ & 0xffffff;
+if (mb.distance_context_map.empty()) {
+distance_enc.StoreSymbol(dist_code, storage_ix, storage);
+} else {
+int context = cmd.DistanceContext();
+distance_enc.StoreSymbolWithContext<kDistanceContextBits>(
+dist_code, context, mb.distance_context_map, storage_ix, storage);
+}
+brotli::WriteBits(distnumextra, distextra, storage_ix, storage);
+}
+}
}
if (is_last) {
JumpToByteBoundary(storage_ix, storage);
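The StoreMetaBlock hunks above stop re-deriving the literal-context bytes from pos inside the loop; prev_byte and prev_byte2 are now seeded by the caller (see the new prev_byte_/prev_byte2_ state in encode.cc below) and threaded through the command loop. A minimal sketch of that threading pattern, with a toy context function standing in for brotli's Context() tables and ring-buffer masking omitted:

#include <cstddef>
#include <cstdint>
#include <vector>

// Toy stand-in for brotli's Context(); the real encoder indexes per-mode
// lookup tables (CONTEXT_UTF8, CONTEXT_SIGNED, ...).
static int ToyContext(uint8_t p1, uint8_t p2) {
  return ((p1 >> 2) ^ (p2 >> 6)) & 63;
}

// One insert-then-copy command: literals update the context bytes one by one,
// and a copy re-seeds them from the last two bytes it produced, mirroring the
// new loop in StoreMetaBlock. Assumes *pos >= 2 after the copy.
void ContextsForCommand(const std::vector<uint8_t>& input, size_t* pos,
                        size_t insert_len, size_t copy_len,
                        uint8_t* prev_byte, uint8_t* prev_byte2,
                        std::vector<int>* contexts) {
  for (size_t j = 0; j < insert_len; ++j) {
    contexts->push_back(ToyContext(*prev_byte, *prev_byte2));
    *prev_byte2 = *prev_byte;
    *prev_byte = input[*pos];
    ++*pos;
  }
  *pos += copy_len;
  if (copy_len > 0) {
    *prev_byte2 = input[*pos - 2];
    *prev_byte = input[*pos - 1];
  }
}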

View File

@@ -65,7 +65,6 @@ void StoreHuffmanTreeOfHuffmanTreeToBitMask(
// bits[0:length] and stores the encoded tree to the bit stream.
void BuildAndStoreHuffmanTree(const int *histogram,
const int length,
-const int quality,
uint8_t* depth,
uint16_t* bits,
int* storage_ix,
@@ -95,7 +94,6 @@ struct BlockSplitCode {
void BuildAndStoreBlockSplitCode(const std::vector<int>& types,
const std::vector<int>& lengths,
const int num_types,
-const int quality,
BlockSplitCode* code,
int* storage_ix,
uint8_t* storage);
@@ -110,8 +108,9 @@ bool StoreMetaBlock(const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
bool final_block,
-int quality,
int num_direct_distance_codes,
int distance_postfix_bits,
int literal_context_mode,

View File

@@ -137,6 +137,8 @@ BrotliCompressor::BrotliCompressor(BrotliParams params)
last_insert_len_(0),
last_flush_pos_(0),
last_processed_pos_(0),
+prev_byte_(0),
+prev_byte2_(0),
storage_size_(0) {
// Sanitize params.
params_.quality = std::max(0, params_.quality);
@@ -324,7 +326,7 @@ bool BrotliCompressor::WriteBrotliData(const bool is_last,
if (!is_last && !force_flush &&
num_commands_ + (input_block_size() >> 1) < cmd_buffer_size_ &&
-input_pos_ + input_block_size() + 2 <= last_flush_pos_ + mask + 1) {
+input_pos_ + input_block_size() <= last_flush_pos_ + mask + 1) {
// Everything will happen later.
last_processed_pos_ = input_pos_;
*out_size = 0;
@@ -395,29 +397,33 @@ bool BrotliCompressor::WriteMetaBlockInternal(const bool is_last,
if (params_.quality > 9 && params_.mode == BrotliParams::MODE_FONT) {
num_direct_distance_codes = 12;
distance_postfix_bits = 1;
+RecomputeDistancePrefixes(commands_.get(),
+num_commands_,
+num_direct_distance_codes,
+distance_postfix_bits);
}
int literal_context_mode = utf8_mode ? CONTEXT_UTF8 : CONTEXT_SIGNED;
MetaBlockSplit mb;
if (params_.greedy_block_split) {
BuildMetaBlockGreedy(data, last_flush_pos_, mask,
commands_.get(), num_commands_,
-params_.quality,
&mb);
} else {
-RecomputeDistancePrefixes(commands_.get(),
-num_commands_,
-num_direct_distance_codes,
-distance_postfix_bits);
BuildMetaBlock(data, last_flush_pos_, mask,
+prev_byte_, prev_byte2_,
commands_.get(), num_commands_,
-num_direct_distance_codes,
-distance_postfix_bits,
literal_context_mode,
params_.enable_context_modeling,
&mb);
}
+if (params_.quality >= 3) {
+OptimizeHistograms(num_direct_distance_codes,
+distance_postfix_bits,
+&mb);
+}
if (!StoreMetaBlock(data, last_flush_pos_, bytes, mask,
-is_last, params_.quality,
+prev_byte_, prev_byte2_,
+is_last,
num_direct_distance_codes,
distance_postfix_bits,
literal_context_mode,
@@ -442,6 +448,8 @@ bool BrotliCompressor::WriteMetaBlockInternal(const bool is_last,
last_byte_bits_ = storage_ix & 7;
last_flush_pos_ = input_pos_;
last_processed_pos_ = input_pos_;
+prev_byte_ = data[(last_flush_pos_ - 1) & mask];
+prev_byte2_ = data[(last_flush_pos_ - 2) & mask];
num_commands_ = 0;
*output = &storage[0];
*out_size = storage_ix >> 3;

View File

@@ -67,6 +67,7 @@ struct BrotliParams {
bool enable_context_modeling;
};
+// An instance can not be reused for multiple brotli streams.
class BrotliCompressor {
public:
explicit BrotliCompressor(BrotliParams params);
@@ -153,6 +154,8 @@ class BrotliCompressor {
int dist_cache_[4];
uint8_t last_byte_;
uint8_t last_byte_bits_;
+uint8_t prev_byte_;
+uint8_t prev_byte2_;
int storage_size_;
std::unique_ptr<uint8_t[]> storage_;
static StaticDictionary *static_dictionary_;

View File

@@ -144,6 +144,9 @@ bool WriteMetaBlockParallel(const BrotliParams& params,
// mask + 1 as the size of the ringbuffer.
const size_t mask = std::numeric_limits<size_t>::max() >> 1;
+uint8_t prev_byte = input_pos > 0 ? input[(input_pos - 1) & mask] : 0;
+uint8_t prev_byte2 = input_pos > 1 ? input[(input_pos - 2) & mask] : 0;
// Decide about UTF8 mode.
static const double kMinUTF8Ratio = 0.75;
bool utf8_mode = IsMostlyUTF8(&input[input_pos], input_size, kMinUTF8Ratio);
@@ -200,18 +203,17 @@ bool WriteMetaBlockParallel(const BrotliParams& params,
params.mode == BrotliParams::MODE_FONT ? 12 : 0;
int distance_postfix_bits = params.mode == BrotliParams::MODE_FONT ? 1 : 0;
int literal_context_mode = utf8_mode ? CONTEXT_UTF8 : CONTEXT_SIGNED;
+RecomputeDistancePrefixes(&commands,
+num_direct_distance_codes,
+distance_postfix_bits);
if (params.greedy_block_split) {
BuildMetaBlockGreedy(&input[0], input_pos, mask,
-commands.data(), commands.size(), params.quality,
+commands.data(), commands.size(),
&mb);
} else {
-RecomputeDistancePrefixes(&commands,
-num_direct_distance_codes,
-distance_postfix_bits);
BuildMetaBlock(&input[0], input_pos, mask,
+prev_byte, prev_byte2,
commands.data(), commands.size(),
-num_direct_distance_codes,
-distance_postfix_bits,
literal_context_mode,
true,
&mb);
@@ -236,7 +238,8 @@ bool WriteMetaBlockParallel(const BrotliParams& params,
// Store the meta-block to the temporary output.
if (!StoreMetaBlock(&input[0], input_pos, input_size, mask,
-is_last, params.quality,
+prev_byte, prev_byte2,
+is_last,
num_direct_distance_codes,
distance_postfix_bits,
literal_context_mode,

View File

@@ -42,16 +42,8 @@ struct HuffmanTree {
HuffmanTree::HuffmanTree() {}
-// Sort the root nodes, least popular first, break ties by value.
-bool SortHuffmanTree(const HuffmanTree &v0, const HuffmanTree &v1) {
-if (v0.total_count_ == v1.total_count_) {
-return v0.index_right_or_value_ > v1.index_right_or_value_;
-}
-return v0.total_count_ < v1.total_count_;
-}
// Sort the root nodes, least popular first.
-bool SortHuffmanTreeFast(const HuffmanTree &v0, const HuffmanTree &v1) {
+bool SortHuffmanTree(const HuffmanTree &v0, const HuffmanTree &v1) {
return v0.total_count_ < v1.total_count_;
}
@@ -88,7 +80,6 @@ void SetDepth(const HuffmanTree &p,
void CreateHuffmanTree(const int *data,
const int length,
const int tree_limit,
-const int quality,
uint8_t *depth) {
// For block sizes below 64 kB, we never need to do a second iteration
// of this loop. Probably all of our block sizes will be smaller than
@@ -98,7 +89,7 @@ void CreateHuffmanTree(const int *data,
std::vector<HuffmanTree> tree;
tree.reserve(2 * length + 1);
-for (int i = 0; i < length; ++i) {
+for (int i = length - 1; i >= 0; --i) {
if (data[i]) {
const int count = std::max(data[i], count_limit);
tree.push_back(HuffmanTree(count, -1, i));
@@ -111,11 +102,8 @@ void CreateHuffmanTree(const int *data,
break;
}
-if (quality > 1) {
-std::sort(tree.begin(), tree.end(), SortHuffmanTree);
-} else {
-std::sort(tree.begin(), tree.end(), SortHuffmanTreeFast);
-}
+std::stable_sort(tree.begin(), tree.end(), SortHuffmanTree);
// The nodes are:
// [0, n): the sorted leaf nodes that we start with.
// [n]: we add a sentinel here.
@@ -242,12 +230,21 @@ void WriteHuffmanTreeRepetitionsZeros(
}
int OptimizeHuffmanCountsForRle(int length, int* counts) {
+int nonzero_count = 0;
int stride;
int limit;
int sum;
uint8_t* good_for_rle;
// Let's make the Huffman code more compatible with rle encoding.
int i;
+for (i = 0; i < length; i++) {
+if (counts[i]) {
+++nonzero_count;
+}
+}
+if (nonzero_count < 16) {
+return 1;
+}
for (; length >= 0; --length) {
if (length == 0) {
return 1; // All zeros.
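The hunks above fold the two sort predicates into one: the old quality > 1 path used std::sort with a tie-break toward the larger symbol value, while the new code pushes the leaves in reverse symbol order and stable-sorts on the count alone, which appears intended to reproduce the same leaf ordering without the quality branch. A small self-contained check of that reading, using toy counts rather than brotli's HuffmanTree nodes:

#include <algorithm>
#include <cstdio>
#include <vector>

struct Leaf { int total_count_; int index_right_or_value_; };

// Old comparator (quality > 1): ties broken by larger symbol value first.
static bool OldCompare(const Leaf& a, const Leaf& b) {
  if (a.total_count_ == b.total_count_)
    return a.index_right_or_value_ > b.index_right_or_value_;
  return a.total_count_ < b.total_count_;
}

// New comparator: count only, used with std::stable_sort.
static bool NewCompare(const Leaf& a, const Leaf& b) {
  return a.total_count_ < b.total_count_;
}

int main() {
  const int counts[] = {5, 3, 3, 7, 3, 0, 5};
  const int n = sizeof(counts) / sizeof(counts[0]);

  // Old path: leaves pushed in increasing symbol order, std::sort + tie-break.
  std::vector<Leaf> old_leaves;
  for (int i = 0; i < n; ++i)
    if (counts[i]) old_leaves.push_back(Leaf{counts[i], i});
  std::sort(old_leaves.begin(), old_leaves.end(), OldCompare);

  // New path: leaves pushed in decreasing symbol order, stable_sort on count;
  // stability preserves the reversed insertion order among equal counts.
  std::vector<Leaf> new_leaves;
  for (int i = n - 1; i >= 0; --i)
    if (counts[i]) new_leaves.push_back(Leaf{counts[i], i});
  std::stable_sort(new_leaves.begin(), new_leaves.end(), NewCompare);

  // Both columns print symbols 4, 2, 1 (count 3), then 6, 0 (count 5), then 3.
  for (size_t i = 0; i < old_leaves.size(); ++i)
    std::printf("%d:%d  |  %d:%d\n",
                old_leaves[i].total_count_, old_leaves[i].index_right_or_value_,
                new_leaves[i].total_count_, new_leaves[i].index_right_or_value_);
  return 0;
}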

View File

@@ -37,7 +37,6 @@ namespace brotli {
void CreateHuffmanTree(const int *data,
const int length,
const int tree_limit,
-const int quality,
uint8_t *depth);
// Change the population counts in a way that the consequent

View File

@@ -538,7 +538,7 @@ class HashLongestMatch {
}
if (kUseDictionary && static_dict_ != NULL) {
// We decide based on first 4 bytes how many bytes to test for.
-int prefix = BROTLI_UNALIGNED_LOAD32(&data[cur_ix_masked]);
+uint32_t prefix = BROTLI_UNALIGNED_LOAD32(&data[cur_ix_masked]);
int maxlen = static_dict_->GetLength(prefix);
for (int len = std::min<size_t>(maxlen, max_length);
len > best_len && len >= 4; --len) {
@@ -595,7 +595,7 @@ struct Hashers {
typedef HashLongestMatch<14, 5, 4, 4, false, false> H4;
typedef HashLongestMatch<15, 6, 4, 10, false, false> H5;
typedef HashLongestMatch<15, 7, 4, 10, false, false> H6;
-typedef HashLongestMatch<15, 8, 4, 16, true, false> H7;
+typedef HashLongestMatch<15, 8, 4, 16, false, false> H7;
typedef HashLongestMatch<15, 8, 4, 16, true, true> H8;
typedef HashLongestMatch<15, 8, 2, 16, true, false> H9;

View File

@@ -33,12 +33,15 @@ void BuildHistograms(
const BlockSplit& insert_and_copy_split,
const BlockSplit& dist_split,
const uint8_t* ringbuffer,
-size_t pos,
+size_t start_pos,
size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
const std::vector<int>& context_modes,
std::vector<HistogramLiteral>* literal_histograms,
std::vector<HistogramCommand>* insert_and_copy_histograms,
std::vector<HistogramDistance>* copy_dist_histograms) {
+size_t pos = start_pos;
BlockSplitIterator literal_it(literal_split);
BlockSplitIterator insert_and_copy_it(insert_and_copy_split);
BlockSplitIterator dist_it(dist_split);
@@ -49,47 +52,24 @@ void BuildHistograms(
cmd.cmd_prefix_);
for (int j = 0; j < cmd.insert_len_; ++j) {
literal_it.Next();
-uint8_t prev_byte = pos > 0 ? ringbuffer[(pos - 1) & mask] : 0;
-uint8_t prev_byte2 = pos > 1 ? ringbuffer[(pos - 2) & mask] : 0;
int context = (literal_it.type_ << kLiteralContextBits) +
Context(prev_byte, prev_byte2, context_modes[literal_it.type_]);
(*literal_histograms)[context].Add(ringbuffer[pos & mask]);
+prev_byte2 = prev_byte;
+prev_byte = ringbuffer[pos & mask];
++pos;
}
pos += cmd.copy_len_;
-if (cmd.copy_len_ > 0 && cmd.cmd_prefix_ >= 128) {
-dist_it.Next();
-int context = (dist_it.type_ << kDistanceContextBits) +
-cmd.DistanceContext();
-(*copy_dist_histograms)[context].Add(cmd.dist_prefix_);
-}
-}
-}
-void BuildLiteralHistogramsForBlockType(
-const Command* cmds,
-const size_t num_commands,
-const BlockSplit& literal_split,
-const uint8_t* ringbuffer,
-size_t pos,
-size_t mask,
-int block_type,
-int context_mode,
-std::vector<HistogramLiteral>* histograms) {
-BlockSplitIterator literal_it(literal_split);
-for (int i = 0; i < num_commands; ++i) {
-const Command &cmd = cmds[i];
-for (int j = 0; j < cmd.insert_len_; ++j) {
-literal_it.Next();
-if (literal_it.type_ == block_type) {
-uint8_t prev_byte = pos > 0 ? ringbuffer[(pos - 1) & mask] : 0;
-uint8_t prev_byte2 = pos > 1 ? ringbuffer[(pos - 2) & mask] : 0;
-int context = Context(prev_byte, prev_byte2, context_mode);
-(*histograms)[context].Add(ringbuffer[pos & mask]);
+if (cmd.copy_len_ > 0) {
+prev_byte2 = ringbuffer[(pos - 2) & mask];
+prev_byte = ringbuffer[(pos - 1) & mask];
+if (cmd.cmd_prefix_ >= 128) {
+dist_it.Next();
+int context = (dist_it.type_ << kDistanceContextBits) +
+cmd.DistanceContext();
+(*copy_dist_histograms)[context].Add(cmd.dist_prefix_);
+}
-++pos;
}
-pos += cmd.copy_len_;
}
}

View File

@@ -95,22 +95,13 @@ void BuildHistograms(
const uint8_t* ringbuffer,
size_t pos,
size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
const std::vector<int>& context_modes,
std::vector<HistogramLiteral>* literal_histograms,
std::vector<HistogramCommand>* insert_and_copy_histograms,
std::vector<HistogramDistance>* copy_dist_histograms);
-void BuildLiteralHistogramsForBlockType(
-const Command* cmds,
-const size_t num_commands,
-const BlockSplit& literal_split,
-const uint8_t* ringbuffer,
-size_t pos,
-size_t mask,
-int block_type,
-int context_mode,
-std::vector<HistogramLiteral>* histograms);
} // namespace brotli
#endif // BROTLI_ENC_HISTOGRAM_H_

View File

@@ -26,10 +26,10 @@ namespace brotli {
void BuildMetaBlock(const uint8_t* ringbuffer,
const size_t pos,
const size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
const Command* cmds,
size_t num_commands,
-int num_direct_distance_codes,
-int distance_postfix_bits,
int literal_context_mode,
bool enable_context_modeling,
MetaBlockSplit* mb) {
@@ -56,6 +56,8 @@ void BuildMetaBlock(const uint8_t* ringbuffer,
ringbuffer,
pos,
mask,
+prev_byte,
+prev_byte2,
literal_context_modes,
&literal_histograms,
&mb->command_histograms,
@@ -107,13 +109,11 @@ class BlockSplitter {
int min_block_size,
double split_threshold,
int num_symbols,
-int quality,
BlockSplit* split,
std::vector<HistogramType>* histograms)
: alphabet_size_(alphabet_size),
min_block_size_(min_block_size),
split_threshold_(split_threshold),
-quality_(quality),
num_blocks_(0),
split_(split),
histograms_(histograms),
@@ -238,8 +238,6 @@ class BlockSplitter {
// where A is the current histogram and B is the histogram of the last or the
// second last block type.
const double split_threshold_;
-// Quality setting used for speed vs. compression ratio decisions.
-const int quality_;
int num_blocks_;
BlockSplit* split_; // not owned
@@ -265,7 +263,6 @@ void BuildMetaBlockGreedy(const uint8_t* ringbuffer,
size_t mask,
const Command *commands,
size_t n_commands,
-int quality,
MetaBlockSplit* mb) {
int num_literals = 0;
for (int i = 0; i < n_commands; ++i) {
@@ -273,13 +270,13 @@ void BuildMetaBlockGreedy(const uint8_t* ringbuffer,
}
BlockSplitter<HistogramLiteral> lit_blocks(
-256, 512, 400.0, num_literals, quality,
+256, 512, 400.0, num_literals,
&mb->literal_split, &mb->literal_histograms);
BlockSplitter<HistogramCommand> cmd_blocks(
-kNumCommandPrefixes, 1024, 500.0, n_commands, quality,
+kNumCommandPrefixes, 1024, 500.0, n_commands,
&mb->command_split, &mb->command_histograms);
BlockSplitter<HistogramDistance> dist_blocks(
-64, 512, 100.0, n_commands, quality,
+64, 512, 100.0, n_commands,
&mb->distance_split, &mb->distance_histograms);
for (int i = 0; i < n_commands; ++i) {
@@ -300,4 +297,23 @@ void BuildMetaBlockGreedy(const uint8_t* ringbuffer,
dist_blocks.FinishBlock(/* is_final = */ true);
}
+void OptimizeHistograms(int num_direct_distance_codes,
+int distance_postfix_bits,
+MetaBlockSplit* mb) {
+for (int i = 0; i < mb->literal_histograms.size(); ++i) {
+OptimizeHuffmanCountsForRle(256, &mb->literal_histograms[i].data_[0]);
+}
+for (int i = 0; i < mb->command_histograms.size(); ++i) {
+OptimizeHuffmanCountsForRle(kNumCommandPrefixes,
+&mb->command_histograms[i].data_[0]);
+}
+int num_distance_codes =
+kNumDistanceShortCodes + num_direct_distance_codes +
+(48 << distance_postfix_bits);
+for (int i = 0; i < mb->distance_histograms.size(); ++i) {
+OptimizeHuffmanCountsForRle(num_distance_codes,
+&mb->distance_histograms[i].data_[0]);
+}
+}
} // namespace brotli
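The new OptimizeHistograms pass above replaces the per-tree RLE optimization that BuildAndStoreHuffmanTree used to run for quality >= 3 (see the removed branch in the brotli_bit_stream.cc hunk earlier); the distance parameters are needed only to size the distance alphabet. A tiny worked check of that sizing expression; kNumDistanceShortCodes is assumed to be 16, its value in brotli's constants:

#include <cstdio>

static const int kNumDistanceShortCodes = 16;  // assumed value

// Same expression as in OptimizeHistograms above.
static int DistanceAlphabetSize(int num_direct_distance_codes,
                                int distance_postfix_bits) {
  return kNumDistanceShortCodes + num_direct_distance_codes +
         (48 << distance_postfix_bits);
}

int main() {
  // Default mode: 16 + 0 + 48 = 64, matching the 64-symbol distance
  // histograms used by the greedy splitter above.
  std::printf("%d\n", DistanceAlphabetSize(0, 0));
  // Font mode as configured in this patch (12 direct codes, 1 postfix bit):
  // 16 + 12 + 96 = 124.
  std::printf("%d\n", DistanceAlphabetSize(12, 1));
  return 0;
}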

View File

@@ -47,10 +47,10 @@ struct MetaBlockSplit {
void BuildMetaBlock(const uint8_t* ringbuffer,
const size_t pos,
const size_t mask,
+uint8_t prev_byte,
+uint8_t prev_byte2,
const Command* cmds,
size_t num_commands,
-int num_direct_distance_codes,
-int distance_postfix_bits,
int literal_context_mode,
bool enable_context_modleing,
MetaBlockSplit* mb);
@@ -60,9 +60,12 @@ void BuildMetaBlockGreedy(const uint8_t* ringbuffer,
size_t mask,
const Command *commands,
size_t n_commands,
-int quality,
MetaBlockSplit* mb);
+void OptimizeHistograms(int num_direct_distance_codes,
+int distance_postfix_bits,
+MetaBlockSplit* mb);
} // namespace brotli
#endif // BROTLI_ENC_METABLOCK_H_

View File

@@ -52,16 +52,16 @@ class StaticDictionary {
return;
}
map_[str] = ix;
-int v = 0;
+uint32_t v = 0;
for (int i = 0; i < 4 && i < str.size(); ++i) {
-v += str[i] << (8 * i);
+v += static_cast<uint32_t>(str[i]) << (8 * i);
}
if (prefix_map_[v] < str.size()) {
prefix_map_[v] = str.size();
}
}
-int GetLength(int v) const {
-std::unordered_map<int, int>::const_iterator it = prefix_map_.find(v);
+int GetLength(uint32_t v) const {
+std::unordered_map<uint32_t, int>::const_iterator it = prefix_map_.find(v);
if (it == prefix_map_.end()) {
return 0;
}
@@ -79,7 +79,7 @@ class StaticDictionary {
}
private:
std::unordered_map<std::string, int> map_;
-std::unordered_map<int, int> prefix_map_;
+std::unordered_map<uint32_t, int> prefix_map_;
};
} // namespace brotli
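The StaticDictionary hunks switch the prefix key from int to uint32_t end to end, matching the uint32_t now produced by BROTLI_UNALIGNED_LOAD32 in hash.h above; presumably this avoids pushing a four-byte prefix through a signed int when the top byte has its high bit set. A small illustration of that case; little-endian byte order and memcpy as a stand-in for the unaligned load are assumptions of this sketch, not brotli code:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // Four prefix bytes whose last (most significant on little-endian) byte
  // has the high bit set.
  const unsigned char word[4] = {'t', 'h', 'e', 0xC3};
  uint32_t prefix;
  std::memcpy(&prefix, word, sizeof(prefix));  // stand-in for the 32-bit load
  // 0xc3656874 is larger than INT_MAX, so an int-typed key relied on
  // implementation-defined narrowing; as uint32_t the packed value is exact.
  std::printf("prefix = 0x%08x\n", static_cast<unsigned>(prefix));
  return 0;
}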