Fix build with -Wmissing-declarations.

While there, add -Wmissing-prototypes and -Wmissing-declarations
to shared.mk in order to catch similar errors in the future.

Signed-off-by: Piotr Sikora <piotrsikora@google.com>
Piotr Sikora 2016-03-18 19:18:59 -07:00
parent f453b1bf36
commit 501cb86172
10 changed files with 185 additions and 180 deletions
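For readers unfamiliar with the warning, here is a minimal sketch of what -Wmissing-declarations flags and how the pattern used throughout this commit silences it. The sketch is not taken from the commit; example.cc, DoubleIt and PublicDouble are made-up names. The warning fires when a function is defined with external linkage but no prior declaration is visible, which usually means the function should either be declared in a header or given internal linkage with static.

// example.cc -- hypothetical illustration, not part of brotli.
#include <cstddef>

// Compiled with g++ -Wmissing-declarations, this definition would trigger a
// warning along the lines of "no previous declaration for 'DoubleIt'",
// because it has external linkage and nothing declares it beforehand:
//
//   std::size_t DoubleIt(std::size_t x) { return 2 * x; }
//
// The fix applied throughout this commit: file-local helpers become static,
// so no separate declaration is required.
static std::size_t DoubleIt(std::size_t x) { return 2 * x; }

// Functions that form a translation unit's public interface are instead
// declared before being defined (normally in a header; shown inline here for
// brevity), which also satisfies the warning.
std::size_t PublicDouble(std::size_t x);
std::size_t PublicDouble(std::size_t x) { return DoubleIt(x); }

GCC treats -Wmissing-prototypes as a C/Objective-C-only option, which is presumably why the shared.mk change below adds it to CFLAGS while CXXFLAGS gets -Wmissing-declarations.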

View File

@ -236,11 +236,11 @@ class StartPosQueue {
// Returns the minimum possible copy length that can improve the cost of any
// future position.
size_t ComputeMinimumCopyLength(const StartPosQueue& queue,
const ZopfliNode* nodes,
const ZopfliCostModel& model,
const size_t num_bytes,
const size_t pos) {
static size_t ComputeMinimumCopyLength(const StartPosQueue& queue,
const ZopfliNode* nodes,
const ZopfliCostModel& model,
const size_t num_bytes,
const size_t pos) {
// Compute the minimum possible cost of reaching any future position.
const size_t start0 = queue.GetStartPosData(0).pos;
float min_cost = (nodes[start0].cost +
@ -272,12 +272,12 @@ size_t ComputeMinimumCopyLength(const StartPosQueue& queue,
// starting_dist_cache[0..3].
// REQUIRES: nodes[pos].cost < kInfinity
// REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant".
void ComputeDistanceCache(const size_t block_start,
const size_t pos,
const size_t max_backward,
const int* starting_dist_cache,
const ZopfliNode* nodes,
int* dist_cache) {
static void ComputeDistanceCache(const size_t block_start,
const size_t pos,
const size_t max_backward,
const int* starting_dist_cache,
const ZopfliNode* nodes,
int* dist_cache) {
int idx = 0;
size_t p = pos;
// Because of prerequisite, does at most (pos + 1) / 2 iterations.
@ -302,18 +302,18 @@ void ComputeDistanceCache(const size_t block_start,
}
}
void UpdateNodes(const size_t num_bytes,
const size_t block_start,
const size_t pos,
const uint8_t* ringbuffer,
const size_t ringbuffer_mask,
const size_t max_backward_limit,
const int* starting_dist_cache,
const size_t num_matches,
const BackwardMatch* matches,
const ZopfliCostModel* model,
StartPosQueue* queue,
ZopfliNode* nodes) {
static void UpdateNodes(const size_t num_bytes,
const size_t block_start,
const size_t pos,
const uint8_t* ringbuffer,
const size_t ringbuffer_mask,
const size_t max_backward_limit,
const int* starting_dist_cache,
const size_t num_matches,
const BackwardMatch* matches,
const ZopfliCostModel* model,
StartPosQueue* queue,
ZopfliNode* nodes) {
size_t cur_ix = block_start + pos;
size_t cur_ix_masked = cur_ix & ringbuffer_mask;
size_t max_distance = std::min(cur_ix, max_backward_limit);
@ -408,9 +408,9 @@ void UpdateNodes(const size_t num_bytes,
}
}
void ComputeShortestPathFromNodes(size_t num_bytes,
const ZopfliNode* nodes,
std::vector<uint32_t>* path) {
static void ComputeShortestPathFromNodes(size_t num_bytes,
const ZopfliNode* nodes,
std::vector<uint32_t>* path) {
std::vector<uint32_t> backwards(num_bytes / 2 + 1);
size_t index = num_bytes;
while (nodes[index].cost == kInfinity) --index;
@ -467,17 +467,17 @@ void ZopfliCreateCommands(const size_t num_bytes,
*last_insert_len += num_bytes - pos;
}
void ZopfliIterate(size_t num_bytes,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const size_t max_backward_limit,
const int* dist_cache,
const ZopfliCostModel& model,
const std::vector<uint32_t>& num_matches,
const std::vector<BackwardMatch>& matches,
ZopfliNode* nodes,
std::vector<uint32_t>* path) {
static void ZopfliIterate(size_t num_bytes,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const size_t max_backward_limit,
const int* dist_cache,
const ZopfliCostModel& model,
const std::vector<uint32_t>& num_matches,
const std::vector<BackwardMatch>& matches,
ZopfliNode* nodes,
std::vector<uint32_t>* path) {
nodes[0].length = 0;
nodes[0].cost = 0;
StartPosQueue queue(3);

View File

@ -229,8 +229,8 @@ size_t FindBlocks(const DataType* data, const size_t length,
return num_blocks;
}
size_t RemapBlockIds(uint8_t* block_ids, const size_t length,
uint16_t* new_id, const size_t num_histograms) {
static size_t RemapBlockIds(uint8_t* block_ids, const size_t length,
uint16_t* new_id, const size_t num_histograms) {
static const uint16_t kInvalidId = 256;
for (size_t i = 0; i < num_histograms; ++i) {
new_id[i] = kInvalidId;

View File

@ -165,7 +165,7 @@ void StoreHuffmanTreeOfHuffmanTreeToBitMask(
}
}
void StoreHuffmanTreeToBitMask(
static void StoreHuffmanTreeToBitMask(
const size_t huffman_tree_size,
const uint8_t* huffman_tree,
const uint8_t* huffman_tree_extra_bits,
@ -189,11 +189,11 @@ void StoreHuffmanTreeToBitMask(
}
}
void StoreSimpleHuffmanTree(const uint8_t* depths,
size_t symbols[4],
size_t num_symbols,
size_t max_bits,
size_t *storage_ix, uint8_t *storage) {
static void StoreSimpleHuffmanTree(const uint8_t* depths,
size_t symbols[4],
size_t num_symbols,
size_t max_bits,
size_t *storage_ix, uint8_t *storage) {
// value of 1 indicates a simple Huffman code
WriteBits(2, 1, storage_ix, storage);
WriteBits(2, num_symbols - 1, storage_ix, storage); // NSYM - 1
@ -494,7 +494,7 @@ void BuildAndStoreHuffmanTreeFast(const uint32_t *histogram,
}
}
size_t IndexOf(const uint8_t* v, size_t v_size, uint8_t value) {
static size_t IndexOf(const uint8_t* v, size_t v_size, uint8_t value) {
size_t i = 0;
for (; i < v_size; ++i) {
if (v[i] == value) return i;
@ -502,7 +502,7 @@ size_t IndexOf(const uint8_t* v, size_t v_size, uint8_t value) {
return i;
}
void MoveToFront(uint8_t* v, size_t index) {
static void MoveToFront(uint8_t* v, size_t index) {
uint8_t value = v[index];
for (size_t i = index; i != 0; --i) {
v[i] = v[i - 1];
@ -510,9 +510,9 @@ void MoveToFront(uint8_t* v, size_t index) {
v[0] = value;
}
void MoveToFrontTransform(const uint32_t* __restrict v_in,
const size_t v_size,
uint32_t* v_out) {
static void MoveToFrontTransform(const uint32_t* __restrict v_in,
const size_t v_size,
uint32_t* v_out) {
if (v_size == 0) {
return;
}
@ -537,10 +537,10 @@ void MoveToFrontTransform(const uint32_t* __restrict v_in,
// *max_length_prefix. Will not create prefix codes bigger than the initial
// value of *max_run_length_prefix. The prefix code of run length L is simply
// Log2Floor(L) and the number of extra bits is the same as the prefix code.
void RunLengthCodeZeros(const size_t in_size,
uint32_t* __restrict v,
size_t* __restrict out_size,
uint32_t* __restrict max_run_length_prefix) {
static void RunLengthCodeZeros(const size_t in_size,
uint32_t* __restrict v,
size_t* __restrict out_size,
uint32_t* __restrict max_run_length_prefix) {
uint32_t max_reps = 0;
for (size_t i = 0; i < in_size;) {
for (; i < in_size && v[i] != 0; ++i) ;
@ -646,13 +646,13 @@ void StoreBlockSwitch(const BlockSplitCode& code,
storage_ix, storage);
}
void BuildAndStoreBlockSplitCode(const std::vector<uint8_t>& types,
const std::vector<uint32_t>& lengths,
const size_t num_types,
HuffmanTree* tree,
BlockSplitCode* code,
size_t* storage_ix,
uint8_t* storage) {
static void BuildAndStoreBlockSplitCode(const std::vector<uint8_t>& types,
const std::vector<uint32_t>& lengths,
const size_t num_types,
HuffmanTree* tree,
BlockSplitCode* code,
size_t* storage_ix,
uint8_t* storage) {
const size_t num_blocks = types.size();
uint32_t type_histo[kBlockTypeAlphabetSize];
uint32_t length_histo[kNumBlockLenPrefixes];
@ -823,7 +823,7 @@ class BlockEncoder {
std::vector<uint16_t> bits_;
};
void JumpToByteBoundary(size_t* storage_ix, uint8_t* storage) {
static void JumpToByteBoundary(size_t* storage_ix, uint8_t* storage) {
*storage_ix = (*storage_ix + 7u) & ~7u;
storage[*storage_ix >> 3] = 0;
}
@ -947,14 +947,14 @@ void StoreMetaBlock(const uint8_t* input,
}
}
void BuildHistograms(const uint8_t* input,
size_t start_pos,
size_t mask,
const brotli::Command *commands,
size_t n_commands,
HistogramLiteral* lit_histo,
HistogramCommand* cmd_histo,
HistogramDistance* dist_histo) {
static void BuildHistograms(const uint8_t* input,
size_t start_pos,
size_t mask,
const brotli::Command *commands,
size_t n_commands,
HistogramLiteral* lit_histo,
HistogramCommand* cmd_histo,
HistogramDistance* dist_histo) {
size_t pos = start_pos;
for (size_t i = 0; i < n_commands; ++i) {
const Command cmd = commands[i];
@ -970,19 +970,19 @@ void BuildHistograms(const uint8_t* input,
}
}
void StoreDataWithHuffmanCodes(const uint8_t* input,
size_t start_pos,
size_t mask,
const brotli::Command *commands,
size_t n_commands,
const uint8_t* lit_depth,
const uint16_t* lit_bits,
const uint8_t* cmd_depth,
const uint16_t* cmd_bits,
const uint8_t* dist_depth,
const uint16_t* dist_bits,
size_t* storage_ix,
uint8_t* storage) {
static void StoreDataWithHuffmanCodes(const uint8_t* input,
size_t start_pos,
size_t mask,
const brotli::Command *commands,
size_t n_commands,
const uint8_t* lit_depth,
const uint16_t* lit_bits,
const uint8_t* cmd_depth,
const uint16_t* cmd_bits,
const uint8_t* dist_depth,
const uint16_t* dist_bits,
size_t* storage_ix,
uint8_t* storage) {
size_t pos = start_pos;
for (size_t i = 0; i < n_commands; ++i) {
const Command cmd = commands[i];

View File

@ -58,12 +58,12 @@ static inline int IsMatch(const uint8_t* p1, const uint8_t* p2) {
// we can only approximate the statistics of the actual literal stream.
// Moreover, for long inputs we build a histogram from a sample of the input
// and thus have to assign a non-zero depth for each literal.
void BuildAndStoreLiteralPrefixCode(const uint8_t* input,
const size_t input_size,
uint8_t depths[256],
uint16_t bits[256],
size_t* storage_ix,
uint8_t* storage) {
static void BuildAndStoreLiteralPrefixCode(const uint8_t* input,
const size_t input_size,
uint8_t depths[256],
uint16_t bits[256],
size_t* storage_ix,
uint8_t* storage) {
uint32_t histogram[256] = { 0 };
size_t histogram_total;
if (input_size < (1 << 15)) {
@ -102,9 +102,11 @@ void BuildAndStoreLiteralPrefixCode(const uint8_t* input,
// Builds a command and distance prefix code (each 64 symbols) into "depth" and
// "bits" based on "histogram" and stores it into the bit stream.
void BuildAndStoreCommandPrefixCode(const uint32_t histogram[128],
uint8_t depth[128], uint16_t bits[128],
size_t* storage_ix, uint8_t* storage) {
static void BuildAndStoreCommandPrefixCode(const uint32_t histogram[128],
uint8_t depth[128],
uint16_t bits[128],
size_t* storage_ix,
uint8_t* storage) {
// Tree size for building a tree over 64 symbols is 2 * 64 + 1.
static const size_t kTreeSize = 129;
HuffmanTree tree[kTreeSize];
@ -315,10 +317,10 @@ static void StoreMetaBlockHeader(
WriteBits(1, is_uncompressed, storage_ix, storage);
}
void UpdateBits(size_t n_bits,
uint32_t bits,
size_t pos,
uint8_t *array) {
static void UpdateBits(size_t n_bits,
uint32_t bits,
size_t pos,
uint8_t *array) {
while (n_bits > 0) {
size_t byte_pos = pos >> 3;
size_t n_unchanged_bits = pos & 7;
@ -336,15 +338,16 @@ void UpdateBits(size_t n_bits,
}
}
void RewindBitPosition(const size_t new_storage_ix,
size_t* storage_ix, uint8_t* storage) {
static void RewindBitPosition(const size_t new_storage_ix,
size_t* storage_ix, uint8_t* storage) {
const size_t bitpos = new_storage_ix & 7;
const size_t mask = (1u << bitpos) - 1;
storage[new_storage_ix >> 3] &= static_cast<uint8_t>(mask);
*storage_ix = new_storage_ix;
}
bool ShouldMergeBlock(const uint8_t* data, size_t len, const uint8_t* depths) {
static bool ShouldMergeBlock(const uint8_t* data, size_t len,
const uint8_t* depths) {
size_t histo[256] = { 0 };
static const size_t kSampleRate = 43;
for (size_t i = 0; i < len; i += kSampleRate) {
@ -377,9 +380,9 @@ inline bool ShouldUseUncompressedMode(const uint8_t* metablock_start,
return sum > static_cast<uint32_t>((1 << 15) * kMinEntropy);
}
void EmitUncompressedMetaBlock(const uint8_t* begin, const uint8_t* end,
const size_t storage_ix_start,
size_t* storage_ix, uint8_t* storage) {
static void EmitUncompressedMetaBlock(const uint8_t* begin, const uint8_t* end,
const size_t storage_ix_start,
size_t* storage_ix, uint8_t* storage) {
const size_t len = static_cast<size_t>(end - begin);
RewindBitPosition(storage_ix_start, storage_ix, storage);
StoreMetaBlockHeader(len, 1, storage_ix, storage);

View File

@ -220,10 +220,10 @@ static void StoreMetaBlockHeader(
WriteBits(1, is_uncompressed, storage_ix, storage);
}
void CreateCommands(const uint8_t* input, size_t block_size, size_t input_size,
const uint8_t* base_ip,
int* table, size_t table_size,
uint8_t** literals, uint32_t** commands) {
static void CreateCommands(const uint8_t* input, size_t block_size,
size_t input_size, const uint8_t* base_ip,
int* table, size_t table_size,
uint8_t** literals, uint32_t** commands) {
// "ip" is the input pointer.
const uint8_t* ip = input;
assert(table_size);
@ -400,9 +400,9 @@ emit_remainder:
}
}
void StoreCommands(const uint8_t* literals, const size_t num_literals,
const uint32_t* commands, const size_t num_commands,
size_t* storage_ix, uint8_t* storage) {
static void StoreCommands(const uint8_t* literals, const size_t num_literals,
const uint32_t* commands, const size_t num_commands,
size_t* storage_ix, uint8_t* storage) {
uint8_t lit_depths[256] = { 0 };
uint16_t lit_bits[256] = { 0 };
uint32_t lit_histo[256] = { 0 };
@ -458,8 +458,8 @@ void StoreCommands(const uint8_t* literals, const size_t num_literals,
}
}
bool ShouldCompress(const uint8_t* input, size_t input_size,
size_t num_literals) {
static bool ShouldCompress(const uint8_t* input, size_t input_size,
size_t num_literals) {
static const double kAcceptableLossForUncompressibleSpeedup = 0.02;
static const double kMaxRatioOfLiterals =
1.0 - kAcceptableLossForUncompressibleSpeedup;

View File

@ -42,10 +42,10 @@ static const size_t kMaxNumDelayedSymbols = 0x2fff;
#define COPY_ARRAY(dst, src) memcpy(dst, src, sizeof(src));
void RecomputeDistancePrefixes(Command* cmds,
size_t num_commands,
uint32_t num_direct_distance_codes,
uint32_t distance_postfix_bits) {
static void RecomputeDistancePrefixes(Command* cmds,
size_t num_commands,
uint32_t num_direct_distance_codes,
uint32_t distance_postfix_bits) {
if (num_direct_distance_codes == 0 && distance_postfix_bits == 0) {
return;
}
@ -63,7 +63,7 @@ void RecomputeDistancePrefixes(Command* cmds,
/* Wraps 64-bit input position to 32-bit ringbuffer position preserving
"not-a-first-lap" feature. */
uint32_t WrapPosition(uint64_t position) {
static uint32_t WrapPosition(uint64_t position) {
uint32_t result = static_cast<uint32_t>(position);
if (position > (1u << 30)) {
result = (result & ((1u << 30) - 1)) | (1u << 30);
@ -80,11 +80,11 @@ uint8_t* BrotliCompressor::GetBrotliStorage(size_t size) {
return storage_;
}
size_t MaxHashTableSize(int quality) {
static size_t MaxHashTableSize(int quality) {
return quality == 0 ? 1 << 15 : 1 << 17;
}
size_t HashTableSize(size_t max_table_size, size_t input_size) {
static size_t HashTableSize(size_t max_table_size, size_t input_size) {
size_t htsize = 256;
while (htsize < max_table_size && htsize < input_size) {
htsize <<= 1;
@ -118,7 +118,8 @@ int* BrotliCompressor::GetHashTable(int quality,
return table;
}
void EncodeWindowBits(int lgwin, uint8_t* last_byte, uint8_t* last_byte_bits) {
static void EncodeWindowBits(int lgwin, uint8_t* last_byte,
uint8_t* last_byte_bits) {
if (lgwin == 16) {
*last_byte = 0;
*last_byte_bits = 1;
@ -135,10 +136,10 @@ void EncodeWindowBits(int lgwin, uint8_t* last_byte, uint8_t* last_byte_bits) {
}
// Initializes the command and distance prefix codes for the first block.
void InitCommandPrefixCodes(uint8_t cmd_depths[128],
uint16_t cmd_bits[128],
uint8_t cmd_code[512],
size_t* cmd_code_numbits) {
static void InitCommandPrefixCodes(uint8_t cmd_depths[128],
uint16_t cmd_bits[128],
uint8_t cmd_code[512],
size_t* cmd_code_numbits) {
static const uint8_t kDefaultCommandDepths[128] = {
0, 4, 4, 5, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8,
0, 0, 0, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7,
@ -187,10 +188,10 @@ void InitCommandPrefixCodes(uint8_t cmd_depths[128],
// encoded with the remaining 6 bits of the following byte, and
// BitsEntropy will assume that symbol to be stored alone using Huffman
// coding.
void ChooseContextMap(int quality,
uint32_t* bigram_histo,
size_t* num_literal_contexts,
const uint32_t** literal_context_map) {
static void ChooseContextMap(int quality,
uint32_t* bigram_histo,
size_t* num_literal_contexts,
const uint32_t** literal_context_map) {
uint32_t monogram_histo[3] = { 0 };
uint32_t two_prefix_histo[6] = { 0 };
size_t total = 0;
@ -248,14 +249,15 @@ void ChooseContextMap(int quality,
}
}
void DecideOverLiteralContextModeling(const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
int quality,
ContextType* literal_context_mode,
size_t* num_literal_contexts,
const uint32_t** literal_context_map) {
static void DecideOverLiteralContextModeling(
const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
int quality,
ContextType* literal_context_mode,
size_t* num_literal_contexts,
const uint32_t** literal_context_map) {
if (quality < kMinQualityForContextModeling || length < 64) {
return;
}
@ -279,12 +281,12 @@ void DecideOverLiteralContextModeling(const uint8_t* input,
literal_context_map);
}
bool ShouldCompress(const uint8_t* data,
const size_t mask,
const uint64_t last_flush_pos,
const size_t bytes,
const size_t num_literals,
const size_t num_commands) {
static bool ShouldCompress(const uint8_t* data,
const size_t mask,
const uint64_t last_flush_pos,
const size_t bytes,
const size_t num_literals,
const size_t num_commands) {
if (num_commands < (bytes >> 8) + 2) {
if (num_literals > 0.99 * static_cast<double>(bytes)) {
uint32_t literal_histo[256] = { 0 };
@ -306,22 +308,22 @@ bool ShouldCompress(const uint8_t* data,
return true;
}
void WriteMetaBlockInternal(const uint8_t* data,
const size_t mask,
const uint64_t last_flush_pos,
const size_t bytes,
const bool is_last,
const int quality,
const bool font_mode,
const uint8_t prev_byte,
const uint8_t prev_byte2,
const size_t num_literals,
const size_t num_commands,
Command* commands,
const int* saved_dist_cache,
int* dist_cache,
size_t* storage_ix,
uint8_t* storage) {
static void WriteMetaBlockInternal(const uint8_t* data,
const size_t mask,
const uint64_t last_flush_pos,
const size_t bytes,
const bool is_last,
const int quality,
const bool font_mode,
const uint8_t prev_byte,
const uint8_t prev_byte2,
const size_t num_literals,
const size_t num_commands,
Command* commands,
const int* saved_dist_cache,
int* dist_cache,
size_t* storage_ix,
uint8_t* storage) {
if (bytes == 0) {
// Write the ISLAST and ISEMPTY bits.
WriteBits(2, 3, storage_ix, storage);
@ -771,11 +773,11 @@ bool BrotliCompressor::FinishStream(
return WriteMetaBlock(0, NULL, true, encoded_size, encoded_buffer);
}
int BrotliCompressBufferQuality10(int lgwin,
size_t input_size,
const uint8_t* input_buffer,
size_t* encoded_size,
uint8_t* encoded_buffer) {
static int BrotliCompressBufferQuality10(int lgwin,
size_t input_size,
const uint8_t* input_buffer,
size_t* encoded_size,
uint8_t* encoded_buffer) {
const size_t mask = std::numeric_limits<size_t>::max() >> 1;
assert(input_size <= mask + 1);
const size_t max_backward_limit = (1 << lgwin) - 16;
@ -972,15 +974,15 @@ int BrotliCompressBuffer(BrotliParams params,
return 1;
}
bool BrotliInIsFinished(BrotliIn* r) {
static bool BrotliInIsFinished(BrotliIn* r) {
size_t read_bytes;
return r->Read(0, &read_bytes) == NULL;
}
const uint8_t* BrotliInReadAndCheckEnd(const size_t block_size,
BrotliIn* r,
size_t* bytes_read,
bool* is_last) {
static const uint8_t* BrotliInReadAndCheckEnd(const size_t block_size,
BrotliIn* r,
size_t* bytes_read,
bool* is_last) {
*bytes_read = 0;
const uint8_t* data = reinterpret_cast<const uint8_t*>(
r->Read(block_size, bytes_read));
@ -989,10 +991,10 @@ const uint8_t* BrotliInReadAndCheckEnd(const size_t block_size,
return data;
}
bool CopyOneBlockToRingBuffer(BrotliIn* r,
BrotliCompressor* compressor,
size_t* bytes_read,
bool* is_last) {
static bool CopyOneBlockToRingBuffer(BrotliIn* r,
BrotliCompressor* compressor,
size_t* bytes_read,
bool* is_last) {
const size_t block_size = compressor->input_block_size();
const uint8_t* data = BrotliInReadAndCheckEnd(block_size, r,
bytes_read, is_last);

View File

@ -132,7 +132,7 @@ void CreateHuffmanTree(const uint32_t *data,
}
}
void Reverse(uint8_t* v, size_t start, size_t end) {
static void Reverse(uint8_t* v, size_t start, size_t end) {
--end;
while (start < end) {
uint8_t tmp = v[start];
@ -143,7 +143,7 @@ void Reverse(uint8_t* v, size_t start, size_t end) {
}
}
void WriteHuffmanTreeRepetitions(
static void WriteHuffmanTreeRepetitions(
const uint8_t previous_value,
const uint8_t value,
size_t repetitions,
@ -187,7 +187,7 @@ void WriteHuffmanTreeRepetitions(
}
}
void WriteHuffmanTreeRepetitionsZeros(
static void WriteHuffmanTreeRepetitionsZeros(
size_t repetitions,
size_t* tree_size,
uint8_t* tree,

View File

@ -53,8 +53,8 @@ static size_t DecideMultiByteStatsLevel(size_t pos, size_t len, size_t mask,
return max_utf8;
}
void EstimateBitCostsForLiteralsUTF8(size_t pos, size_t len, size_t mask,
const uint8_t *data, float *cost) {
static void EstimateBitCostsForLiteralsUTF8(size_t pos, size_t len, size_t mask,
const uint8_t *data, float *cost) {
// max_utf8 is 0 (normal ascii single byte modeling),
// 1 (for 2-byte utf-8 modeling), or 2 (for 3-byte utf-8 modeling).

View File

@ -9,5 +9,5 @@ ifeq ($(OS), Darwin)
CPPFLAGS += -DOS_MACOSX
endif
CFLAGS += $(COMMON_FLAGS)
CXXFLAGS += $(COMMON_FLAGS)
CFLAGS += $(COMMON_FLAGS) -Wmissing-prototypes
CXXFLAGS += $(COMMON_FLAGS) -Wmissing-declarations

View File

@ -167,7 +167,7 @@ static FILE *OpenOutputFile(const char *output_path, const int force) {
return fdopen(fd, "wb");
}
int64_t FileSize(char *path) {
static int64_t FileSize(char *path) {
FILE *f = fopen(path, "rb");
if (f == NULL) {
return -1;
@ -185,7 +185,7 @@ int64_t FileSize(char *path) {
static const size_t kFileBufferSize = 65536;
void Decompresss(FILE* fin, FILE* fout) {
static void Decompresss(FILE* fin, FILE* fout) {
uint8_t* input = new uint8_t[kFileBufferSize];
uint8_t* output = new uint8_t[kFileBufferSize];
size_t total_out;