/*
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */


/* **************************************
*  Compiler Warnings
****************************************/
#ifdef _MSC_VER
#  pragma warning(disable : 4127)    /* disable: C4127: conditional expression is constant */
#endif


/*-*************************************
*  Includes
***************************************/
#include "platform.h"       /* Large Files support */
#include "util.h"           /* UTIL_getFileSize, UTIL_getTotalFileSize */
#include <stdlib.h>         /* malloc, free */
#include <string.h>         /* memset */
#include <stdio.h>          /* fprintf, fopen, ftello64 */
#include <time.h>           /* clock_t, clock, CLOCKS_PER_SEC */
#include <errno.h>          /* errno */

#include "mem.h"            /* read */
#include "error_private.h"
#include "dibio.h"


/*-*************************************
*  Constants
***************************************/
#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)

#define SAMPLESIZE_MAX (128 KB)
#define MEMMULT 11    /* rough estimation : memory cost to analyze 1 byte of sample */
#define COVER_MEMMULT 9    /* rough estimation : memory cost to analyze 1 byte of sample */
static const size_t g_maxMemory = (sizeof(size_t) == 4) ? (2 GB - 64 MB) : ((size_t)(512 MB) << sizeof(size_t));
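/* note (illustrative arithmetic) : on a 32-bit build (sizeof(size_t)==4) the cap is 2 GB - 64 MB;
 * on a typical 64-bit build (sizeof(size_t)==8) it is 512 MB << 8 = 128 GB,
 * i.e. no practical limit for dictionary training workloads. */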

#define NOISELENGTH 32


/*-*************************************
*  Console display
***************************************/
#define DISPLAY(...)         fprintf(stderr, __VA_ARGS__)
#define DISPLAYLEVEL(l, ...) if (displayLevel>=l) { DISPLAY(__VA_ARGS__); }

#define DISPLAYUPDATE(l, ...) if (displayLevel>=l) { \
            if ((DIB_clockSpan(g_time) > refreshRate) || (displayLevel>=4)) \
            { g_time = clock(); DISPLAY(__VA_ARGS__); \
            if (displayLevel>=4) fflush(stderr); } }
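/* DISPLAYUPDATE throttles console refreshes : it only prints when more than refreshRate clock
 * ticks (CLOCKS_PER_SEC * 2 / 10, roughly 0.2 second) have elapsed since the previous update,
 * except at display level 4+ where every update is printed and flushed immediately. */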
static const clock_t refreshRate = CLOCKS_PER_SEC * 2 / 10;
static clock_t g_time = 0;

static clock_t DIB_clockSpan(clock_t nPrevious) { return clock() - nPrevious; }


/*-*************************************
*  Exceptions
***************************************/
#ifndef DEBUG
#  define DEBUG 0
#endif
#define DEBUGOUTPUT(...) if (DEBUG) DISPLAY(__VA_ARGS__);
#define EXM_THROW(error, ...)                                             \
{                                                                         \
    DEBUGOUTPUT("Error defined at %s, line %i : \n", __FILE__, __LINE__); \
    DISPLAY("Error %i : ", error);                                        \
    DISPLAY(__VA_ARGS__);                                                 \
    DISPLAY("\n");                                                        \
    exit(error);                                                          \
}


/* ********************************************************
*  Helper functions
**********************************************************/
unsigned DiB_isError(size_t errorCode) { return ERR_isError(errorCode); }

const char* DiB_getErrorName(size_t errorCode) { return ERR_getErrorName(errorCode); }

#undef MIN
#define MIN(a,b)    ((a) < (b) ? (a) : (b))


/* ********************************************************
*  File related operations
**********************************************************/
/** DiB_loadFiles() :
 *  load samples from files listed in fileNamesTable into buffer.
 *  works even if buffer is too small to load all samples.
 *  Also provides the size of each sample into sampleSizes table
 *  which must be sized correctly, using DiB_fileStats().
 * @return : nb of samples effectively loaded into `buffer`
 * *bufferSizePtr is modified, it provides the amount of data loaded within buffer.
 *  sampleSizes is filled with the size of each sample.
 */
static unsigned DiB_loadFiles(void* buffer, size_t* bufferSizePtr,
                              size_t* sampleSizes, unsigned sstSize,
                              const char** fileNamesTable, unsigned nbFiles, size_t targetChunkSize,
                              unsigned displayLevel)
{
    char* const buff = (char*)buffer;
    size_t pos = 0;
    unsigned nbLoadedChunks = 0, fileIndex;

    for (fileIndex=0; fileIndex<nbFiles; fileIndex++) {
        const char* const fileName = fileNamesTable[fileIndex];
        unsigned long long const fs64 = UTIL_getFileSize(fileName);
        unsigned long long remainingToLoad = fs64;
        U32 const nbChunks = targetChunkSize ? (U32)((fs64 + (targetChunkSize-1)) / targetChunkSize) : 1;
        U64 const chunkSize = targetChunkSize ? MIN(targetChunkSize, fs64) : fs64;
        size_t const maxChunkSize = (size_t)MIN(chunkSize, SAMPLESIZE_MAX);
        U32 cnb;
        FILE* const f = fopen(fileName, "rb");
        if (f==NULL) EXM_THROW(10, "zstd: dictBuilder: %s %s ", fileName, strerror(errno));
        DISPLAYUPDATE(2, "Loading %s... \r", fileName);
        for (cnb=0; cnb<nbChunks; cnb++) {
            size_t const toLoad = (size_t)MIN(maxChunkSize, remainingToLoad);
            if (toLoad > *bufferSizePtr-pos) break;
            {   size_t const readSize = fread(buff+pos, 1, toLoad, f);
                if (readSize != toLoad) EXM_THROW(11, "Pb reading %s", fileName);
                pos += readSize;
                sampleSizes[nbLoadedChunks++] = toLoad;
                remainingToLoad -= targetChunkSize;
                if (nbLoadedChunks == sstSize) {    /* no more space left in sampleSizes table */
                    fileIndex = nbFiles;   /* stop there */
                    break;
                }
                if (toLoad < targetChunkSize) {
                    fseek(f, (long)(targetChunkSize - toLoad), SEEK_CUR);
        }   }   }
        fclose(f);
    }
    DISPLAYLEVEL(2, "\r%79s\r", "");
    *bufferSizePtr = pos;
    DISPLAYLEVEL(4, "loaded : %u KB \n", (U32)(pos >> 10));
    return nbLoadedChunks;
}
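
/* Example (illustrative) : with targetChunkSize == 100 KB, a 250 KB file contributes
 * 3 samples of 100 KB, 100 KB and 50 KB; with targetChunkSize == 0, the same file
 * contributes a single sample, truncated to SAMPLESIZE_MAX (128 KB). */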


#define DiB_rotl32(x,r) ((x << r) | (x >> (32 - r)))
static U32 DiB_rand(U32* src)
{
    static const U32 prime1 = 2654435761U;
    static const U32 prime2 = 2246822519U;
    U32 rand32 = *src;
    rand32 *= prime1;
    rand32 ^= prime2;
    rand32  = DiB_rotl32(rand32, 13);
    *src = rand32;
    return rand32 >> 5;
}
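
/* DiB_rand() is a small multiply / xor / rotate mixer; the two constants appear to be the
 * 32-bit primes also used by xxHash. It only needs to be fast and reasonably well distributed,
 * not cryptographic, since it merely drives the file shuffle below. */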

/* DiB_shuffle() :
 * shuffle a table of file names in a semi-random way
 * It improves dictionary quality by reducing "locality" impact, so if sample set is very large,
 * it will load random elements from it, instead of just the first ones. */
static void DiB_shuffle(const char** fileNamesTable, unsigned nbFiles) {
    U32 seed = 0xFD2FB528;
    unsigned i;
    for (i = nbFiles - 1; i > 0; --i) {
        unsigned const j = DiB_rand(&seed) % (i + 1);
        const char* const tmp = fileNamesTable[j];
        fileNamesTable[j] = fileNamesTable[i];
        fileNamesTable[i] = tmp;
    }
}
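
/* DiB_shuffle() is a Fisher-Yates shuffle seeded with a fixed constant (0xFD2FB528, which is
 * also the zstd frame magic number), so the "random" order is deterministic : training twice
 * on the same file list produces the same result. */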


/*-********************************************************
*  Dictionary training functions
**********************************************************/
static size_t DiB_findMaxMem(unsigned long long requiredMem)
{
    size_t const step = 8 MB;
    void* testmem = NULL;

    requiredMem = (((requiredMem >> 23) + 1) << 23);
    requiredMem += step;
    if (requiredMem > g_maxMemory) requiredMem = g_maxMemory;

    while (!testmem) {
        testmem = malloc((size_t)requiredMem);
        requiredMem -= step;
    }

    free(testmem);
    return (size_t)requiredMem;
}
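
/* DiB_findMaxMem() probes how much memory can actually be allocated : it rounds the request up
 * to the next 8 MB boundary, caps it at g_maxMemory, then retries malloc() in 8 MB decrements
 * until an allocation succeeds. It returns one step below the successful size, leaving some
 * headroom for the rest of the process. */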


static void DiB_fillNoise(void* buffer, size_t length)
{
    unsigned const prime1 = 2654435761U;
    unsigned const prime2 = 2246822519U;
    unsigned acc = prime1;
    size_t p = 0;

    for (p=0; p<length; p++) {
        acc *= prime2;
        ((unsigned char*)buffer)[p] = (unsigned char)(acc >> 21);
    }
}
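
/* DiB_fillNoise() writes deterministic pseudo-random bytes. It is used to build the NOISELENGTH
 * guard band placed right after the loaded samples, so that reads past the end of the sample
 * data by the legacy trainer hit harmless noise (see ZDICT_trainFromBuffer_unsafe_legacy()). */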


static void DiB_saveDict(const char* dictFileName,
                         const void* buff, size_t buffSize)
{
    FILE* const f = fopen(dictFileName, "wb");
    if (f==NULL) EXM_THROW(3, "cannot open %s ", dictFileName);

    { size_t const n = fwrite(buff, 1, buffSize, f);
      if (n!=buffSize) EXM_THROW(4, "%s : write error", dictFileName) }

    { size_t const n = (size_t)fclose(f);
      if (n!=0) EXM_THROW(5, "%s : flush error", dictFileName) }
}


typedef struct {
    U64 totalSizeToLoad;
    unsigned oneSampleTooLarge;
    unsigned nbSamples;
} fileStats;

/*! DiB_fileStats() :
 *  Given a list of files, and a chunkSize (0 == no chunk, whole files)
 *  provides the amount of data to be loaded and the resulting nb of samples.
 *  This is useful primarily for allocation purpose => sample buffer, and sample sizes table.
 */
static fileStats DiB_fileStats(const char** fileNamesTable, unsigned nbFiles, size_t chunkSize, unsigned displayLevel)
{
    fileStats fs;
    unsigned n;
    memset(&fs, 0, sizeof(fs));
    for (n=0; n<nbFiles; n++) {
        U64 const fileSize = UTIL_getFileSize(fileNamesTable[n]);
        U32 const nbSamples = (U32)(chunkSize ? (fileSize + (chunkSize-1)) / chunkSize : 1);
        U64 const chunkToLoad = chunkSize ? MIN(chunkSize, fileSize) : fileSize;
        size_t const cappedChunkSize = (size_t)MIN(chunkToLoad, SAMPLESIZE_MAX);
        fs.totalSizeToLoad += cappedChunkSize * nbSamples;
        fs.oneSampleTooLarge |= (chunkSize > 2*SAMPLESIZE_MAX);
        fs.nbSamples += nbSamples;
    }
    DISPLAYLEVEL(4, "Preparing to load : %u KB \n", (U32)(fs.totalSizeToLoad >> 10));
    return fs;
}
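
/* Example (illustrative) : 10000 sample files of 4 KB each, with chunkSize == 0, give
 * fs.nbSamples == 10000 and fs.totalSizeToLoad == 40000 KB, which drives the allocation of
 * the sample buffer and of the sampleSizes table in DiB_trainFromFiles() below. */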


/*! ZDICT_trainFromBuffer_unsafe_legacy() :
    Strictly Internal use only !!
    Same as ZDICT_trainFromBuffer_legacy(), but does not control `samplesBuffer`.
    `samplesBuffer` must be followed by noisy guard band to avoid out-of-buffer reads.
    @return : size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
              or an error code.
*/
size_t ZDICT_trainFromBuffer_unsafe_legacy(void* dictBuffer, size_t dictBufferCapacity,
                                           const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples,
                                           ZDICT_legacy_params_t parameters);


int DiB_trainFromFiles(const char* dictFileName, unsigned maxDictSize,
                       const char** fileNamesTable, unsigned nbFiles, size_t chunkSize,
                       ZDICT_legacy_params_t *params, ZDICT_cover_params_t *coverParams,
                       int optimizeCover)
{
    unsigned const displayLevel = params ? params->zParams.notificationLevel :
                                  coverParams ? coverParams->zParams.notificationLevel :
                                  0;   /* should never happen */
    void* const dictBuffer = malloc(maxDictSize);
    fileStats const fs = DiB_fileStats(fileNamesTable, nbFiles, chunkSize, displayLevel);
    size_t* const sampleSizes = (size_t*)malloc(fs.nbSamples * sizeof(size_t));
    size_t const memMult = params ? MEMMULT : COVER_MEMMULT;
    size_t const maxMem = DiB_findMaxMem(fs.totalSizeToLoad * memMult) / memMult;
    size_t loadedSize = (size_t) MIN ((unsigned long long)maxMem, fs.totalSizeToLoad);
    void* const srcBuffer = malloc(loadedSize+NOISELENGTH);
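    /* memory sizing (descriptive note) : training needs roughly memMult bytes of working memory
     * per byte of loaded sample (MEMMULT == 11 for the legacy trainer, COVER_MEMMULT == 9 for
     * cover), so the amount of sample data kept in memory is whatever DiB_findMaxMem() can
     * secure for totalSizeToLoad * memMult, divided back by memMult. The sample buffer is
     * over-allocated by NOISELENGTH bytes to make room for the noise guard band. */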
    int result = 0;

    /* Checks */
    if ((!sampleSizes) || (!srcBuffer) || (!dictBuffer))
        EXM_THROW(12, "not enough memory for DiB_trainFromFiles");   /* should not happen */
    if (fs.oneSampleTooLarge) {
        DISPLAYLEVEL(2, "! Warning : some sample(s) are very large \n");
        DISPLAYLEVEL(2, "! Note that dictionary is only useful for small samples. \n");
        DISPLAYLEVEL(2, "! As a consequence, only the first %u bytes of each sample are loaded \n", SAMPLESIZE_MAX);
    }
    if (fs.nbSamples < 5) {
        DISPLAYLEVEL(2, "! Warning : nb of samples too low for proper processing ! \n");
        DISPLAYLEVEL(2, "! Please provide _one file per sample_. \n");
        DISPLAYLEVEL(2, "! Alternatively, split files into fixed-size blocks representative of samples, with -B# \n");
        EXM_THROW(14, "nb of samples too low");   /* we now clearly forbid this case */
    }
    if (fs.totalSizeToLoad < (unsigned long long)(8 * maxDictSize)) {
        DISPLAYLEVEL(2, "! Warning : data size of samples too small for target dictionary size \n");
        DISPLAYLEVEL(2, "! Samples should be about 100x larger than target dictionary size \n");
    }

    /* init */
    if (loadedSize < fs.totalSizeToLoad)
        DISPLAYLEVEL(1, "Not enough memory; training on %u MB only...\n", (unsigned)(loadedSize >> 20));

    /* Load input buffer */
    DISPLAYLEVEL(3, "Shuffling input files\n");
    DiB_shuffle(fileNamesTable, nbFiles);
    nbFiles = DiB_loadFiles(srcBuffer, &loadedSize, sampleSizes, fs.nbSamples, fileNamesTable, nbFiles, chunkSize, displayLevel);

    {   size_t dictSize;
        /* note : nbFiles was overwritten by DiB_loadFiles() and now holds the number of samples
         * actually loaded, which can be lower than fs.nbSamples when memory is insufficient;
         * only the samples that were really loaded are handed to the trainers. */
        if (params) {
            DiB_fillNoise((char*)srcBuffer + loadedSize, NOISELENGTH);   /* guard band, for end of buffer condition */
            dictSize = ZDICT_trainFromBuffer_unsafe_legacy(dictBuffer, maxDictSize,
                                                           srcBuffer, sampleSizes, nbFiles,
                                                           *params);
        } else if (optimizeCover) {
            dictSize = ZDICT_optimizeTrainFromBuffer_cover(dictBuffer, maxDictSize,
                                                           srcBuffer, sampleSizes, nbFiles,
                                                           coverParams);
            if (!ZDICT_isError(dictSize)) {
                DISPLAYLEVEL(2, "k=%u\nd=%u\nsteps=%u\n", coverParams->k, coverParams->d, coverParams->steps);
            }
        } else {
            dictSize = ZDICT_trainFromBuffer_cover(dictBuffer, maxDictSize, srcBuffer,
                                                   sampleSizes, nbFiles, *coverParams);
        }
        if (ZDICT_isError(dictSize)) {
            DISPLAYLEVEL(1, "dictionary training failed : %s \n", ZDICT_getErrorName(dictSize));   /* should not happen */
            result = 1;
            goto _cleanup;
        }
        /* save dict */
        DISPLAYLEVEL(2, "Save dictionary of size %u into file %s \n", (U32)dictSize, dictFileName);
        DiB_saveDict(dictFileName, dictBuffer, dictSize);
    }

    /* clean up */
_cleanup:
    free(srcBuffer);
    free(sampleSizes);
    free(dictBuffer);
    return result;
}