/*
 * Copyright (c) 2016-2020, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

/**
 * This fuzz target performs a zstd round-trip test (compress & decompress),
 * compares the result with the original, and calls abort() on corruption.
 */
|
|
|
|
|
2017-09-14 21:41:49 +00:00
|
|
|
/* Must be defined before any (transitive) include of zstd.h to expose the
 * advanced/static-linking-only API (ZSTD_CCtx_setParameter, etc.). */
#define ZSTD_STATIC_LINKING_ONLY

#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "fuzz_helpers.h"
#include "zstd_helpers.h"
#include "fuzz_data_producer.h"

/* Compression/decompression contexts, lazily created on first use and reused
 * across fuzzer iterations unless STATEFUL_FUZZING is undefined (see the
 * cleanup at the end of LLVMFuzzerTestOneInput). */
static ZSTD_CCtx *cctx = NULL;
static ZSTD_DCtx *dctx = NULL;
|
|
|
|
|
|
|
|
static size_t roundTripTest(void *result, size_t resultCapacity,
|
|
|
|
void *compressed, size_t compressedCapacity,
|
2019-09-10 23:14:43 +00:00
|
|
|
const void *src, size_t srcSize,
|
|
|
|
FUZZ_dataProducer_t *producer)
|
2017-06-29 23:53:52 +00:00
|
|
|
{
|
2017-09-14 21:41:49 +00:00
|
|
|
size_t cSize;
|
2020-05-01 23:11:47 +00:00
|
|
|
size_t dSize;
|
|
|
|
int targetCBlockSize = 0;
|
2019-09-12 19:40:12 +00:00
|
|
|
if (FUZZ_dataProducer_uint32Range(producer, 0, 1)) {
|
2019-09-10 23:14:43 +00:00
|
|
|
FUZZ_setRandomParameters(cctx, srcSize, producer);
|
2019-04-09 03:01:38 +00:00
|
|
|
cSize = ZSTD_compress2(cctx, compressed, compressedCapacity, src, srcSize);
|
2020-05-01 23:11:47 +00:00
|
|
|
FUZZ_ZASSERT(ZSTD_CCtx_getParameter(cctx, ZSTD_c_targetCBlockSize, &targetCBlockSize));
|
2017-09-14 21:41:49 +00:00
|
|
|
} else {
|
2019-09-12 19:40:12 +00:00
|
|
|
int const cLevel = FUZZ_dataProducer_int32Range(producer, kMinClevel, kMaxClevel);
|
|
|
|
|
2017-09-14 21:41:49 +00:00
|
|
|
cSize = ZSTD_compressCCtx(
|
|
|
|
cctx, compressed, compressedCapacity, src, srcSize, cLevel);
|
|
|
|
}
|
2017-09-26 21:03:43 +00:00
|
|
|
FUZZ_ZASSERT(cSize);
|
2020-05-01 23:11:47 +00:00
|
|
|
dSize = ZSTD_decompressDCtx(dctx, result, resultCapacity, compressed, cSize);
|
|
|
|
FUZZ_ZASSERT(dSize);
|
|
|
|
/* When superblock is enabled make sure we don't expand the block more than expected. */
|
|
|
|
if (targetCBlockSize != 0) {
|
|
|
|
size_t normalCSize;
|
|
|
|
FUZZ_ZASSERT(ZSTD_CCtx_setParameter(cctx, ZSTD_c_targetCBlockSize, 0));
|
|
|
|
normalCSize = ZSTD_compress2(cctx, compressed, compressedCapacity, src, srcSize);
|
|
|
|
FUZZ_ZASSERT(normalCSize);
|
|
|
|
{
|
|
|
|
size_t const bytesPerBlock = 3 /* block header */
|
|
|
|
+ 5 /* Literal header */
|
|
|
|
+ 6 /* Huffman jump table */
|
|
|
|
+ 3 /* number of sequences */
|
|
|
|
+ 1 /* symbol compression modes */;
|
|
|
|
size_t const expectedExpansion = bytesPerBlock * (1 + (normalCSize / MAX(1, targetCBlockSize)));
|
2020-05-19 18:42:53 +00:00
|
|
|
size_t const allowedExpansion = (srcSize >> 3) + 5 * expectedExpansion + 10;
|
2020-05-01 23:11:47 +00:00
|
|
|
FUZZ_ASSERT(cSize <= normalCSize + allowedExpansion);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return dSize;
|
2017-06-29 23:53:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
int LLVMFuzzerTestOneInput(const uint8_t *src, size_t size)
|
|
|
|
{
|
2019-04-09 23:47:59 +00:00
|
|
|
size_t const rBufSize = size;
|
2020-05-01 23:35:35 +00:00
|
|
|
void* rBuf = FUZZ_malloc(rBufSize);
|
2020-05-01 23:11:47 +00:00
|
|
|
size_t cBufSize = ZSTD_compressBound(size);
|
2019-04-09 23:47:59 +00:00
|
|
|
void* cBuf;
|
2017-06-29 23:53:52 +00:00
|
|
|
|
2019-09-10 23:52:38 +00:00
|
|
|
/* Give a random portion of src data to the producer, to use for
|
|
|
|
parameter generation. The rest will be used for (de)compression */
|
2019-09-10 23:14:43 +00:00
|
|
|
FUZZ_dataProducer_t *producer = FUZZ_dataProducer_create(src, size);
|
2019-09-11 17:09:29 +00:00
|
|
|
size = FUZZ_dataProducer_reserveDataPrefix(producer);
|
2019-09-10 23:52:38 +00:00
|
|
|
|
2019-04-09 23:47:59 +00:00
|
|
|
/* Half of the time fuzz with a 1 byte smaller output size.
|
|
|
|
* This will still succeed because we don't use a dictionary, so the dictID
|
|
|
|
* field is empty, giving us 4 bytes of overhead.
|
|
|
|
*/
|
2019-09-10 23:14:43 +00:00
|
|
|
cBufSize -= FUZZ_dataProducer_uint32Range(producer, 0, 1);
|
2019-09-10 23:52:38 +00:00
|
|
|
|
2020-05-01 23:35:35 +00:00
|
|
|
cBuf = FUZZ_malloc(cBufSize);
|
2017-06-29 23:53:52 +00:00
|
|
|
|
|
|
|
if (!cctx) {
|
|
|
|
cctx = ZSTD_createCCtx();
|
|
|
|
FUZZ_ASSERT(cctx);
|
|
|
|
}
|
|
|
|
if (!dctx) {
|
|
|
|
dctx = ZSTD_createDCtx();
|
|
|
|
FUZZ_ASSERT(dctx);
|
|
|
|
}
|
|
|
|
|
|
|
|
{
|
|
|
|
size_t const result =
|
2019-09-10 23:14:43 +00:00
|
|
|
roundTripTest(rBuf, rBufSize, cBuf, cBufSize, src, size, producer);
|
2017-09-26 21:03:43 +00:00
|
|
|
FUZZ_ZASSERT(result);
|
2017-06-29 23:53:52 +00:00
|
|
|
FUZZ_ASSERT_MSG(result == size, "Incorrect regenerated size");
|
2020-05-01 23:35:35 +00:00
|
|
|
FUZZ_ASSERT_MSG(!FUZZ_memcmp(src, rBuf, size), "Corruption!");
|
2017-06-29 23:53:52 +00:00
|
|
|
}
|
2019-04-09 23:47:59 +00:00
|
|
|
free(rBuf);
|
|
|
|
free(cBuf);
|
2019-09-10 23:14:43 +00:00
|
|
|
FUZZ_dataProducer_free(producer);
|
2017-09-13 03:20:27 +00:00
|
|
|
#ifndef STATEFUL_FUZZING
|
2017-06-29 23:53:52 +00:00
|
|
|
ZSTD_freeCCtx(cctx); cctx = NULL;
|
|
|
|
ZSTD_freeDCtx(dctx); dctx = NULL;
|
|
|
|
#endif
|
|
|
|
return 0;
|
|
|
|
}
|