29#ifndef ZDICT_STATIC_LINKING_ONLY
30# define ZDICT_STATIC_LINKING_ONLY
33#include "../common/mem.h"
34#include "../common/pool.h"
35#include "../common/threading.h"
36#include "../common/zstd_internal.h"
37#include "../common/bits.h"
51#define COVER_MAX_SAMPLES_SIZE (sizeof(size_t) == 8 ? ((unsigned)-1) : ((unsigned)1 GB))
52#define COVER_DEFAULT_SPLITPOINT 1.0
/* Console logging helpers for the COVER trainer.
 * NOTE(review): this region is corrupted by extraction -- the DISPLAY macro
 * definition and several closing lines are missing from this view, and the
 * stray leading numbers are remnants of original line numbering. */
57#ifndef LOCALDISPLAYLEVEL
/* Trainer-wide notification level; 0 = silent.  Set by the entry points
 * below from parameters.zParams.notificationLevel. */
58static int g_displayLevel = 0;
63 fprintf(stderr, __VA_ARGS__); \
66#undef LOCALDISPLAYLEVEL
/* Print only when the caller-supplied displayLevel reaches threshold l. */
67#define LOCALDISPLAYLEVEL(displayLevel, l, ...) \
68 if (displayLevel >= l) { \
69 DISPLAY(__VA_ARGS__); \
72#define DISPLAYLEVEL(l, ...) LOCALDISPLAYLEVEL(g_displayLevel, l, __VA_ARGS__)
74#ifndef LOCALDISPLAYUPDATE
/* Progress-update throttle: refresh at most about every 150 ms. */
75static const clock_t g_refreshRate = CLOCKS_PER_SEC * 15 / 100;
76static clock_t g_time = 0;
78#undef LOCALDISPLAYUPDATE
/* Throttled progress display; always prints when displayLevel >= 4. */
79#define LOCALDISPLAYUPDATE(displayLevel, l, ...) \
80 if (displayLevel >= l) { \
81 if ((clock() - g_time > g_refreshRate) || (displayLevel >= 4)) { \
83 DISPLAY(__VA_ARGS__); \
87#define DISPLAYUPDATE(l, ...) LOCALDISPLAYUPDATE(g_displayLevel, l, __VA_ARGS__)
/* Sentinel marking an unused cell in the open-addressing hash map
 * (maps dmer id -> occurrence count inside the active segment).
 * NOTE(review): the COVER_map_init/COVER_map_clear bodies are only
 * partially visible here -- most of their lines were lost in extraction. */
98#define MAP_EMPTY_VALUE ((U32)-1)
/* sizeMask indexing relies on map->size being a power of two. */
127 map->sizeMask =
map->size - 1;
134 COVER_map_clear(
map);
141static const U32 COVER_prime4bytes = 2654435761U;
143 return (key * COVER_prime4bytes) >> (32 -
map->sizeLog);
/* COVER_map_index fragment: linear-probe lookup starting at the key's hash
 * slot, walking the table with wrap-around via sizeMask.
 * NOTE(review): the signature, loop body, and return are missing from this
 * extraction -- compare against the full source before editing. */
150 const U32 hash = COVER_map_hash(
map, key);
152 for (i = hash;; i = (i + 1) &
map->sizeMask) {
/* Stop on the cell already holding this key (the full code also stops on
 * an empty cell and claims it). */
157 if (pos->
key == key) {
/* COVER_map_remove fragment: deletes a key and re-slots the probe chain that
 * follows it (backward-shift deletion for open addressing, so later lookups
 * still find every remaining key).
 * NOTE(review): heavily elided by extraction -- the shift computation and
 * the element moves themselves are not visible here. */
181 U32 i = COVER_map_index(
map, key);
187 for (i = (i + 1) &
map->sizeMask;; i = (i + 1) &
map->sizeMask) {
/* Presumably: an element whose ideal slot lags by >= shift positions must
 * be moved up into the vacated cell -- verify against the full source. */
195 if (((i - COVER_map_hash(
map, pos->
key)) &
map->sizeMask) >= shift) {
/**
 * Returns the sum of the sample sizes.
 * Callers are responsible for ensuring the total fits in size_t (the trainer
 * caps inputs with COVER_MAX_SAMPLES_SIZE before calling this).
 */
size_t COVER_sum(const size_t *samplesSizes, unsigned nbSamples) {
  size_t sum = 0;
  unsigned i;
  for (i = 0; i < nbSamples; ++i) {
    sum += samplesSizes[i];
  }
  return sum;
}
259static int COVER_cmp(
COVER_ctx_t *ctx,
const void *lp,
const void *rp) {
260 U32 const lhs = *(
U32 const *)lp;
261 U32 const rhs = *(
U32 const *)rp;
267static int COVER_cmp8(
COVER_ctx_t *ctx,
const void *lp,
const void *rp) {
268 U64 const mask = (ctx->
d == 8) ? (
U64)-1 : (((
U64)1 << (8 * ctx->
d)) - 1);
282static int WIN_CDECL COVER_strict_cmp(
const void *lp,
const void *rp) {
283 int result = COVER_cmp(g_coverCtx, lp, rp);
285 result = lp < rp ? -1 : 1;
292static int WIN_CDECL COVER_strict_cmp8(
const void *lp,
const void *rp) {
293 int result = COVER_cmp8(g_coverCtx, lp, rp);
295 result = lp < rp ? -1 : 1;
/**
 * Returns the first pointer in [first, last) whose element does not compare
 * less than value, or last if no such element exists.  Binary search over a
 * sorted range; equivalent to C++ std::lower_bound.
 */
static const size_t *COVER_lower_bound(const size_t* first, const size_t* last,
                                       size_t value) {
  size_t count = (size_t)(last - first);
  assert(last >= first);
  while (count != 0) {
    size_t step = count / 2;
    const size_t *ptr = first;
    ptr += step;
    if (*ptr < value) {
      /* Midpoint is too small: answer lies strictly after it. */
      first = ptr + 1;
      count -= step + 1;
    } else {
      count = step;
    }
  }
  return first;
}
328COVER_groupBy(
const void *data,
size_t count,
size_t size,
COVER_ctx_t *ctx,
329 int (*cmp)(
COVER_ctx_t *,
const void *,
const void *),
330 void (*grp)(
COVER_ctx_t *,
const void *,
const void *)) {
331 const BYTE *ptr = (
const BYTE *)data;
333 while (num < count) {
336 while (num < count && cmp(ctx, ptr, grpEnd) == 0) {
340 grp(ctx, ptr, grpEnd);
354static void COVER_group(
COVER_ctx_t *ctx,
const void *group,
355 const void *groupEnd) {
357 const U32 *grpPtr = (
const U32 *)group;
358 const U32 *grpEnd = (
const U32 *)groupEnd;
367 const size_t *curOffsetPtr = ctx->
offsets;
372 size_t curSampleEnd = ctx->
offsets[0];
373 for (; grpPtr != grpEnd; ++grpPtr) {
375 ctx->
dmerAt[*grpPtr] = dmerId;
380 if (*grpPtr < curSampleEnd) {
388 if (grpPtr + 1 != grpEnd) {
389 const size_t *sampleEndPtr =
390 COVER_lower_bound(curOffsetPtr, offsetsEnd, *grpPtr);
391 curSampleEnd = *sampleEndPtr;
392 curOffsetPtr = sampleEndPtr + 1;
400 ctx->
suffix[dmerId] = freq;
418 ZDICT_cover_params_t parameters) {
420 const U32 k = parameters.k;
421 const U32 d = parameters.d;
422 const U32 dmersInK = k -
d + 1;
427 COVER_map_clear(activeDmers);
429 activeSegment.
begin = begin;
430 activeSegment.
end = begin;
431 activeSegment.
score = 0;
435 while (activeSegment.
end < end) {
439 U32 *newDmerOcc = COVER_map_at(activeDmers, newDmer);
441 if (*newDmerOcc == 0) {
445 activeSegment.
score += freqs[newDmer];
448 activeSegment.
end += 1;
452 if (activeSegment.
end - activeSegment.
begin == dmersInK + 1) {
454 U32 *delDmerOcc = COVER_map_at(activeDmers, delDmer);
455 activeSegment.
begin += 1;
458 if (*delDmerOcc == 0) {
459 COVER_map_remove(activeDmers, delDmer);
460 activeSegment.
score -= freqs[delDmer];
465 if (activeSegment.
score > bestSegment.
score) {
466 bestSegment = activeSegment;
471 U32 newBegin = bestSegment.
end;
474 for (pos = bestSegment.
begin; pos != bestSegment.
end; ++pos) {
477 newBegin =
MIN(newBegin, pos);
481 bestSegment.
begin = newBegin;
482 bestSegment.
end = newEnd;
487 for (pos = bestSegment.
begin; pos != bestSegment.
end; ++pos) {
488 freqs[ctx->
dmerAt[pos]] = 0;
498static int COVER_checkParameters(ZDICT_cover_params_t parameters,
499 size_t maxDictSize) {
501 if (parameters.d == 0 || parameters.k == 0) {
505 if (parameters.k > maxDictSize) {
509 if (parameters.d > parameters.k) {
513 if (parameters.splitPoint <= 0 || parameters.splitPoint > 1){
/* COVER_ctx_init fragment: prepares the training context -- validates sample
 * counts/sizes, splits samples into train/test sets, allocates the suffix
 * array and offsets scratch buffers, sorts the suffix array, and groups equal
 * dmers to compute their frequencies.
 * NOTE(review): this extraction is missing many interior lines (allocation of
 * suffix/freqs, the offsets prefix-sum body, the qsort calls); consult the
 * full source before modifying. */
551static size_t COVER_ctx_init(
COVER_ctx_t *ctx,
const void *samplesBuffer,
552 const size_t *samplesSizes,
unsigned nbSamples,
553 unsigned d,
double splitPoint)
555 const BYTE *
const samples = (
const BYTE *)samplesBuffer;
556 const size_t totalSamplesSize =
COVER_sum(samplesSizes, nbSamples);
/* splitPoint < 1.0 divides the samples into a training prefix and a testing
 * suffix; splitPoint == 1.0 uses every sample for both. */
558 const unsigned nbTrainSamples = splitPoint < 1.0 ? (unsigned)((
double)nbSamples * splitPoint) : nbSamples;
559 const unsigned nbTestSamples = splitPoint < 1.0 ? nbSamples - nbTrainSamples : nbSamples;
560 const size_t trainingSamplesSize = splitPoint < 1.0 ?
COVER_sum(samplesSizes, nbTrainSamples) : totalSamplesSize;
561 const size_t testSamplesSize = splitPoint < 1.0 ?
COVER_sum(samplesSizes + nbTrainSamples, nbTestSamples) : totalSamplesSize;
/* Reject corpora too small to hold one dmer / one U64 read, or too large
 * (upper bound elided here -- presumably COVER_MAX_SAMPLES_SIZE; verify). */
563 if (totalSamplesSize <
MAX(d,
sizeof(
U64)) ||
565 DISPLAYLEVEL(1,
"Total samples size is too large (%u MB), maximum size is %u MB\n",
567 return ERROR(srcSize_wrong);
/* At least 5 training samples are required. */
570 if (nbTrainSamples < 5) {
571 DISPLAYLEVEL(1,
"Total number of training samples is %u and is invalid.", nbTrainSamples);
572 return ERROR(srcSize_wrong);
/* At least 1 testing sample is required. */
575 if (nbTestSamples < 1) {
576 DISPLAYLEVEL(1,
"Total number of testing samples is %u and is invalid.", nbTestSamples);
577 return ERROR(srcSize_wrong);
/* Zero the whole context so partial initialization can be destroyed safely. */
580 memset(ctx, 0,
sizeof(*ctx));
581 DISPLAYLEVEL(2,
"Training on %u samples of total size %u\n", nbTrainSamples,
582 (
unsigned)trainingSamplesSize);
583 DISPLAYLEVEL(2,
"Testing on %u samples of total size %u\n", nbTestSamples,
584 (
unsigned)testSamplesSize);
/* offsets[i] is the cumulative end position of sample i; nbSamples + 1
 * entries so offsets[nbSamples] is the total size. */
596 ctx->
offsets = (
size_t *)malloc((nbSamples + 1) *
sizeof(size_t));
598 DISPLAYLEVEL(1,
"Failed to allocate scratch buffers\n");
599 COVER_ctx_destroy(ctx);
600 return ERROR(memory_allocation);
/* Prefix-sum loop filling ctx->offsets (body elided in this extraction). */
609 for (i = 1; i <= nbSamples; ++i) {
/* OpenBSD takes a different sort path (call elided); both branches pick the
 * fast 8-byte comparator when d <= 8. */
627#if defined(__OpenBSD__)
629 (ctx->
d <= 8 ? &COVER_strict_cmp8 : &COVER_strict_cmp));
632 (ctx->
d <= 8 ? &COVER_strict_cmp8 : &COVER_strict_cmp));
/* Group equal dmers and compute their frequencies via COVER_group. */
644 (ctx->
d <= 8 ? &COVER_cmp8 : &COVER_cmp), &COVER_group);
652 const double ratio = (double)nbDmers / (
double)maxDictSize;
657 "WARNING: The maximum dictionary size %u is too large "
658 "compared to the source size %u! "
659 "size(source)/size(dictionary) = %f, but it should be >= "
660 "10! This may lead to a subpar dictionary! We recommend "
661 "training on sources at least 10x, and preferably 100x "
662 "the size of the dictionary! \n", (
U32)maxDictSize,
663 (
U32)nbDmers, ratio);
669 const U32 minEpochSize = k * 10;
671 epochs.
num =
MAX(1, maxDictSize / k / passes);
672 epochs.
size = nbDmers / epochs.
num;
673 if (epochs.
size >= minEpochSize) {
677 epochs.
size =
MIN(minEpochSize, nbDmers);
678 epochs.
num = nbDmers / epochs.
size;
686static size_t COVER_buildDictionary(
const COVER_ctx_t *ctx,
U32 *freqs,
688 size_t dictBufferCapacity,
689 ZDICT_cover_params_t parameters) {
690 BYTE *
const dict = (
BYTE *)dictBuffer;
691 size_t tail = dictBufferCapacity;
695 const size_t maxZeroScoreRun =
MAX(10,
MIN(100, epochs.
num >> 3));
696 size_t zeroScoreRun = 0;
698 DISPLAYLEVEL(2,
"Breaking content into %u epochs of size %u\n",
703 for (epoch = 0; tail > 0; epoch = (epoch + 1) % epochs.
num) {
704 const U32 epochBegin = (
U32)(epoch * epochs.
size);
705 const U32 epochEnd = epochBegin + epochs.
size;
709 ctx, freqs, activeDmers, epochBegin, epochEnd, parameters);
713 if (segment.
score == 0) {
714 if (++zeroScoreRun >= maxZeroScoreRun) {
721 segmentSize =
MIN(segment.
end - segment.
begin + parameters.d - 1, tail);
722 if (segmentSize < parameters.d) {
729 memcpy(dict + tail, ctx->
samples + segment.
begin, segmentSize);
732 (
unsigned)(((dictBufferCapacity - tail) * 100) / dictBufferCapacity));
739 void *dictBuffer,
size_t dictBufferCapacity,
740 const void *samplesBuffer,
const size_t *samplesSizes,
unsigned nbSamples,
741 ZDICT_cover_params_t parameters)
743 BYTE*
const dict = (
BYTE*)dictBuffer;
746 parameters.splitPoint = 1.0;
748 g_displayLevel = (int)parameters.zParams.notificationLevel;
750 if (!COVER_checkParameters(parameters, dictBufferCapacity)) {
752 return ERROR(parameter_outOfBound);
754 if (nbSamples == 0) {
755 DISPLAYLEVEL(1,
"Cover must have at least one input file\n");
756 return ERROR(srcSize_wrong);
758 if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
759 DISPLAYLEVEL(1,
"dictBufferCapacity must be at least %u\n",
761 return ERROR(dstSize_tooSmall);
765 size_t const initVal = COVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples,
766 parameters.
d, parameters.splitPoint);
772 if (!COVER_map_init(&activeDmers, parameters.k - parameters.d + 1)) {
773 DISPLAYLEVEL(1,
"Failed to allocate dmer map: out of memory\n");
774 COVER_ctx_destroy(&ctx);
775 return ERROR(memory_allocation);
781 COVER_buildDictionary(&ctx, ctx.
freqs, &activeDmers, dictBuffer,
782 dictBufferCapacity, parameters);
784 dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
785 samplesBuffer, samplesSizes, nbSamples, parameters.zParams);
788 (
unsigned)dictionarySize);
790 COVER_ctx_destroy(&ctx);
791 COVER_map_destroy(&activeDmers);
792 return dictionarySize;
/* COVER_checkTotalCompressedSize fragment: scores a candidate dictionary by
 * compressing the check samples with a CDict built from it and summing the
 * compressed sizes (plus the dictionary size itself).
 * NOTE(review): the opening signature line and several interior lines
 * (ZSTD_compressBound, cctx/cdict creation, error checks, cleanup label) are
 * missing from this extraction. */
799 const size_t *samplesSizes,
const BYTE *samples,
801 size_t nbTrainSamples,
size_t nbSamples,
802 BYTE *
const dict,
size_t dictBufferCapacity) {
/* Defaults to an error so any early goto reports failure. */
803 size_t totalCompressedSize =
ERROR(GENERIC);
/* Size dst to hold the largest checked sample's compressed bound. */
813 size_t maxSampleSize = 0;
/* With a split, only the test suffix [nbTrainSamples, nbSamples) is scored. */
814 i = parameters.splitPoint < 1.0 ? nbTrainSamples : 0;
815 for (; i < nbSamples; ++i) {
816 maxSampleSize =
MAX(samplesSizes[i], maxSampleSize);
819 dst = malloc(dstCapacity);
824 parameters.zParams.compressionLevel);
825 if (!dst || !cctx || !cdict) {
826 goto _compressCleanup;
/* The dictionary size itself counts toward the score. */
829 totalCompressedSize = dictBufferCapacity;
830 i = parameters.splitPoint < 1.0 ? nbTrainSamples : 0;
831 for (; i < nbSamples; ++i) {
833 cctx, dst, dstCapacity, samples + offsets[i],
834 samplesSizes[i], cdict);
/* On compression error, propagate the error code as the result. */
836 totalCompressedSize = size;
837 goto _compressCleanup;
839 totalCompressedSize += size;
847 return totalCompressedSize;
/* Fragments of the COVER_best_* bookkeeping (shared with fastcover):
 * NOTE(review): these lines belong to several different functions
 * (COVER_best_init, COVER_best_finish, COVER_dictSelectionError) whose
 * bodies were mostly lost in extraction. */
/* COVER_best_init: tolerate a NULL best pointer. */
855 if (best==NULL)
return;
/* COVER_best_finish fragment: records a finished trial's result. */
913 ZDICT_cover_params_t parameters,
918 size_t dictSize = selection.
dictSize;
/* Keep the new dictionary only if it compresses the samples better. */
928 if (compressedSize < best->compressedSize) {
934 best->
dict = malloc(dictSize);
945 memcpy(best->
dict, dict, dictSize);
/* COVER_dictSelectionError: an error selection carries no buffer. */
968 return setDictSelection(NULL, 0, error);
/* COVER_selectDict fragment: finalizes the candidate dictionary content and,
 * when params.shrinkDict is set, searches for the smallest dictionary (by
 * doubling from ZDICT_DICTSIZE_MIN) whose compressed-size score stays within
 * shrinkDictMaxRegression percent of the largest dictionary's score.
 * NOTE(review): the signature head and several error-path lines are missing
 * from this extraction; the visible free()+return pairs are those paths. */
980 size_t dictContentSize,
const BYTE* samplesBuffer,
const size_t* samplesSizes,
unsigned nbFinalizeSamples,
981 size_t nbCheckSamples,
size_t nbSamples, ZDICT_cover_params_t params,
size_t* offsets,
size_t totalCompressedSize) {
983 size_t largestDict = 0;
984 size_t largestCompressed = 0;
/* Shrunken candidates are taken from the END of the content, which holds
 * the best segments (the builder fills back-to-front). */
985 BYTE* customDictContentEnd = customDictContent + dictContentSize;
987 BYTE* largestDictbuffer = (
BYTE*)malloc(dictBufferCapacity);
988 BYTE* candidateDictBuffer = (
BYTE*)malloc(dictBufferCapacity);
/* E.g. shrinkDictMaxRegression == 5 tolerates a 5% larger compressed size. */
989 double regressionTolerance = ((double)params.shrinkDictMaxRegression / 100.0) + 1.00;
991 if (!largestDictbuffer || !candidateDictBuffer) {
992 free(largestDictbuffer);
993 free(candidateDictBuffer);
/* First build and score the full-size dictionary. */
998 memcpy(largestDictbuffer, customDictContent, dictContentSize);
1000 largestDictbuffer, dictBufferCapacity, customDictContent, dictContentSize,
1001 samplesBuffer, samplesSizes, nbFinalizeSamples, params.zParams);
1004 free(largestDictbuffer);
1005 free(candidateDictBuffer);
1010 samplesBuffer, offsets,
1011 nbCheckSamples, nbSamples,
1012 largestDictbuffer, dictContentSize);
1015 free(largestDictbuffer);
1016 free(candidateDictBuffer);
/* Shrinking disabled: the full-size dictionary is the selection. */
1020 if (params.shrinkDict == 0) {
1021 free(candidateDictBuffer);
1022 return setDictSelection(largestDictbuffer, dictContentSize, totalCompressedSize);
1025 largestDict = dictContentSize;
1026 largestCompressed = totalCompressedSize;
1027 dictContentSize = ZDICT_DICTSIZE_MIN;
/* Try doubling sizes until one scores within tolerance of the largest. */
1030 while (dictContentSize < largestDict) {
1031 memcpy(candidateDictBuffer, largestDictbuffer, largestDict);
1033 candidateDictBuffer, dictBufferCapacity, customDictContentEnd - dictContentSize, dictContentSize,
1034 samplesBuffer, samplesSizes, nbFinalizeSamples, params.zParams);
1037 free(largestDictbuffer);
1038 free(candidateDictBuffer);
1044 samplesBuffer, offsets,
1045 nbCheckSamples, nbSamples,
1046 candidateDictBuffer, dictContentSize);
1049 free(largestDictbuffer);
1050 free(candidateDictBuffer);
/* Accept the first (smallest) candidate within the regression tolerance. */
1054 if ((
double)totalCompressedSize <= (
double)largestCompressed * regressionTolerance) {
1055 free(largestDictbuffer);
1056 return setDictSelection( candidateDictBuffer, dictContentSize, totalCompressedSize );
1058 dictContentSize *= 2;
/* No shrunken candidate was acceptable: fall back to the full size. */
1060 dictContentSize = largestDict;
1061 totalCompressedSize = largestCompressed;
1062 free(candidateDictBuffer);
1063 return setDictSelection( largestDictbuffer, dictContentSize, totalCompressedSize );
/* COVER_tryParameters fragment: worker body (run inline or via POOL_add)
 * that builds and scores a dictionary for one (k, d) parameter choice and
 * reports the result to the shared COVER_best_t.
 * NOTE(review): the data/ctx/freqs setup, the COVER_best_finish call, and
 * the cleanup tail are missing from this extraction. */
1081static void COVER_tryParameters(
void *opaque)
/* opaque is a COVER_tryParameters_data_t; copy out the parameters to try. */
1086 const ZDICT_cover_params_t parameters = data->
parameters;
/* Default to an error score so early exits report failure. */
1088 size_t totalCompressedSize =
ERROR(GENERIC);
1091 BYTE*
const dict = (
BYTE*)malloc(dictBufferCapacity);
1094 if (!COVER_map_init(&activeDmers, parameters.k - parameters.d + 1)) {
1095 DISPLAYLEVEL(1,
"Failed to allocate dmer map: out of memory\n");
1098 if (!dict || !freqs) {
1099 DISPLAYLEVEL(1,
"Failed to allocate buffers: out of memory\n");
/* Build the dictionary for this parameter set, then select/score it. */
1106 const size_t tail = COVER_buildDictionary(ctx, freqs, &activeDmers, dict,
1107 dictBufferCapacity, parameters);
1108 selection =
COVER_selectDict(dict + tail, dictBufferCapacity, dictBufferCapacity - tail,
1110 totalCompressedSize);
1121 COVER_map_destroy(&activeDmers);
/* ZDICT_optimizeTrainFromBuffer_cover fragment: grid-searches d in
 * {kMinD..kMaxD step 2} and k in {kMinK..kMaxK step kStepSize}, scoring each
 * candidate (optionally on a thread pool) and writing the winning dictionary
 * and parameters back to the caller.
 * NOTE(review): many interior lines (pool creation, best/data setup, error
 * paths, the final parameter write-back and cleanup) are missing from this
 * extraction. */
1127 void* dictBuffer,
size_t dictBufferCapacity,
const void* samplesBuffer,
1128 const size_t* samplesSizes,
unsigned nbSamples,
1129 ZDICT_cover_params_t* parameters)
/* Grid-search bounds; zero-valued inputs select the defaults below. */
1132 const unsigned nbThreads = parameters->nbThreads;
1133 const double splitPoint =
1135 const unsigned kMinD = parameters->d == 0 ? 6 : parameters->d;
1136 const unsigned kMaxD = parameters->d == 0 ? 8 : parameters->d;
1137 const unsigned kMinK = parameters->k == 0 ? 50 : parameters->k;
1138 const unsigned kMaxK = parameters->k == 0 ? 2000 : parameters->k;
1139 const unsigned kSteps = parameters->steps == 0 ? 40 : parameters->steps;
1140 const unsigned kStepSize =
MAX((kMaxK - kMinK) / kSteps, 1);
/* Total trials, used only for the progress display. */
1141 const unsigned kIterations =
1142 (1 + (kMaxD - kMinD) / 2) * (1 + (kMaxK - kMinK) / kStepSize);
1143 const unsigned shrinkDict = 0;
1145 const int displayLevel = parameters->zParams.notificationLevel;
1146 unsigned iteration = 1;
/* Parameter checks. */
1154 if (splitPoint <= 0 || splitPoint > 1) {
1156 return ERROR(parameter_outOfBound);
1158 if (kMinK < kMaxD || kMaxK < kMinK) {
1160 return ERROR(parameter_outOfBound);
1162 if (nbSamples == 0) {
1163 DISPLAYLEVEL(1,
"Cover must have at least one input file\n");
1164 return ERROR(srcSize_wrong);
1166 if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
1167 DISPLAYLEVEL(1,
"dictBufferCapacity must be at least %u\n",
1168 ZDICT_DICTSIZE_MIN);
1169 return ERROR(dstSize_tooSmall);
/* Multi-threaded search uses a POOL; its creation can fail. */
1171 if (nbThreads > 1) {
1174 return ERROR(memory_allocation);
/* Workers log one level quieter than the driver. */
1180 g_displayLevel = displayLevel == 0 ? 0 : displayLevel - 1;
/* Outer loop over d: the context (suffix sort) is rebuilt per d and shared
 * by all k trials. */
1184 for (d = kMinD; d <= kMaxD; d += 2) {
1189 const size_t initVal = COVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples, d, splitPoint);
1202 for (k = kMinK; k <= kMaxK; k += kStepSize) {
1210 COVER_ctx_destroy(&ctx);
1212 return ERROR(memory_allocation);
1223 data->
parameters.zParams.notificationLevel = g_displayLevel;
/* Skip parameter combinations that cannot produce a valid dictionary. */
1225 if (!COVER_checkParameters(data->
parameters, dictBufferCapacity)) {
/* Dispatch the trial: pooled when available, else run inline. */
1233 POOL_add(pool, &COVER_tryParameters, data);
1235 COVER_tryParameters(data);
1239 (
unsigned)((iteration * 100) / kIterations));
1243 COVER_ctx_destroy(&ctx);
/* Copy the winning dictionary out to the caller's buffer. */
1248 const size_t dictSize = best.
dictSize;
1253 return compressedSize;
1256 memcpy(dictBuffer, best.
dict, dictSize);
MEM_STATIC unsigned ZSTD_highbit32(U32 val)
#define LOCALDISPLAYUPDATE(displayLevel, l,...)
ZDICTLIB_STATIC_API size_t ZDICT_trainFromBuffer_cover(void *dictBuffer, size_t dictBufferCapacity, const void *samplesBuffer, const size_t *samplesSizes, unsigned nbSamples, ZDICT_cover_params_t parameters)
struct COVER_tryParameters_data_s COVER_tryParameters_data_t
#define COVER_DEFAULT_SPLITPOINT
COVER_dictSelection_t COVER_dictSelectionError(size_t error)
COVER_epoch_info_t COVER_computeEpochs(U32 maxDictSize, U32 nbDmers, U32 k, U32 passes)
ZDICTLIB_STATIC_API size_t ZDICT_optimizeTrainFromBuffer_cover(void *dictBuffer, size_t dictBufferCapacity, const void *samplesBuffer, const size_t *samplesSizes, unsigned nbSamples, ZDICT_cover_params_t *parameters)
#define DISPLAYUPDATE(l,...)
void COVER_warnOnSmallCorpus(size_t maxDictSize, size_t nbDmers, int displayLevel)
void COVER_best_finish(COVER_best_t *best, ZDICT_cover_params_t parameters, COVER_dictSelection_t selection)
COVER_dictSelection_t COVER_selectDict(BYTE *customDictContent, size_t dictBufferCapacity, size_t dictContentSize, const BYTE *samplesBuffer, const size_t *samplesSizes, unsigned nbFinalizeSamples, size_t nbCheckSamples, size_t nbSamples, ZDICT_cover_params_t params, size_t *offsets, size_t totalCompressedSize)
struct COVER_map_pair_t_s COVER_map_pair_t
struct COVER_map_s COVER_map_t
size_t COVER_checkTotalCompressedSize(const ZDICT_cover_params_t parameters, const size_t *samplesSizes, const BYTE *samples, size_t *offsets, size_t nbTrainSamples, size_t nbSamples, BYTE *const dict, size_t dictBufferCapacity)
void COVER_dictSelectionFree(COVER_dictSelection_t selection)
void COVER_best_wait(COVER_best_t *best)
unsigned COVER_dictSelectionIsError(COVER_dictSelection_t selection)
size_t COVER_sum(const size_t *samplesSizes, unsigned nbSamples)
#define DISPLAYLEVEL(l,...)
void COVER_best_start(COVER_best_t *best)
#define COVER_MAX_SAMPLES_SIZE
void COVER_best_init(COVER_best_t *best)
#define LOCALDISPLAYLEVEL(displayLevel, l,...)
void COVER_best_destroy(COVER_best_t *best)
#define assert(condition)
MEM_STATIC U64 MEM_readLE64(const void *memPtr)
uint32_t size(sys::state const &state)
MOD_PROV_LIST constexpr uint32_t count
POOL_ctx * POOL_create(size_t numThreads, size_t queueSize)
void POOL_add(POOL_ctx *ctx, POOL_function function, void *opaque)
void POOL_free(POOL_ctx *ctx)
ZSTD_pthread_mutex_t mutex
ZDICT_cover_params_t parameters
const size_t * samplesSizes
size_t totalCompressedSize
size_t dictBufferCapacity
ZDICT_cover_params_t parameters
#define ZSTD_pthread_mutex_init(a, b)
#define ZSTD_pthread_mutex_unlock(a)
#define ZSTD_pthread_cond_signal(a)
#define ZSTD_pthread_cond_wait(a, b)
#define ZSTD_pthread_cond_broadcast(a)
#define ZSTD_pthread_mutex_lock(a)
#define ZSTD_pthread_mutex_destroy(a)
#define ZSTD_pthread_cond_init(a, b)
#define ZSTD_pthread_cond_destroy(a)
size_t ZDICT_finalizeDictionary(void *dictBuffer, size_t dictBufferCapacity, const void *customDictContent, size_t dictContentSize, const void *samplesBuffer, const size_t *samplesSizes, unsigned nbSamples, ZDICT_params_t params)
unsigned ZDICT_isError(size_t errorCode)
size_t ZSTD_compressBound(size_t srcSize)
ZSTD_CDict * ZSTD_createCDict(const void *dict, size_t dictSize, int compressionLevel)
ZSTD_CCtx * ZSTD_createCCtx(void)
size_t ZSTD_compress_usingCDict(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const ZSTD_CDict *cdict)
size_t ZSTD_freeCCtx(ZSTD_CCtx *cctx)
size_t ZSTD_freeCDict(ZSTD_CDict *cdict)