19# pragma warning(disable : 4127)
26#include "../common/zstd_deps.h"
27#include "../common/compiler.h"
28#include "../common/bitstream.h"
30#define FSE_STATIC_LINKING_ONLY
31#include "../common/fse.h"
32#include "../common/huf.h"
33#include "../common/error_private.h"
34#include "../common/bits.h"
40#define HUF_isError ERR_isError
41#define HUF_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)
/* showU32() :
 * Debug helper : logs the first `size` entries of a U32 array via RAWLOG level 6.
 * NOTE(review): intermediate lines of this definition are elided in this view. */
61static size_t showU32(
const U32* arr,
size_t size)
64 for (u=0; u<
size; u++) {
65 RAWLOG(6,
" %u", arr[u]); (void)arr;
/* forward declaration : reads the bit-length stored in a packed HUF_CElt
 * (needed here by showCTableBits; definition appears later in the file) */
71static size_t HUF_getNbBits(
HUF_CElt elt);
/* showCTableBits() :
 * Debug helper : logs the nbBits of each of the first `size` CTable cells. */
73static size_t showCTableBits(
const HUF_CElt* ctable,
size_t size)
76 for (u=0; u<
size; u++) {
77 RAWLOG(6,
" %zu", HUF_getNbBits(ctable[u])); (void)ctable;
/* showHNodeSymbols() :
 * Debug helper : logs the symbol (`.byte`) of the first `size` huffman nodes. */
84static size_t showHNodeSymbols(
const nodeElt* hnode,
size_t size)
87 for (u=0; u<
size; u++) {
88 RAWLOG(6,
" %u", hnode[u].
byte); (void)hnode;
/* showHNodeBits() :
 * Debug helper : logs the assigned bit-length of the first `size` huffman nodes. */
94static size_t showHNodeBits(
const nodeElt* hnode,
size_t size)
97 for (u=0; u<
size; u++) {
98 RAWLOG(6,
" %u", hnode[u].nbBits); (void)hnode;
110#define HUF_WORKSPACE_MAX_ALIGNMENT 8
/* HUF_alignUpWorkspace() :
 * Rounds `workspace` up to the next multiple of `align` (asserted to be a
 * power of two), and deducts the padding from *workspaceSizePtr.
 * If the padding does not fit, remaining size is set to 0 (presumably so a
 * later size check fails cleanly — elided lines; confirm against upstream). */
112static void* HUF_alignUpWorkspace(
void* workspace,
size_t* workspaceSizePtr,
size_t align)
114 size_t const mask = align - 1;
115 size_t const rem = (size_t)workspace & mask;
116 size_t const add = (align - rem) & mask;
117 BYTE*
const aligned = (
BYTE*)workspace + add;
118 assert((align & (align - 1)) == 0);
120 if (*workspaceSizePtr >= add) {
122 assert(((
size_t)aligned & mask) == 0);
123 *workspaceSizePtr -= add;
126 *workspaceSizePtr = 0;
137#define MAX_FSE_TABLELOG_FOR_HUFF_HEADER 6
/* HUF_compressWeights() :
 * FSE-compresses the table of huffman weights into `dst`.
 * Returns 0 when the weights are better stored uncompressed/raw
 * (callers treat 0 as "use fallback encoding", not as an error). */
147HUF_compressWeights(
void* dst,
size_t dstSize,
148 const void* weightTable,
size_t wtSize,
149 void* workspace,
size_t workspaceSize)
153 BYTE*
const oend = ostart + dstSize;
/* degenerate inputs : nothing to gain from entropy coding */
162 if (wtSize <= 1)
return 0;
/* single repeated symbol : RLE case, signalled by returning 1 */
166 if (maxCount == wtSize)
return 1;
167 if (maxCount == 1)
return 0;
181 if (cSize == 0)
return 0;
185 return (
size_t)(op-ostart);
/* HUF_getNbBits() :
 * NOTE(review): body elided in this view; per HUF_getValue below, nbBits
 * presumably occupies the low 8 bits of a HUF_CElt — confirm upstream. */
188static size_t HUF_getNbBits(
HUF_CElt elt)
/* HUF_getNbBitsFast() :
 * Variant of HUF_getNbBits with fewer guarantees (body elided in this view). */
193static size_t HUF_getNbBitsFast(
HUF_CElt elt)
/* HUF_getValue() :
 * Returns the code value, i.e. the HUF_CElt with its low byte (nbBits) masked off. */
198static size_t HUF_getValue(
HUF_CElt elt)
200 return elt & ~(size_t)0xFF;
/* HUF_getValueFast() :
 * Fast variant of HUF_getValue (body elided in this view). */
203static size_t HUF_getValueFast(
HUF_CElt elt)
/* HUF_setNbBits() :
 * Stores `nbBits` into the low byte of *elt (body elided in this view). */
208static void HUF_setNbBits(
HUF_CElt* elt,
size_t nbBits)
/* HUF_setValue() :
 * Stores the code `value` into *elt without disturbing the nbBits field;
 * asserts the value fits within the element's declared bit-length. */
214static void HUF_setValue(
HUF_CElt* elt,
size_t value)
216 size_t const nbBits = HUF_getNbBits(*elt);
218 assert((value >> nbBits) == 0);
/* HUF_writeCTableHeader() :
 * Records tableLog and maxSymbolValue in the CTable's header cell
 * (header layout elided in this view). maxSymbolValue must fit in a byte. */
230static void HUF_writeCTableHeader(
HUF_CElt* ctable,
U32 tableLog,
U32 maxSymbolValue)
237 assert(maxSymbolValue < 256);
/* HUF_writeCTable_wksp() (fragment — signature start elided) :
 * Serializes a huffman CTable as a weight table : tries FSE compression of
 * the weights first; when that doesn't pay off (hSize test below), falls
 * back to raw 4-bit-per-symbol encoding with a 128+ length marker byte. */
249 const HUF_CElt* CTable,
unsigned maxSymbolValue,
unsigned huffLog,
250 void* workspace,
size_t workspaceSize)
252 HUF_CElt const*
const ct = CTable + 1;
268 for (n=1; n<huffLog+1; n++)
270 for (n=0; n<maxSymbolValue; n++)
274 if (maxDstSize < 1)
return ERROR(dstSize_tooSmall);
/* FSE-compressed weights are only worth it below half the raw size */
276 if ((hSize>1) & (hSize < maxSymbolValue/2)) {
/* raw fallback : header byte encodes 128 + (nbSymbols-1) */
282 if (maxSymbolValue > (256-128))
return ERROR(GENERIC);
283 if (((maxSymbolValue+1)/2) + 1 > maxDstSize)
return ERROR(dstSize_tooSmall);
284 op[0] = (
BYTE)(128 + (maxSymbolValue-1));
286 for (n=0; n<maxSymbolValue; n+=2)
288 return ((maxSymbolValue+1)/2) + 1;
/* HUF_readCTable() :
 * Rebuilds a compression CTable from a serialized weight table in `src`.
 * Outputs the actual maxSymbolValue and whether any symbol had weight 0.
 * Fails with maxSymbolValue_tooSmall if the stream declares more symbols
 * than the caller allows. */
292size_t HUF_readCTable (
HUF_CElt* CTable,
unsigned* maxSymbolValuePtr,
const void* src,
size_t srcSize,
unsigned* hasZeroWeights)
302 *hasZeroWeights = (rankVal[0] > 0);
306 if (nbSymbols > *maxSymbolValuePtr+1)
return ERROR(maxSymbolValue_tooSmall);
308 *maxSymbolValuePtr = nbSymbols - 1;
310 HUF_writeCTableHeader(CTable, tableLog, *maxSymbolValuePtr);
/* accumulate rank starts : rankVal[n] symbols of weight n, each worth 2^(n-1) slots */
313 {
U32 n, nextRankStart = 0;
314 for (n=1; n<=tableLog; n++) {
315 U32 curr = nextRankStart;
316 nextRankStart += (rankVal[n] << (n-1));
/* weight w -> nbBits = tableLog+1-w ; weight 0 -> nbBits 0 (branchless mask) */
321 {
U32 n;
for (n=0; n<nbSymbols; n++) {
322 const U32 w = huffWeight[n];
323 HUF_setNbBits(ct + n, (
BYTE)(tableLog + 1 - w) & -(w != 0));
/* derive canonical code values from per-rank symbol counts */
329 {
U32 n;
for (n=0; n<nbSymbols; n++) nbPerRank[HUF_getNbBits(ct[n])]++; }
331 valPerRank[tableLog+1] = 0;
333 U32 n;
for (n=tableLog; n>0; n--) {
339 {
U32 n;
for (n=0; n<nbSymbols; n++) HUF_setValue(ct + n, valPerRank[HUF_getNbBits(ct[n])]++); }
/* HUF_getNbBitsFromCTable() (fragment — signature elided) :
 * Returns the code length of `symbolValue`; ct skips the CTable header cell. */
347 const HUF_CElt*
const ct = CTable + 1;
351 return (
U32)HUF_getNbBits(ct[symbolValue]);
/* HUF_setMaxHeight() :
 * Enforces a maximum code length (`targetNbBits`) on a sorted huffman tree,
 * using the classic length-limiting repair : truncating over-long codes
 * creates a "debt" (totalCost) that is paid back by lengthening cheaper
 * symbols. Returns the resulting largest bit-length.
 * NOTE(review): many intermediate lines are elided in this view. */
376static U32 HUF_setMaxHeight(
nodeElt* huffNode,
U32 lastNonNull,
U32 targetNbBits)
378 const U32 largestBits = huffNode[lastNonNull].
nbBits;
/* early exit : tree already within the limit */
380 if (largestBits <= targetNbBits)
return largestBits;
382 DEBUGLOG(5,
"HUF_setMaxHeight (targetNbBits = %u)", targetNbBits);
386 const U32 baseCost = 1 << (largestBits - targetNbBits);
387 int n = (int)lastNonNull;
/* clamp every over-long code to targetNbBits, accumulating the cost */
393 while (huffNode[n].nbBits > targetNbBits) {
394 totalCost += baseCost - (1 << (largestBits - huffNode[
n].
nbBits));
399 assert(huffNode[n].nbBits <= targetNbBits);
401 while (huffNode[n].nbBits == targetNbBits) --
n;
/* renormalize cost into units of the target-length scale */
405 assert(((
U32)totalCost & (baseCost - 1)) == 0);
406 totalCost >>= (largestBits - targetNbBits);
410 {
U32 const noSymbol = 0xF0F0F0F0;
/* rankLast[k] : index of last symbol using (targetNbBits - k) bits */
415 {
U32 currentNbBits = targetNbBits;
417 for (pos=n ; pos >= 0; pos--) {
418 if (huffNode[pos].nbBits >= currentNbBits)
continue;
419 currentNbBits = huffNode[pos].
nbBits;
420 rankLast[targetNbBits-currentNbBits] = (
U32)pos;
/* pay back debt : repeatedly demote the cheapest suitable symbol */
423 while (totalCost > 0) {
428 for ( ; nBitsToDecrease > 1; nBitsToDecrease--) {
429 U32 const highPos = rankLast[nBitsToDecrease];
430 U32 const lowPos = rankLast[nBitsToDecrease-1];
431 if (highPos == noSymbol)
continue;
435 if (lowPos == noSymbol)
break;
436 {
U32 const highTotal = huffNode[highPos].
count;
437 U32 const lowTotal = 2 * huffNode[lowPos].
count;
438 if (highTotal <= lowTotal)
break;
441 assert(rankLast[nBitsToDecrease] != noSymbol || nBitsToDecrease == 1);
443 while ((nBitsToDecrease<=
HUF_TABLELOG_MAX) && (rankLast[nBitsToDecrease] == noSymbol))
445 assert(rankLast[nBitsToDecrease] != noSymbol);
447 totalCost -= 1 << (nBitsToDecrease-1);
448 huffNode[rankLast[nBitsToDecrease]].
nbBits++;
/* keep rankLast[] consistent after the demotion */
454 if (rankLast[nBitsToDecrease-1] == noSymbol)
455 rankLast[nBitsToDecrease-1] = rankLast[nBitsToDecrease];
463 if (rankLast[nBitsToDecrease] == 0)
464 rankLast[nBitsToDecrease] = noSymbol;
466 rankLast[nBitsToDecrease]--;
467 if (huffNode[rankLast[nBitsToDecrease]].nbBits != targetNbBits-nBitsToDecrease)
468 rankLast[nBitsToDecrease] = noSymbol;
/* overshoot : promote symbols back until cost balances */
478 while (totalCost < 0) {
482 if (rankLast[1] == noSymbol) {
483 while (huffNode[n].nbBits == targetNbBits)
n--;
486 rankLast[1] = (
U32)(n+1);
490 huffNode[ rankLast[1] + 1 ].
nbBits--;
/* Sizing of the rankPosition bucket table used by HUF_sort :
 * small counts get one bucket each, counts above the distinct-count cutoff
 * fall into log2 buckets — NOTE(review): exact bucketing logic lives in
 * HUF_getIndex, whose body is elided in this view. */
508#define RANK_POSITION_TABLE_SIZE 192
523#define RANK_POSITION_MAX_COUNT_LOG 32
524#define RANK_POSITION_LOG_BUCKETS_BEGIN ((RANK_POSITION_TABLE_SIZE - 1) - RANK_POSITION_MAX_COUNT_LOG - 1 )
525#define RANK_POSITION_DISTINCT_COUNT_CUTOFF (RANK_POSITION_LOG_BUCKETS_BEGIN + ZSTD_highbit32(RANK_POSITION_LOG_BUCKETS_BEGIN) )
/* HUF_getIndex() :
 * Maps a symbol count to its rankPosition bucket (body elided in this view). */
530static U32 HUF_getIndex(
U32 const count) {
/* HUF_isSorted() (fragment — signature elided) :
 * Verifies huffNode[] is in non-increasing order of count. */
546 for (i = 1; i < maxSymbolValue1; ++i) {
547 if (huffNode[i].count > huffNode[i-1].count) {
/* HUF_insertionSort() (fragment — signature elided) :
 * Classic insertion sort of huffNode[low..high] by descending count;
 * used for the small sub-ranges below the quicksort threshold. */
557 int const size = high-low+1;
559 for (i = 1; i < size; ++i) {
560 nodeElt const key = huffNode[i];
562 while (j >= 0 && huffNode[j].count < key.
count) {
563 huffNode[j + 1] = huffNode[j];
566 huffNode[j + 1] = key;
/* HUF_quickSortPartition() :
 * Lomuto partition for descending-count order : elements with count > pivot
 * move before the pivot position; returns the pivot's final index
 * (return statement elided in this view). */
571static int HUF_quickSortPartition(
nodeElt arr[],
int const low,
int const high) {
578 for ( ; j < high; j++) {
579 if (arr[j].count > pivot) {
581 HUF_swapNodes(&arr[i], &arr[j]);
584 HUF_swapNodes(&arr[i + 1], &arr[high]);
/* HUF_simpleQuickSort() :
 * Quicksort by descending count, switching to insertion sort under 8
 * elements; recurses into the smaller half first so the larger half can be
 * handled iteratively, bounding stack depth. */
591static void HUF_simpleQuickSort(
nodeElt arr[],
int low,
int high) {
592 int const kInsertionSortThreshold = 8;
593 if (high - low < kInsertionSortThreshold) {
598 int const idx = HUF_quickSortPartition(arr, low, high);
599 if (idx - low < high - idx) {
600 HUF_simpleQuickSort(arr, low, idx - 1);
603 HUF_simpleQuickSort(arr, idx + 1, high);
/* HUF_sort() :
 * Sorts symbols by descending count into huffNode[] using a bucket
 * (counting) pass over rankPosition[], then an in-bucket quicksort for
 * buckets holding more than one symbol.
 * NOTE(review): intermediate lines elided in this view. */
620static void HUF_sort(
nodeElt huffNode[],
const unsigned count[],
U32 const maxSymbolValue,
rankPos rankPosition[]) {
622 U32 const maxSymbolValue1 = maxSymbolValue+1;
/* pass 1 : histogram of bucket sizes */
631 for (n = 0;
n < maxSymbolValue1; ++
n) {
632 U32 lowerRank = HUF_getIndex(count[n]);
634 rankPosition[lowerRank].
base++;
/* suffix-sum so each bucket knows its start offset */
640 rankPosition[
n-1].
base += rankPosition[
n].
base;
641 rankPosition[
n-1].
curr = rankPosition[
n-1].
base;
/* pass 2 : scatter symbols into their buckets */
645 for (n = 0;
n < maxSymbolValue1; ++
n) {
647 U32 const r = HUF_getIndex(c) + 1;
648 U32 const pos = rankPosition[r].
curr++;
649 assert(pos < maxSymbolValue1);
650 huffNode[pos].
count = c;
/* pass 3 : sort within each multi-element bucket */
656 int const bucketSize = rankPosition[
n].
curr - rankPosition[
n].
base;
657 U32 const bucketStartIdx = rankPosition[
n].
base;
658 if (bucketSize > 1) {
659 assert(bucketStartIdx < maxSymbolValue1);
660 HUF_simpleQuickSort(huffNode + bucketStartIdx, 0, bucketSize-1);
/* index of the first internal (non-leaf) node inside huffNodeTable */
672#define STARTNODE (HUF_SYMBOLVALUE_MAX+1)
/* HUF_buildTree() :
 * Builds the huffman tree bottom-up over the sorted huffNode[] array :
 * repeatedly merges the two lowest-count nodes (taken from either the leaf
 * tail or the already-built internal nodes), then walks parents to assign
 * bit lengths. Returns nonNullRank, index of the last non-zero-count symbol.
 * NOTE(review): intermediate lines elided in this view. */
681static int HUF_buildTree(
nodeElt* huffNode,
U32 maxSymbolValue)
683 nodeElt*
const huffNode0 = huffNode - 1;
688 DEBUGLOG(5,
"HUF_buildTree (alphabet size = %u)", maxSymbolValue + 1);
/* skip trailing zero-count symbols */
690 nonNullRank = (int)maxSymbolValue;
691 while(huffNode[nonNullRank].count == 0) nonNullRank--;
692 lowS = nonNullRank; nodeRoot = nodeNb + lowS - 1; lowN = nodeNb;
693 huffNode[nodeNb].
count = huffNode[lowS].
count + huffNode[lowS-1].
count;
/* sentinel counts so internal nodes never win prematurely */
696 for (n=nodeNb; n<=nodeRoot; n++) huffNode[n].count = (
U32)(1U<<30);
/* merge the two cheapest remaining nodes until the root is formed */
700 while (nodeNb <= nodeRoot) {
701 int const n1 = (huffNode[lowS].
count < huffNode[lowN].
count) ? lowS-- : lowN++;
702 int const n2 = (huffNode[lowS].
count < huffNode[lowN].
count) ? lowS-- : lowN++;
/* distribute depths : root = 0, each child = parent + 1 */
709 huffNode[nodeRoot].
nbBits = 0;
711 huffNode[n].nbBits = huffNode[ huffNode[n].parent ].nbBits + 1;
712 for (n=0; n<=nonNullRank; n++)
713 huffNode[n].nbBits = huffNode[ huffNode[n].parent ].nbBits + 1;
715 DEBUGLOG(6,
"Initial distribution of bits completed (%zu sorted symbols)", showHNodeBits(huffNode, maxSymbolValue+1));
/* HUF_buildCTableFromTree() :
 * Converts the depth-annotated huffNode tree into a canonical CTable :
 * counts symbols per bit-length, derives per-rank starting code values,
 * then writes nbBits and value for every symbol, plus the table header. */
730static void HUF_buildCTableFromTree(
HUF_CElt* CTable,
nodeElt const* huffNode,
int nonNullRank,
U32 maxSymbolValue,
U32 maxNbBits)
737 int const alphabetSize = (int)(maxSymbolValue + 1);
738 for (n=0;
n<=nonNullRank;
n++)
739 nbPerRank[huffNode[n].nbBits]++;
/* starting value per rank, longest codes first */
742 for (n=(
int)maxNbBits;
n>0;
n--) {
/* nbBits indexed by symbol (.byte); values assigned in symbol order */
747 for (n=0;
n<alphabetSize;
n++)
748 HUF_setNbBits(ct + huffNode[n].
byte, huffNode[n].nbBits);
749 for (n=0;
n<alphabetSize;
n++)
750 HUF_setValue(ct + n, valPerRank[HUF_getNbBits(ct[n])]++);
752 HUF_writeCTableHeader(CTable, maxNbBits, maxSymbolValue);
/* HUF_buildCTable_wksp() (fragment — signature start elided) :
 * Full pipeline : sort symbols, build the tree, enforce the max code
 * length, then emit the canonical CTable. */
757 void* workSpace,
size_t wkspSize)
762 nodeElt*
const huffNode = huffNode0+1;
767 DEBUGLOG(5,
"HUF_buildCTable_wksp (alphabet size = %u)", maxSymbolValue+1);
771 return ERROR(workSpace_tooSmall);
774 return ERROR(maxSymbolValue_tooLarge);
778 HUF_sort(huffNode, count, maxSymbolValue, wksp_tables->
rankPosition);
779 DEBUGLOG(6,
"sorted symbols completed (%zu symbols)", showHNodeSymbols(huffNode, maxSymbolValue+1));
782 nonNullRank = HUF_buildTree(huffNode, maxSymbolValue);
785 maxNbBits = HUF_setMaxHeight(huffNode, (
U32)nonNullRank, maxNbBits);
788 HUF_buildCTableFromTree(CTable, huffNode, nonNullRank, maxSymbolValue, maxNbBits);
/* HUF_estimateCompressedSize() (fragment) :
 * total bits = sum over symbols of codeLength * occurrenceCount */
798 for (s = 0; s <= (int)maxSymbolValue; ++s) {
799 nbBits += HUF_getNbBits(ct[s]) * count[s];
/* HUF_validateCTable() (fragment) :
 * a table is invalid if any symbol that occurs has no code assigned */
815 for (s = 0; s <= (int)maxSymbolValue; ++s) {
816 bad |= (count[s] != 0) & (HUF_getNbBits(ct[s]) == 0);
/* Double-buffered bit-stream writer used by the huffman encoder.
 * NOTE(review): struct and several function bodies are elided in this view;
 * fragments below belong to HUF_initCStream / HUF_addBits / HUF_flushBits /
 * HUF_endMark / HUF_closeCStream. */
841#define HUF_BITS_IN_CONTAINER (sizeof(size_t) * 8)
844 size_t bitContainer[2];
/* HUF_initCStream : requires room for at least one full container flush */
857 void* startPtr,
size_t dstCapacity)
863 if (dstCapacity <=
sizeof(bitC->
bitContainer[0]))
return ERROR(dstSize_tooSmall);
/* HUF_addBits : OR the code value in, then advance the bit position */
887 bitC->
bitContainer[idx] |= kFast ? HUF_getValueFast(elt) : HUF_getValue(elt);
891 bitC->
bitPos[idx] += HUF_getNbBitsFast(elt);
899 size_t const nbBits = HUF_getNbBits(elt);
903 assert(((elt >> dirtyBits) << (dirtyBits + nbBits)) == 0);
/* HUF_flushBits : write out whole bytes, keep the sub-byte remainder */
940 size_t const nbBits = bitC->
bitPos[0] & 0xFF;
941 size_t const nbBytes = nbBits >> 3;
950 bitC->
ptr += nbBytes;
/* end-mark : a 1-bit '1' terminator appended at stream close */
965 HUF_setNbBits(&endMark, 1);
966 HUF_setValue(&endMark, 1);
/* HUF_closeCStream : bytes written, +1 if a partial byte remains */
978 size_t const nbBits = bitC->
bitPos[0] & 0xFF;
980 return (
size_t)(bitC->
ptr - bitC->
startPtr) + (nbBits > 0);
/* HUF_compress1X_usingCTable_internal_body_loop() (fragment) :
 * Encodes the input back-to-front : first the `srcSize % kUnroll` remainder
 * one symbol at a time, then an optional odd kUnroll group, then the main
 * loop consuming 2*kUnroll symbols per iteration. Template parameters
 * select unroll factor and fast-flush variants at compile time. */
992 const BYTE* ip,
size_t srcSize,
994 int kUnroll,
int kFastFlush,
int kLastFast)
997 int n = (int)srcSize;
998 int rem = n % kUnroll;
1000 for (; rem > 0; --rem) {
1005 assert(n % kUnroll == 0);
/* align n to a multiple of 2*kUnroll before the unrolled main loop */
1008 if (n % (2 * kUnroll)) {
1010 for (u = 1; u < kUnroll; ++u) {
1017 assert(n % (2 * kUnroll) == 0);
1019 for (; n>0; n-= 2 * kUnroll) {
1022 for (u = 1; u < kUnroll; ++u) {
1032 for (u = 1; u < kUnroll; ++u) {
/* HUF_tightCompressBound() :
 * Upper bound, in bytes, of the bitstream produced when each of `srcSize`
 * symbols costs at most `tableLog` bits : total bits rounded down to bytes,
 * plus a small fixed margin for container flushing. */
static size_t HUF_tightCompressBound(size_t srcSize, size_t tableLog)
{
    size_t const totalBits = srcSize * tableLog;
    return (totalBits >> 3) + 8;
}
/* HUF_compress1X_usingCTable_internal_body() (fragment) :
 * Single-stream huffman encoding. Returns 0 (incompressible) when the
 * destination is too small or the tight bound cannot be met. */
1057 const void* src,
size_t srcSize,
1062 const BYTE* ip = (
const BYTE*) src;
1064 BYTE*
const oend = ostart + dstSize;
1068 if (dstSize < 8)
return 0;
1069 {
BYTE* op = ostart;
1070 size_t const initErr = HUF_initCStream(&bitC, op, (
size_t)(oend-op));
/* only the slow bounds-checked path is safe below the tight bound */
1073 if (dstSize < HUF_tightCompressBound(srcSize, (
size_t)tableLog) || tableLog > 11)
1117 return HUF_closeCStream(&bitC);
/* BMI2 / default instantiations of the 1X body, plus the runtime dispatcher.
 * The second HUF_compress1X_usingCTable_internal definition is the
 * no-dynamic-dispatch build variant (preprocessor guards elided in view). */
1123HUF_compress1X_usingCTable_internal_bmi2(
void* dst,
size_t dstSize,
1124 const void* src,
size_t srcSize,
1131HUF_compress1X_usingCTable_internal_default(
void* dst,
size_t dstSize,
1132 const void* src,
size_t srcSize,
1139HUF_compress1X_usingCTable_internal(
void* dst,
size_t dstSize,
1140 const void* src,
size_t srcSize,
1141 const HUF_CElt* CTable,
const int flags)
/* choose the BMI2 code path when the flags request it */
1144 return HUF_compress1X_usingCTable_internal_bmi2(dst, dstSize, src, srcSize, CTable);
1146 return HUF_compress1X_usingCTable_internal_default(dst, dstSize, src, srcSize, CTable);
1152HUF_compress1X_usingCTable_internal(
void* dst,
size_t dstSize,
1153 const void* src,
size_t srcSize,
1154 const HUF_CElt* CTable,
const int flags)
1164 return HUF_compress1X_usingCTable_internal(dst, dstSize, src, srcSize, CTable, flags);
/* HUF_compress4X_usingCTable_internal() :
 * Splits the input into 4 equal segments, encodes each as an independent
 * 1X stream, and stores the first three compressed sizes as 16-bit LE
 * fields in the 6-byte jump table at the start of dst. Returns 0 when any
 * segment is incompressible or exceeds the 16-bit size field. */
1168HUF_compress4X_usingCTable_internal(
void* dst,
size_t dstSize,
1169 const void* src,
size_t srcSize,
1172 size_t const segmentSize = (srcSize+3)/4;
1173 const BYTE* ip = (
const BYTE*) src;
1174 const BYTE*
const iend = ip + srcSize;
1176 BYTE*
const oend = ostart + dstSize;
/* minimum room : 6-byte jump table + 1 byte per stream + flush margin */
1179 if (dstSize < 6 + 1 + 1 + 1 + 8)
return 0;
1180 if (srcSize < 12)
return 0;
/* segment 1 */
1184 {
CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (
size_t)(oend-op), ip, segmentSize, CTable, flags) );
1185 if (cSize == 0 || cSize > 65535)
return 0;
/* segment 2 */
1192 {
CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (
size_t)(oend-op), ip, segmentSize, CTable, flags) );
1193 if (cSize == 0 || cSize > 65535)
return 0;
/* segment 3 */
1200 {
CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (
size_t)(oend-op), ip, segmentSize, CTable, flags) );
1201 if (cSize == 0 || cSize > 65535)
return 0;
/* segment 4 takes whatever input remains */
1209 {
CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (
size_t)(oend-op), ip, (
size_t)(iend-ip), CTable, flags) );
1210 if (cSize == 0 || cSize > 65535)
return 0;
1214 return (
size_t)(op-ostart);
/* HUF_compress4X_usingCTable() (fragment) : thin public wrapper */
1219 return HUF_compress4X_usingCTable_internal(dst, dstSize, src, srcSize, CTable, flags);
/* HUF_compressCTable_internal() :
 * Dispatches to the 1-stream or 4-stream encoder, then rejects results
 * that don't actually shrink the data (returns 0 = incompressible). */
1224static size_t HUF_compressCTable_internal(
1226 const void* src,
size_t srcSize,
1230 HUF_compress1X_usingCTable_internal(op, (
size_t)(oend - op), src, srcSize, CTable, flags) :
1231 HUF_compress4X_usingCTable_internal(op, (
size_t)(oend - op), src, srcSize, CTable, flags);
1233 if (cSize==0) {
return 0; }
/* require a net gain of at least 1 byte including the header */
1237 if ((
size_t)(op-ostart) >= srcSize-1) {
return 0; }
1238 return (
size_t)(op-ostart);
/* sampling parameters used when the caller flags input as suspected
 * incompressible : probe SAMPLE_SIZE bytes at 1/RATIO of the input */
1251#define SUSPECT_INCOMPRESSIBLE_SAMPLE_SIZE 4096
1252#define SUSPECT_INCOMPRESSIBLE_SAMPLE_RATIO 10
/* HUF_cardinality() (fragment) : number of symbols with non-zero count */
1256 unsigned cardinality = 0;
1259 for (i = 0; i < maxSymbolValue + 1; i++) {
1260 if (count[i] != 0) cardinality += 1;
/* HUF_minTableLog() (fragment) : smallest log able to host all symbols */
1269 return minBitsSymbols;
/* HUF_optimalTableLog() (fragment — signature start elided) :
 * Probes candidate table logs from minTableLog upward, building a trial
 * CTable for each and keeping the log that minimizes header + payload size.
 * Stops early once a guess can no longer improve on the best so far. */
1273 unsigned maxTableLog,
1275 unsigned maxSymbolValue,
1276 void* workSpace,
size_t wkspSize,
1278 const unsigned* count,
1291 size_t hSize, newSize;
1292 const unsigned symbolCardinality =
HUF_cardinality(count, maxSymbolValue);
1294 size_t optSize = ((size_t) ~0) - 1;
1295 unsigned optLog = maxTableLog, optLogGuess;
1297 DEBUGLOG(6,
"HUF_optimalTableLog: probing huf depth (srcSize=%zu)", srcSize);
1300 for (optLogGuess = minTableLog; optLogGuess <= maxTableLog; optLogGuess++) {
1301 DEBUGLOG(7,
"checking for huffLog=%u", optLogGuess);
/* the tree saturated below the guessed log : larger guesses can't help */
1306 if (maxBits < optLogGuess && optLogGuess > minTableLog)
break;
1315 if (newSize > optSize + 1) {
1319 if (newSize < optSize) {
1321 optLog = optLogGuess;
/* HUF_compress_internal() :
 * Top-level huffman compression driver : validates sizes, optionally
 * samples suspected-incompressible input, histograms the source, decides
 * between reusing the previous table, building a new one, or emitting
 * RLE/raw, then encodes via HUF_compressCTable_internal.
 * NOTE(review): many intermediate lines elided in this view. */
1333HUF_compress_internal (
void* dst,
size_t dstSize,
1334 const void* src,
size_t srcSize,
1335 unsigned maxSymbolValue,
unsigned huffLog,
1337 void* workSpace,
size_t wkspSize,
1342 BYTE*
const oend = ostart + dstSize;
1345 DEBUGLOG(5,
"HUF_compress_internal (srcSize=%zu)", srcSize);
/* parameter sanity : workspace, then degenerate src/dst sizes */
1349 if (wkspSize <
sizeof(*
table))
return ERROR(workSpace_tooSmall);
1350 if (!srcSize)
return 0;
1351 if (!dstSize)
return 0;
/* forced reuse of the caller-provided table */
1360 return HUF_compressCTable_internal(ostart, op, oend,
1362 nbStreams, oldHufTable, flags);
/* cheap pre-pass : sample begin+end of input to detect incompressibility */
1368 size_t largestTotal = 0;
1369 DEBUGLOG(5,
"input suspected incompressible : sampling to check");
1370 {
unsigned maxSymbolValueBegin = maxSymbolValue;
1372 largestTotal += largestBegin;
1374 {
unsigned maxSymbolValueEnd = maxSymbolValue;
1376 largestTotal += largestEnd;
/* single-symbol input : emit as RLE (1 byte) */
1383 if (largest == srcSize) { *ostart = ((
const BYTE*)src)[0];
return 1; }
1384 if (largest <= (srcSize >> 7)+4)
return 0;
1386 DEBUGLOG(6,
"histogram detail completed (%zu symbols)", showU32(
table->count, maxSymbolValue+1));
/* previous table still valid and reuse preferred */
1396 return HUF_compressCTable_internal(ostart, op, oend,
1398 nbStreams, oldHufTable, flags);
/* build a fresh CTable sized by the chosen huffLog */
1404 maxSymbolValue, huffLog,
1405 &
table->wksps.buildCTable_wksp,
sizeof(
table->wksps.buildCTable_wksp));
1407 huffLog = (
U32)maxBits;
1408 DEBUGLOG(6,
"bit distribution completed (%zu symbols)", showCTableBits(
table->CTable + 1, maxSymbolValue+1));
1413 &
table->wksps.writeCTable_wksp,
sizeof(
table->wksps.writeCTable_wksp)) );
/* keep the old table when its total cost beats header + new payload */
1418 if (oldSize <= hSize + newSize || hSize + 12 >= srcSize) {
1419 return HUF_compressCTable_internal(ostart, op, oend,
1421 nbStreams, oldHufTable, flags);
1425 if (hSize + 12ul >= srcSize) {
return 0; }
1431 return HUF_compressCTable_internal(ostart, op, oend,
1433 nbStreams,
table->CTable, flags);
/* HUF_compress1X_repeat() (fragment — signature start elided) :
 * Public single-stream entry point; forwards to HUF_compress_internal
 * with repeat-table support. */
1437 const void* src,
size_t srcSize,
1438 unsigned maxSymbolValue,
unsigned huffLog,
1439 void* workSpace,
size_t wkspSize,
1442 DEBUGLOG(5,
"HUF_compress1X_repeat (srcSize = %zu)", srcSize);
1443 return HUF_compress_internal(dst, dstSize, src, srcSize,
1445 workSpace, wkspSize, hufTable,
/* HUF_compress4X_repeat() (fragment — signature start elided) :
 * Public four-stream entry point; forwards to HUF_compress_internal
 * with repeat-table support. */
1454 const void* src,
size_t srcSize,
1455 unsigned maxSymbolValue,
unsigned huffLog,
1456 void* workSpace,
size_t wkspSize,
1459 DEBUGLOG(5,
"HUF_compress4X_repeat (srcSize = %zu)", srcSize);
1460 return HUF_compress_internal(dst, dstSize, src, srcSize,
1462 workSpace, wkspSize,
1463 hufTable, repeat, flags);
MEM_STATIC unsigned ZSTD_highbit32(U32 val)
#define BMI2_TARGET_ATTRIBUTE
#define FORCE_INLINE_TEMPLATE
#define DEBUG_STATIC_ASSERT(c)
#define assert(condition)
size_t HUF_readStats(BYTE *huffWeight, size_t hwSize, U32 *rankStats, U32 *nbSymbolsPtr, U32 *tableLogPtr, const void *src, size_t srcSize)
ERR_STATIC unsigned ERR_isError(size_t code)
FSE_PUBLIC_API size_t FSE_writeNCount(void *buffer, size_t bufferSize, const short *normalizedCounter, unsigned maxSymbolValue, unsigned tableLog)
FSE_PUBLIC_API size_t FSE_normalizeCount(short *normalizedCounter, unsigned tableLog, const unsigned *count, size_t srcSize, unsigned maxSymbolValue, unsigned useLowProbCount)
FSE_PUBLIC_API unsigned FSE_optimalTableLog(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue)
FSE_PUBLIC_API size_t FSE_compress_usingCTable(void *dst, size_t dstCapacity, const void *src, size_t srcSize, const FSE_CTable *ct)
size_t FSE_buildCTable_wksp(FSE_CTable *ct, const short *normalizedCounter, unsigned maxSymbolValue, unsigned tableLog, void *workSpace, size_t wkspSize)
unsigned FSE_optimalTableLog_internal(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue, unsigned minus)
size_t HIST_count_wksp(unsigned *count, unsigned *maxSymbolValuePtr, const void *source, size_t sourceSize, void *workSpace, size_t workSpaceSize)
unsigned HIST_count_simple(unsigned *count, unsigned *maxSymbolValuePtr, const void *src, size_t srcSize)
#define HIST_WKSP_SIZE_U32
#define HUF_BLOCKSIZE_MAX
#define HUF_CTABLE_WORKSPACE_SIZE
#define HUF_SYMBOLVALUE_MAX
#define HUF_TABLELOG_DEFAULT
@ HUF_flags_suspectUncompressible
#define HUF_CTABLE_SIZE_ST(maxSymbolValue)
#define HUF_WORKSPACE_SIZE
#define HUF_TABLELOG_ABSOLUTEMAX
#define HUF_COMPRESSBOUND(size)
#define HUF_STATIC_ASSERT(c)
#define SUSPECT_INCOMPRESSIBLE_SAMPLE_SIZE
#define HUF_BITS_IN_CONTAINER
#define SUSPECT_INCOMPRESSIBLE_SAMPLE_RATIO
size_t HUF_compressBound(size_t size)
int HUF_validateCTable(const HUF_CElt *CTable, const unsigned *count, unsigned maxSymbolValue)
#define RANK_POSITION_DISTINCT_COUNT_CUTOFF
nodeElt huffNodeTable[2 *(HUF_SYMBOLVALUE_MAX+1)]
size_t HUF_writeCTable_wksp(void *dst, size_t maxDstSize, const HUF_CElt *CTable, unsigned maxSymbolValue, unsigned huffLog, void *workspace, size_t workspaceSize)
unsigned HUF_cardinality(const unsigned *count, unsigned maxSymbolValue)
FORCE_INLINE_TEMPLATE void HUF_compress1X_usingCTable_internal_body_loop(HUF_CStream_t *bitC, const BYTE *ip, size_t srcSize, const HUF_CElt *ct, int kUnroll, int kFastFlush, int kLastFast)
FORCE_INLINE_TEMPLATE void HUF_encodeSymbol(HUF_CStream_t *bitCPtr, U32 symbol, const HUF_CElt *CTable, int idx, int fast)
HINT_INLINE void HUF_insertionSort(nodeElt huffNode[], int const low, int const high)
FORCE_INLINE_TEMPLATE void HUF_mergeIndex1(HUF_CStream_t *bitC)
U32 HUF_getNbBitsFromCTable(HUF_CElt const *CTable, U32 symbolValue)
size_t HUF_compress4X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable, int flags)
#define RANK_POSITION_LOG_BUCKETS_BEGIN
FORCE_INLINE_TEMPLATE void HUF_zeroIndex1(HUF_CStream_t *bitC)
size_t HUF_estimateCompressedSize(const HUF_CElt *CTable, const unsigned *count, unsigned maxSymbolValue)
size_t HUF_buildCTable_wksp(HUF_CElt *CTable, const unsigned *count, U32 maxSymbolValue, U32 maxNbBits, void *workSpace, size_t wkspSize)
MEM_STATIC int HUF_isSorted(nodeElt huffNode[], U32 const maxSymbolValue1)
#define RANK_POSITION_TABLE_SIZE
size_t HUF_compress1X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, size_t wkspSize, HUF_CElt *hufTable, HUF_repeat *repeat, int flags)
FORCE_INLINE_TEMPLATE void HUF_addBits(HUF_CStream_t *bitC, HUF_CElt elt, int idx, int kFast)
FORCE_INLINE_TEMPLATE void HUF_flushBits(HUF_CStream_t *bitC, int kFast)
HUF_CTableHeader HUF_readCTableHeader(HUF_CElt const *ctable)
size_t HUF_compress1X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable, int flags)
#define HUF_WORKSPACE_MAX_ALIGNMENT
FORCE_INLINE_TEMPLATE size_t HUF_compress1X_usingCTable_internal_body(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable)
#define MAX_FSE_TABLELOG_FOR_HUFF_HEADER
size_t HUF_readCTable(HUF_CElt *CTable, unsigned *maxSymbolValuePtr, const void *src, size_t srcSize, unsigned *hasZeroWeights)
unsigned HUF_minTableLog(unsigned symbolCardinality)
unsigned HUF_optimalTableLog(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue, void *workSpace, size_t wkspSize, HUF_CElt *table, const unsigned *count, int flags)
size_t HUF_compress4X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, size_t wkspSize, HUF_CElt *hufTable, HUF_repeat *repeat, int flags)
MEM_STATIC unsigned MEM_32bits(void)
MEM_STATIC void MEM_writeLE16(void *memPtr, U16 val)
MEM_STATIC void MEM_writeLEST(void *memPtr, size_t val)
uint32_t size(sys::state const &state)
MOD_PROV_LIST constexpr uint32_t count
FSE_CTable CTable[FSE_CTABLE_SIZE_U32(MAX_FSE_TABLELOG_FOR_HUFF_HEADER, HUF_TABLELOG_MAX)]
U32 scratchBuffer[FSE_BUILD_CTABLE_WORKSPACE_SIZE_U32(HUF_TABLELOG_MAX, MAX_FSE_TABLELOG_FOR_HUFF_HEADER)]
unsigned count[HUF_TABLELOG_MAX+1]
S16 norm[HUF_TABLELOG_MAX+1]
BYTE huffWeight[HUF_SYMBOLVALUE_MAX]
BYTE bitsToWeight[HUF_TABLELOG_MAX+1]
HUF_CompressWeightsWksp wksp
rankPos rankPosition[RANK_POSITION_TABLE_SIZE]
huffNodeTable huffNodeTbl
HUF_buildCTable_wksp_tables buildCTable_wksp
HUF_WriteCTableWksp writeCTable_wksp
#define ZSTD_memcpy(d, s, l)
#define ZSTD_memset(p, v, l)