#if !defined(ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR) \
 || !defined(ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR) \
 || !defined(ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR)

#define ZSTD_LITFREQ_ADD    2   /* scaling factor for litFreq, so that frequencies adapt faster to new stats */
#define ZSTD_MAX_PRICE     (1<<30)

#define ZSTD_PREDEF_THRESHOLD 8   /* if srcSize < ZSTD_PREDEF_THRESHOLD, symbols' cost is assumed static, directly determined by pre-defined distributions */

/*-*************************************
*  Price functions for optimal parser
***************************************/

#if 0    /* approximation at bit level (for tests) */
#  define BITCOST_ACCURACY 0
#  define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
#  define WEIGHT(stat, opt) ((void)(opt), ZSTD_bitWeight(stat))
#elif 0  /* fractional bit accuracy (for tests) */
#  define BITCOST_ACCURACY 8
#  define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
#  define WEIGHT(stat,opt) ((void)(opt), ZSTD_fracWeight(stat))
#else    /* opt==approx, ultra==accurate */
#  define BITCOST_ACCURACY 8
#  define BITCOST_MULTIPLIER (1 << BITCOST_ACCURACY)
#  define WEIGHT(stat,opt) ((opt) ? ZSTD_fracWeight(stat) : ZSTD_bitWeight(stat))
#endif
    U32 const stat = rawStat + 1;
    U32 const hb = ZSTD_highbit32(stat);
    U32 const BWeight = hb * BITCOST_MULTIPLIER;
    U32 const FWeight = (stat << BITCOST_ACCURACY) >> hb;
    U32 const weight = BWeight + FWeight;
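/* Illustrative note (editor's sketch, not from the original file):
 * ZSTD_fracWeight() computes ~ (log2(stat)+1) * BITCOST_MULTIPLIER, so with
 * BITCOST_ACCURACY == 8 prices are measured in 1/256th-of-a-bit units.
 * Worked example with a hypothetical value: stat=6 => hb=2, BWeight=2*256=512,
 * FWeight=(6<<8)>>2=384, weight=896 = 3.5*256, vs exact 1+log2(6)=3.585;
 * the fractional part linearly interpolates between powers of 2, within
 * ~0.09 bit of exact, and the constant +1 offset cancels out since prices
 * only ever compare or subtract these weights. */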
static int ZSTD_compressedLiterals(optState_t const* const optPtr)
static void ZSTD_setBasePrices(optState_t* optPtr, int optLevel)
{
    if (ZSTD_compressedLiterals(optPtr))
static U32 sum_u32(const unsigned table[], size_t nbElts)
{
    size_t n;
    U32 total = 0;
    for (n=0; n<nbElts; n++) {
        total += table[n];
    }
    return total;
}
    DEBUGLOG(5, "ZSTD_downscaleStats (nbElts=%u, shift=%u)",
                (unsigned)lastEltIndex+1, (unsigned)shift );
    for (s=0; s<lastEltIndex+1; s++) {
        unsigned const base = base1 ? 1 : (table[s]>0);
        unsigned const newStat = base + (table[s] >> shift);
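/* Illustrative note: downscaling divides every entry by 2^shift while keeping
 * previously non-zero entries non-zero (or >=1 everywhere when base1 is set),
 * so accumulated statistics decay and recent data dominates. E.g. with
 * hypothetical values table[s]=100, shift=1, base1=0:
 * newStat = (100>0) + (100>>1) = 51. */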
static U32 ZSTD_scaleStats(unsigned* table, U32 lastEltIndex, U32 logTarget)
{
    U32 const prevsum = sum_u32(table, lastEltIndex+1);
    U32 const factor = prevsum >> logTarget;
    DEBUGLOG(5, "ZSTD_scaleStats (nbElts=%u, target=%u)",
                (unsigned)lastEltIndex+1, (unsigned)logTarget);
    if (factor <= 1) return prevsum;
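/* Illustrative note: ZSTD_scaleStats() shrinks a frequency table so its sum
 * lands near 2^logTarget before pricing a new block, preserving relative
 * frequencies. E.g. prevsum=8192 with logTarget=10 gives factor=8, so entries
 * are scaled down by about 8x; a table whose sum is already below
 * 2^(logTarget+1) is left untouched via the factor<=1 early exit. */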
static void ZSTD_rescaleFreqs(optState_t* const optPtr,
                              const BYTE* const src, size_t const srcSize,
                              int optLevel)
{
    int const compressedLiterals = ZSTD_compressedLiterals(optPtr);
    DEBUGLOG(5, "ZSTD_rescaleFreqs (srcSize=%u)", (unsigned)srcSize);
        if (compressedLiterals) {
            /* generate literals statistics from huffman table */
            for (lit=0; lit<=MaxLit; lit++) {
                U32 const scaleLog = 11;   /* scale to 2K */
                U32 const bitCost = HUF_getNbBitsFromCTable(optPtr->symbolCosts->huf.CTable, lit);
                assert(bitCost <= scaleLog);
                optPtr->litFreq[lit] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
        }   }

        {   FSE_CState_t llstate;
            FSE_initCState(&llstate, optPtr->symbolCosts->fse.litlengthCTable);
            for (ll=0; ll<=MaxLL; ll++) {
                U32 const scaleLog = 10;   /* scale to 1K */
                U32 const bitCost = FSE_getMaxNbBits(llstate.symbolTT, ll);
                assert(bitCost < scaleLog);
                optPtr->litLengthFreq[ll] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
        }   }

        {   FSE_CState_t mlstate;
            FSE_initCState(&mlstate, optPtr->symbolCosts->fse.matchlengthCTable);
            for (ml=0; ml<=MaxML; ml++) {
                U32 const scaleLog = 10;
                U32 const bitCost = FSE_getMaxNbBits(mlstate.symbolTT, ml);
                assert(bitCost < scaleLog);
                optPtr->matchLengthFreq[ml] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
        }   }

        {   FSE_CState_t ofstate;
            FSE_initCState(&ofstate, optPtr->symbolCosts->fse.offcodeCTable);
            for (of=0; of<=MaxOff; of++) {
                U32 const scaleLog = 10;
                U32 const bitCost = FSE_getMaxNbBits(ofstate.symbolTT, of);
                assert(bitCost < scaleLog);
                optPtr->offCodeFreq[of] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
        }   }
        if (compressedLiterals) {
            /* base initial cost of literals on direct frequency within src */
            unsigned lit = MaxLit;
            HIST_count_simple(optPtr->litFreq, &lit, src, srcSize);   /* use raw first block to init statistics */

        {   unsigned const baseLLfreqs[MaxLL+1] = {
                4, 2, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 1
            };

            for (ml=0; ml<=MaxML; ml++)

        {   unsigned const baseOFCfreqs[MaxOff+1] = {
                6, 2, 1, 1, 2, 3, 4, 4,
                4, 3, 2, 1, 1, 1, 1, 1,
                1, 1, 1, 1, 1, 1, 1, 1,
                1, 1, 1, 1, 1, 1, 1, 1
            };

        if (compressedLiterals)

        ZSTD_setBasePrices(optPtr, optLevel);
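/* Illustrative summary (editor's note): ZSTD_rescaleFreqs() seeds the
 * frequency tables one of three ways: from a dictionary's entropy tables
 * (the cost-derived pseudo-frequencies above), from raw first-block counts
 * plus fixed baseline tables when there is no usable history, or by scaling
 * down statistics carried over from previous blocks. ZSTD_setBasePrices()
 * then caches the per-table total weights used as price baselines. */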
static U32 ZSTD_rawLiteralsCost(const BYTE* const literals, U32 const litLength,
                                const optState_t* const optPtr,
                                int optLevel)
{
    DEBUGLOG(8, "ZSTD_rawLiteralsCost (%u literals)", litLength);
    if (litLength == 0) return 0;

    if (!ZSTD_compressedLiterals(optPtr))
        return (litLength << 3) * BITCOST_MULTIPLIER;   /* uncompressed: 8 bits per literal */

    for (u=0; u < litLength; u++) {
        U32 litPrice = WEIGHT(optPtr->litFreq[literals[u]], optLevel);
        if (UNLIKELY(litPrice > litPriceMax)) litPrice = litPriceMax;
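/* Illustrative note: each literal's cost is its frequency-based weight
 * subtracted from the per-byte base price, i.e. roughly -log2(P(byte));
 * the cap on litPrice above keeps every literal costing at least one bit
 * (BITCOST_MULTIPLIER), so even extremely frequent bytes are never priced
 * as free. */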
static U32 ZSTD_litLengthPrice(U32 const litLength,
                               const optState_t* const optPtr,
                               int optLevel)
{
    if (optPtr->priceType == zop_predef)
        return WEIGHT(litLength, optLevel);
FORCE_INLINE_TEMPLATE U32
ZSTD_getMatchPrice(U32 const offBase, U32 const matchLength,
                   const optState_t* const optPtr, int const optLevel)
{
    if (optPtr->priceType == zop_predef)   /* fixed scheme, does not use statistics */
        return WEIGHT(mlBase, optLevel)

    /* tiny offsets are not worthy */
    if ((optLevel<2) /*static test*/ && offCode >= 20)
        price += (offCode-19)*2 * BITCOST_MULTIPLIER;   /* handicap for long-distance offsets, favor decompression speed */

    DEBUGLOG(8, "ZSTD_getMatchPrice(ml:%u) = %u", matchLength, price);
static void ZSTD_updateStats(optState_t* const optPtr,
                             U32 litLength, const BYTE* literals,
                             U32 offBase, U32 matchLength)
{
    /* literals */
    if (ZSTD_compressedLiterals(optPtr)) {
        for (u=0; u < litLength; u++)
static U32 ZSTD_insertAndFindFirstIndexHash3(const ZSTD_matchState_t* ms,
                                             U32* nextToUpdate3,
                                             const BYTE* const ip)
{
    U32 idx = *nextToUpdate3;

    while(idx < target) {

    return hashTable3[hash3];
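/* Context (editor's note): this is a dedicated hash table over 3-byte
 * sequences, used only when minMatch==3; it returns the closest previous
 * position sharing the first 3 bytes, catching short matches that the main
 * binary-tree search may not surface cheaply. */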
static U32 ZSTD_insertBt1(
                const ZSTD_matchState_t* ms,
                const BYTE* const ip, const BYTE* const iend,
                U32 const target,
                U32 const mls, const int extDict)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const hashLog = cParams->hashLog;
    U32 const btLog  = cParams->chainLog - 1;
    U32 const btMask = (1 << btLog) - 1;
    U32 matchIndex = hashTable[h];
    size_t commonLengthSmaller=0, commonLengthLarger=0;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const U32 btLow = btMask >= curr ? 0 : curr - btMask;
    U32* smallerPtr = bt + 2*(curr&btMask);
    U32* largerPtr  = smallerPtr + 1;
    size_t bestLength = 8;
    U32 nbCompares = 1U << cParams->searchLog;

    U32 predictedSmall = *(bt + 2*((curr-1)&btMask) + 0);
    U32 predictedLarge = *(bt + 2*((curr-1)&btMask) + 1);
    predictedSmall += (predictedSmall>0);
    predictedLarge += (predictedLarge>0);

    DEBUGLOG(8, "ZSTD_insertBt1 (%u)", curr);
    for (; nbCompares && (matchIndex >= windowLow); --nbCompares) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        assert(matchIndex < curr);

        const U32* predictPtr = bt + 2*((matchIndex-1) & btMask);
        if (matchIndex == predictedSmall) {
            /* no need to check length, result known */
            *smallerPtr = matchIndex;
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;    /* new candidate => larger than match, which was smaller than current */
            matchIndex = nextPtr[1];   /* new matchIndex, larger than previous, closer to current */
            predictedSmall = predictPtr[1] + (predictPtr[1]>0);
        }
        if (matchIndex == predictedLarge) {
            *largerPtr = matchIndex;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }
            matchIndex = nextPtr[0];
            predictedLarge = predictPtr[0] + (predictPtr[0]>0);
        }

        if (!extDict || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);   /* might be wrong if actually extDict */
            match = base + matchIndex;
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
        } else {
            match = dictBase + matchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* to prepare for next usage of match[matchLength] */
        }

        if (matchLength > bestLength) {
            bestLength = matchLength;
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
        }

        if (ip+matchLength == iend) {   /* equal : no way to know if inf or sup */
            break;
        }

        if (match[matchLength] < ip[matchLength]) {   /* necessarily within buffer */
            /* match is smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop searching */
            smallerPtr = nextPtr+1;
            matchIndex = nextPtr[1];
        } else {
            /* match is larger than current */
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }
            matchIndex = nextPtr[0];
        }
    }

    *smallerPtr = *largerPtr = 0;

    if (bestLength > 384) positions = MIN(192, (U32)(bestLength - 384));   /* speed optimization */
    assert(matchEndIdx > curr + 8);
    return MAX(positions, matchEndIdx - (curr + 8));
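/* Illustrative note: ZSTD_insertBt1() inserts position curr into a binary
 * tree that keeps previous positions sorted by suffix order; the "predicted"
 * indices reuse the sort decisions made for the previously inserted position
 * to skip byte comparisons. The return value is a forward step: when the
 * region proves highly repetitive (large bestLength / matchEndIdx), the
 * caller may skip several following positions instead of inserting each one. */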
FORCE_INLINE_TEMPLATE
void ZSTD_updateTree_internal(ZSTD_matchState_t* ms,
                              const BYTE* const ip, const BYTE* const iend,
                              const U32 mls, const ZSTD_dictMode_e dictMode)
{
    U32 const target = (U32)(ip - base);
    DEBUGLOG(7, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
                idx, target, dictMode);

    while(idx < target) {
        U32 const forward = ZSTD_insertBt1(ms, base+idx, iend, target, mls, dictMode == ZSTD_extDict);
        idx += forward;
    }
    assert((size_t)(ip - base) <= (size_t)(U32)(-1));
    assert((size_t)(iend - base) <= (size_t)(U32)(-1));
FORCE_INLINE_TEMPLATE U32
ZSTD_insertBtAndGetAllMatches(ZSTD_match_t* matches,   /* store result (found matches) in this table (presumed large enough) */
                              ZSTD_matchState_t* ms, U32* nextToUpdate3,
                              const BYTE* const ip, const BYTE* const iLimit,
                              const ZSTD_dictMode_e dictMode,
                              const U32 rep[ZSTD_REP_NUM], const U32 ll0,
                              const U32 lengthToBeat, const U32 mls)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const curr = (U32)(ip-base);
    U32 const hashLog = cParams->hashLog;
    U32 const minMatch = (mls==3) ? 3 : 4;
    U32 matchIndex = hashTable[h];
    U32 const btLog = cParams->chainLog - 1;
    U32 const btMask = (1U << btLog) - 1;
    size_t commonLengthSmaller=0, commonLengthLarger=0;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    U32 const btLow = (btMask >= curr) ? 0 : curr - btMask;
    U32 const matchLow = windowLow ? windowLow : 1;
    U32* smallerPtr = bt + 2*(curr&btMask);
    U32* largerPtr  = bt + 2*(curr&btMask) + 1;
    U32 matchEndIdx = curr+8+1;   /* farthest referenced position of any match => detects repetitive patterns */
    U32 nbCompares = 1U << cParams->searchLog;

    const ZSTD_compressionParameters* const dmsCParams =

    U32 const dmsBtLow = dictMode == ZSTD_dictMatchState && dmsBtMask < dmsHighLimit - dmsLowLimit ? dmsHighLimit - dmsBtMask : dmsLowLimit;

    size_t bestLength = lengthToBeat-1;
    DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", curr);
    /* check repCodes */
    for (repCode = ll0; repCode < lastR; repCode++) {
        U32 const repOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
        U32 const repIndex = curr - repOffset;

        assert(curr >= dictLimit);
        if (repOffset-1 < curr-dictLimit) {   /* equivalent to `curr > repIndex >= dictLimit` */
            repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch;

                dmsBase + repIndex - dmsIndexDelta :

        assert(curr >= windowLow);
            && ( ((repOffset-1) < curr - windowLow)   /* equivalent to `curr > repIndex >= windowLow` */
               & (((U32)((dictLimit-1) - repIndex) >= 3) ) )   /* intentional overflow: do not test positions overlapping 2 memory segments */

            && ( ((repOffset-1) < curr - (dmsLowLimit + dmsIndexDelta))   /* equivalent to `curr > repIndex >= dmsLowLimit` */
               & ((U32)((dictLimit-1) - repIndex) >= 3) )

        if (repLen > bestLength) {
            DEBUGLOG(8, "found repCode %u (ll0:%u, offset:%u) of length %u",
                        repCode, ll0, repOffset, repLen);
            matches[mnum].len = (U32)repLen;
            if ( (repLen > sufficient_len)
               | (ip+repLen == iLimit) ) {   /* best possible */
    /* HC3 match finder */
    if ((mls == 3) /*static*/ && (bestLength < mls)) {
        U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, nextToUpdate3, ip);
        if ((matchIndex3 >= matchLow)
          & (curr - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {

            const BYTE* const match = base + matchIndex3;

            const BYTE* const match = dictBase + matchIndex3;

            DEBUGLOG(8, "found small match with hlog3, of length %u",
                        (U32)mlen);
            assert(curr > matchIndex3);
            matches[0].len = (U32)mlen;
            if ( (mlen > sufficient_len) |
                 (ip+mlen == iLimit) ) {   /* best possible length */
    for (; nbCompares && (matchIndex >= matchLow); --nbCompares) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        assert(curr > matchIndex);

        if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);   /* ensure the condition is correct when !extDict */
            match = base + matchIndex;
            if (matchIndex >= dictLimit) assert(memcmp(match, ip, matchLength) == 0);   /* ensure early section of match is equal as expected */
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iLimit);
        } else {
            match = dictBase + matchIndex;
            assert(memcmp(match, ip, matchLength) == 0);   /* ensure early section of match is equal as expected */
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* prepare for match[matchLength] read */
        }

        if (matchLength > bestLength) {
            DEBUGLOG(8, "found match of length %u at distance %u (offBase=%u)",
                        (U32)matchLength, curr - matchIndex, OFFSET_TO_OFFBASE(curr - matchIndex));
            assert(matchEndIdx > matchIndex);
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
            bestLength = matchLength;
            matches[mnum].len = (U32)matchLength;
            if ( (matchLength > ZSTD_OPT_NUM)
               | (ip+matchLength == iLimit) ) {   /* equal : no way to know if inf or sup */
        }

        if (match[matchLength] < ip[matchLength]) {
            /* match smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;
            matchIndex = nextPtr[1];
        } else {
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }
            matchIndex = nextPtr[0];
        }
    }
    *smallerPtr = *largerPtr = 0;

    assert(nbCompares <= (1U << ZSTD_SEARCHLOG_MAX));   /* Check we haven't underflowed. */
    /* dictMatchState tree search */
    commonLengthSmaller = commonLengthLarger = 0;
    for (; nbCompares && (dictMatchIndex > dmsLowLimit); --nbCompares) {
        const U32* const nextPtr = dmsBt + 2*(dictMatchIndex & dmsBtMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        const BYTE* match = dmsBase + dictMatchIndex;
        matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dmsEnd, prefixStart);
        if (dictMatchIndex+matchLength >= dmsHighLimit)
            match = base + dictMatchIndex + dmsIndexDelta;   /* to prepare for next usage of match[matchLength] */

        if (matchLength > bestLength) {
            matchIndex = dictMatchIndex + dmsIndexDelta;
            DEBUGLOG(8, "found dms match of length %u at distance %u (offBase=%u)",
                        (U32)matchLength, curr - matchIndex, OFFSET_TO_OFFBASE(curr - matchIndex));
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
            bestLength = matchLength;
            matches[mnum].len = (U32)matchLength;
            if ( (matchLength > ZSTD_OPT_NUM)
               | (ip+matchLength == iLimit) ) {
        }

        if (dictMatchIndex <= dmsBtLow) { break; }   /* beyond tree size, stop the search */
        if (match[matchLength] < ip[matchLength]) {
            commonLengthSmaller = matchLength;
            dictMatchIndex = nextPtr[1];
        } else {
            /* match is larger than current */
            commonLengthLarger = matchLength;
            dictMatchIndex = nextPtr[0];
        }
    }

    assert(matchEndIdx > curr+8);
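/* Illustrative summary (editor's note): ZSTD_insertBtAndGetAllMatches()
 * gathers candidates from up to four sources, in order: repcodes (cheapest
 * to encode), the hash3 table (length-3 matches when mls==3), the main
 * binary tree, and the dictMatchState tree when present. matches[] ends up
 * sorted by strictly increasing length, so matches[nbMatches-1] is always
 * the longest candidate. */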
typedef U32 (*ZSTD_getAllMatchesFn)(
            ZSTD_match_t*, ZSTD_matchState_t*, U32*,
            const BYTE*, const BYTE*,
            const U32 rep[ZSTD_REP_NUM],
            U32 const ll0,
            U32 const lengthToBeat);

FORCE_INLINE_TEMPLATE U32 ZSTD_btGetAllMatches_internal(
        ZSTD_match_t* matches, ZSTD_matchState_t* ms, U32* nextToUpdate3,
        const BYTE* ip, const BYTE* const iHighLimit,
        const U32 rep[ZSTD_REP_NUM], U32 const ll0, U32 const lengthToBeat,
        const ZSTD_dictMode_e dictMode, const U32 mls)
{
    DEBUGLOG(8, "ZSTD_BtGetAllMatches(dictMode=%d, mls=%u)", (int)dictMode, mls);
#define ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, mls) ZSTD_btGetAllMatches_##dictMode##_##mls

#define GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, mls)            \
    static U32 ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, mls)(      \
            ZSTD_match_t* matches,                             \
            ZSTD_matchState_t* ms,                             \
            U32* nextToUpdate3,                                \
            const BYTE* ip,                                    \
            const BYTE* const iHighLimit,                      \
            const U32 rep[ZSTD_REP_NUM],                       \
            U32 const ll0,                                     \
            U32 const lengthToBeat)                            \
    {                                                          \
        return ZSTD_btGetAllMatches_internal(                  \
                matches, ms, nextToUpdate3, ip, iHighLimit,    \
                rep, ll0, lengthToBeat, ZSTD_##dictMode, mls); \
    }

#define GEN_ZSTD_BT_GET_ALL_MATCHES(dictMode) \
    GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, 3) \
    GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, 4) \
    GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, 5) \
    GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, 6)

#define ZSTD_BT_GET_ALL_MATCHES_ARRAY(dictMode)  \
    {                                            \
        ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, 3), \
        ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, 4), \
        ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, 5), \
        ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, 6)  \
    }

    return getAllMatchesFns[(int)dictMode][mls - 3];
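/* Illustrative note: the GEN_* macros stamp out one specialized search
 * function per (dictMode, mls) pair, and the selector above indexes a 2D
 * table of them; e.g. dictMode==ZSTD_noDict with mls==5 resolves to
 * ZSTD_btGetAllMatches_noDict_5. This removes per-call branches on dictMode
 * and mls from the hot search loop (mls is first bounded to the 3..6 range,
 * hence the mls - 3 index). */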
static void ZSTD_optLdm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes)
{
    while (currPos && rawSeqStore->pos < rawSeqStore->size) {

    if (currPos == 0 || rawSeqStore->pos == rawSeqStore->size) {
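/* Illustrative note: this advances the long-distance matcher's raw sequence
 * store by nbBytes, consuming whole sequences while possible and recording
 * the leftover inside the current sequence (posInSequence), so LDM candidates
 * stay aligned with the optimal parser's position in the block. */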
static void
ZSTD_opt_getNextMatchAndUpdateSeqStore(ZSTD_optLdm_t* optLdm, U32 currPosInBlock,
                                       U32 blockBytesRemaining)
{
    U32 literalsBytesRemaining;
    U32 matchBytesRemaining;

    currBlockEndPos = currPosInBlock + blockBytesRemaining;

    matchBytesRemaining = (literalsBytesRemaining == 0) ?

    /* If there are more literal bytes than bytes remaining in block, no ldm is possible */
    if (literalsBytesRemaining >= blockBytesRemaining) {
        ZSTD_optLdm_skipRawSeqStoreBytes(&optLdm->seqStore, blockBytesRemaining);

        ZSTD_optLdm_skipRawSeqStoreBytes(&optLdm->seqStore, currBlockEndPos - currPosInBlock);

        ZSTD_optLdm_skipRawSeqStoreBytes(&optLdm->seqStore, literalsBytesRemaining + matchBytesRemaining);
static void ZSTD_optLdm_maybeAddMatch(ZSTD_match_t* matches, U32* nbMatches,
                                      const ZSTD_optLdm_t* optLdm, U32 currPosInBlock)
{
    if (currPosInBlock < optLdm->startPosInBlock
      || candidateMatchLength < MINMATCH) {

    if (*nbMatches == 0 || ((candidateMatchLength > matches[*nbMatches-1].len) && *nbMatches < ZSTD_OPT_NUM)) {
        DEBUGLOG(6, "ZSTD_optLdm_maybeAddMatch(): Adding ldm candidate match (offBase: %u matchLength %u) at block position=%u",
                 candidateOffBase, candidateMatchLength, currPosInBlock);
        matches[*nbMatches].len = candidateMatchLength;
        matches[*nbMatches].off = candidateOffBase;
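/* Illustrative note: an LDM candidate is appended only when it is longer than
 * the longest match found so far and there is room below ZSTD_OPT_NUM, which
 * preserves the "matches[] sorted by increasing length" invariant that the
 * parser relies on. */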
static void
ZSTD_optLdm_processMatchCandidate(ZSTD_optLdm_t* optLdm, ZSTD_match_t* matches, U32* nbMatches,
                                  U32 currPosInBlock, U32 remainingBytes)
{
        ZSTD_optLdm_skipRawSeqStoreBytes(&optLdm->seqStore, posOvershoot);

    ZSTD_opt_getNextMatchAndUpdateSeqStore(optLdm, currPosInBlock, remainingBytes);

    ZSTD_optLdm_maybeAddMatch(matches, nbMatches, optLdm, currPosInBlock);
static void listStats(const U32* table, int lastEltID)
{
    int const nbElts = lastEltID + 1;
    int enb;
    for (enb=0; enb < nbElts; enb++) {
#define LIT_PRICE(_p)    (int)ZSTD_rawLiteralsCost(_p, 1, optStatePtr, optLevel)
#define LL_PRICE(_l)     (int)ZSTD_litLengthPrice(_l, optStatePtr, optLevel)
#define LL_INCPRICE(_l)  (LL_PRICE(_l) - LL_PRICE(_l-1))
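/* Illustrative note: LIT_PRICE(_p) prices the single literal byte at _p, and
 * LL_INCPRICE(_l) is the marginal cost of growing a literal run from _l-1 to
 * _l literals. The parser uses the latter to decide whether extending the
 * current run is cheaper than ending a sequence: crossing a litLength-code
 * boundary makes LL_INCPRICE jump, which is exactly the signal it needs. */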
FORCE_INLINE_TEMPLATE size_t
ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
                               seqStore_t* seqStore,
                               U32 rep[ZSTD_REP_NUM],
                               const void* src, size_t srcSize,
                               const int optLevel,
                               const ZSTD_dictMode_e dictMode)
{
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - 8;
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const minMatch = (cParams->minMatch == 3) ? 3 : 4;

    ZSTD_opt_getNextMatchAndUpdateSeqStore(&optLdm, (U32)(ip-istart), (U32)(iend-ip));

    DEBUGLOG(5, "ZSTD_compressBlock_opt_generic: current=%u, prefix=%u, nextToUpdate=%u",

    ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize, optLevel);
    ip += (ip==prefixStart);
    /* Match Loop */
    while (ip < ilimit) {
        U32 cur, last_pos = 0;

        /* find first match */
        {   U32 const litlen = (U32)(ip - anchor);
            U32 const ll0 = !litlen;
            U32 nbMatches = getAllMatches(matches, ms, &nextToUpdate3, ip, iend, rep, ll0, minMatch);
            ZSTD_optLdm_processMatchCandidate(&optLdm, matches, &nbMatches,
                                              (U32)(ip-istart), (U32)(iend-ip));
            if (!nbMatches) {
                DEBUGLOG(8, "no match found at cPos %u", (unsigned)(ip-istart));
                ip++;
                continue;
            }

            /* initialize opt[0] */
            opt[0].litlen = litlen;
            ZSTD_memcpy(&opt[0].rep, rep, sizeof(opt[0].rep));
        {   U32 const maxML = matches[nbMatches-1].len;
            U32 const maxOffBase = matches[nbMatches-1].off;
            DEBUGLOG(6, "found %u matches of maxLength=%u and maxOffBase=%u at cPos=%u => start new series",
                        nbMatches, maxML, maxOffBase, (U32)(ip-prefixStart));

            if (maxML > sufficient_len) {
                lastStretch.mlen = maxML;
                lastStretch.off = maxOffBase;
                DEBUGLOG(6, "large match (%u>%u) => immediate encoding",
                            maxML, sufficient_len);

            assert(opt[0].price >= 0);
            /* set prices for first positions (literals) */
            for (pos = 1; pos < minMatch; pos++) {
                opt[pos].litlen = litlen + pos;

            /* set prices using initial matches */
            for (matchNb = 0; matchNb < nbMatches; matchNb++) {
                U32 const offBase = matches[matchNb].off;
                U32 const end = matches[matchNb].len;
                for ( ; pos <= end ; pos++ ) {
                    int const matchPrice = (int)ZSTD_getMatchPrice(offBase, pos, optStatePtr, optLevel);
                    int const sequencePrice = opt[0].price + matchPrice;
                    DEBUGLOG(7, "rPos:%u => set initial price : %.2f",
                                pos, ZSTD_fCost(sequencePrice));
                    opt[pos].mlen = pos;
                    opt[pos].off = offBase;
                    opt[pos].litlen = 0;   /* end of match */
                    opt[pos].price = sequencePrice + LL_PRICE(0);
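/* Illustrative note: this seeding fills opt[pos] for every first position
 * reachable in one step: positions below minMatch can only be reached by
 * literals, and each initial match of length L makes every position up to L
 * reachable at (match price + cost of zero trailing literals). The loop below
 * then refines opt[] position by position, a shortest-path search in which
 * opt[pos].price is the cheapest known way to cover the next pos bytes. */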
        /* check further positions */
        for (cur = 1; cur <= last_pos; cur++) {
            const BYTE* const inr = ip + cur;
            DEBUGLOG(7, "cPos:%zi==rPos:%u", inr-istart, cur);

            /* Fix current position with one literal if cheaper */
            {   U32 const litlen = opt[cur-1].litlen + 1;
                int const price = opt[cur-1].price
                                + LIT_PRICE(ip+cur-1)
                                + LL_INCPRICE(litlen);
                assert(price < 1000000000);
                if (price <= opt[cur].price) {
                    ZSTD_optimal_t const prevMatch = opt[cur];
                    DEBUGLOG(7, "cPos:%zi==rPos:%u : better price (%.2f<=%.2f) using literal (ll==%u) (hist:%u,%u,%u)",
                                inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price), litlen,
                                opt[cur-1].rep[0], opt[cur-1].rep[1], opt[cur-1].rep[2]);
                    opt[cur] = opt[cur-1];
                    opt[cur].litlen = litlen;
                    opt[cur].price = price;
                    if ( (optLevel >= 1)
                      && (prevMatch.litlen == 0)
                      && LIKELY(ip + cur < iend)
                       ) {
                        /* check if it would be cheaper to keep the previous match, then add one literal */
                        DEBUGLOG(7, "then at next rPos %u : match+1lit %.2f vs %ulits %.2f",
                                    cur+1, ZSTD_fCost(with1literal), litlen+1, ZSTD_fCost(withMoreLiterals));
                        if ( (with1literal < withMoreLiterals)
                          && (with1literal < opt[cur+1].price) ) {
                            /* update offset history, since the previous match is kept */
                            U32 const prev = cur - prevMatch.mlen;
                            repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
                            DEBUGLOG(7, "==> match+1lit is cheaper (%.2f < %.2f) (hist:%u,%u,%u) !",
                                        ZSTD_fCost(with1literal), ZSTD_fCost(withMoreLiterals),
                                        newReps.rep[0], newReps.rep[1], newReps.rep[2] );
                            opt[cur+1] = prevMatch;   /* mlen & offbase */
                            opt[cur+1].litlen = 1;
                            opt[cur+1].price = with1literal;
                            if (last_pos < cur+1) last_pos = cur+1;
                        }
                    }
                } else {
                    DEBUGLOG(7, "cPos:%zi==rPos:%u : literal would cost more (%.2f>%.2f)",
                                inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price));
                }
            assert(cur >= opt[cur].mlen);
            if (opt[cur].litlen == 0) {
                /* just finished a match => alter offset history */
                U32 const prev = cur - opt[cur].mlen;

            if (inr > ilimit) continue;   /* last match must start at a minimum distance of 8 from oend */
            if (cur == last_pos) break;

                DEBUGLOG(7, "skip current position : next rPos(%u) price is cheaper", cur+1);
            assert(opt[cur].price >= 0);
            {   U32 const ll0 = (opt[cur].litlen == 0);
                int const previousPrice = opt[cur].price;
                int const basePrice = previousPrice + LL_PRICE(0);
                U32 nbMatches = getAllMatches(matches, ms, &nextToUpdate3, inr, iend, opt[cur].rep, ll0, minMatch);

                ZSTD_optLdm_processMatchCandidate(&optLdm, matches, &nbMatches,
                                                  (U32)(inr-istart), (U32)(iend-inr));
                if (!nbMatches) {
                    DEBUGLOG(7, "rPos:%u : no match found", cur);
                    continue;
                }
                {   U32 const longestML = matches[nbMatches-1].len;
                    DEBUGLOG(7, "cPos:%zi==rPos:%u, found %u matches, of longest ML=%u",
                                inr-istart, cur, nbMatches, longestML);

                    if ( (longestML > sufficient_len)
                      || (ip + cur + longestML >= iend) ) {
                        /* immediate encoding */
                        lastStretch.mlen = longestML;
                        lastStretch.off = matches[nbMatches-1].off;
                        last_pos = cur + longestML;
                    /* set prices using matches found at position == cur */
                    for (matchNb = 0; matchNb < nbMatches; matchNb++) {
                        U32 const offset = matches[matchNb].off;
                        U32 const lastML = matches[matchNb].len;
                        U32 const startML = (matchNb>0) ? matches[matchNb-1].len+1 : minMatch;
                        U32 mlen;

                        DEBUGLOG(7, "testing match %u => offBase=%4u, mlen=%2u, llen=%2u",
                                    matchNb, matches[matchNb].off, lastML, opt[cur].litlen);

                        for (mlen = lastML; mlen >= startML; mlen--) {   /* scan downward */
                            U32 const pos = cur + mlen;
                            int const price = basePrice + (int)ZSTD_getMatchPrice(offset, mlen, optStatePtr, optLevel);

                            if ((pos > last_pos) || (price < opt[pos].price)) {
                                DEBUGLOG(7, "rPos:%u (ml=%2u) => new better price (%.2f<%.2f)",
                                            pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
                                while (last_pos < pos) {
                                    /* fill empty positions, for future comparisons */
                                    last_pos++;
                                    opt[last_pos].price = ZSTD_MAX_PRICE;
                                    opt[last_pos].litlen = !0;   /* just needs to be != 0, to mean "not an end of match" */
                                }
                                opt[pos].mlen = mlen;
                                opt[pos].off = offset;
                                opt[pos].litlen = 0;
                                opt[pos].price = price;
                            } else {
                                DEBUGLOG(7, "rPos:%u (ml=%2u) => new price is worse (%.2f>=%.2f)",
                                            pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
                                if (optLevel==0) break;   /* early update abort; gets ~+10% speed for about -0.01 ratio loss */
                            }
                        }
                    }
        lastStretch = opt[last_pos];
        cur = last_pos - lastStretch.mlen;

        assert(opt[0].mlen == 0);
        if (lastStretch.mlen==0) {
            /* no match on this path: the whole segment ends as literals */
            assert(lastStretch.litlen == (ip - anchor) + last_pos);

        if (lastStretch.litlen == 0) {
            /* path finishes directly on a match: update offset history now */

        cur -= lastStretch.litlen;
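/* Illustrative note: at this point opt[] holds, for each position, the
 * cheapest known way to reach it plus a back-pointer (its mlen/litlen).
 * The block below walks that chain backward from last_pos, copying the
 * selected stretches into scratch slots just above cur (storeEnd = cur + 2,
 * filled downward to storeStart) so they can then be replayed in forward
 * order into the seqStore. */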
        /* reverse traversal */
        {   U32 const storeEnd = cur + 2;
            U32 storeStart = storeEnd;
            U32 stretchPos = cur;

            DEBUGLOG(6, "start reverse traversal (last_pos:%u, cur:%u)",
                        last_pos, cur); (void)last_pos;
            DEBUGLOG(6, "last stretch copied into pos=%u (llen=%u,mlen=%u,ofc=%u)",
                        storeEnd, lastStretch.litlen, lastStretch.mlen, lastStretch.off);
            if (lastStretch.litlen > 0) {
                /* last sequence is unfinished: a stretch of trailing literals */
                opt[storeEnd].litlen = lastStretch.litlen;
                opt[storeEnd].mlen = 0;
                storeStart = storeEnd-1;
                opt[storeStart] = lastStretch;
            } else {
                opt[storeEnd] = lastStretch;
                storeStart = storeEnd;
            }
            while (1) {
                ZSTD_optimal_t nextStretch = opt[stretchPos];
                opt[storeStart].litlen = nextStretch.litlen;
                DEBUGLOG(6, "selected sequence (llen=%u,mlen=%u,ofc=%u)",
                            opt[storeStart].litlen, opt[storeStart].mlen, opt[storeStart].off);
                if (nextStretch.mlen == 0) {
                    /* reaching beginning of segment */
                    break;
                }
                storeStart--;
                opt[storeStart] = nextStretch;
                stretchPos -= nextStretch.litlen + nextStretch.mlen;
            }
            /* save sequences */
            DEBUGLOG(6, "sending selected sequences into seqStore");
            {   U32 storePos;
                for (storePos=storeStart; storePos <= storeEnd; storePos++) {
                    U32 const llen = opt[storePos].litlen;
                    U32 const mlen = opt[storePos].mlen;
                    U32 const offBase = opt[storePos].off;
                    U32 const advance = llen + mlen;
                    DEBUGLOG(6, "considering seq starting at %zi, llen=%u, mlen=%u",
                                anchor - istart, (unsigned)llen, (unsigned)mlen);

                    if (mlen==0) {   /* only literals => must be last "sequence" */
                        assert(storePos == storeEnd);

                    assert(anchor + llen <= iend);
                    ZSTD_updateStats(optStatePtr, llen, anchor, offBase, mlen);

            DEBUGLOG(7, "new offset history : %u, %u, %u", rep[0], rep[1], rep[2]);

        ZSTD_setBasePrices(optStatePtr, optLevel);
    }   /* while (ip < ilimit) */

    /* return the last literals size */
    return (size_t)(iend - anchor);
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
static size_t ZSTD_compressBlock_opt0(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, dictMode);
}
#endif

#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
static size_t ZSTD_compressBlock_opt2(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
{
    return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, dictMode);
}
#endif

#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
size_t ZSTD_compressBlock_btopt(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                const void* src, size_t srcSize)
{
    DEBUGLOG(5, "ZSTD_compressBlock_btopt");
    return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_noDict);
}
#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
static void ZSTD_initStats_ultra(ZSTD_matchState_t* ms, seqStore_t* seqStore,
                                 U32 rep[ZSTD_REP_NUM],
                                 const void* src, size_t srcSize)
{
    U32 tmpRep[ZSTD_REP_NUM];   /* updated rep codes will sink here */
    ZSTD_memcpy(tmpRep, rep, sizeof(tmpRep));

    DEBUGLOG(4, "ZSTD_initStats_ultra (srcSize=%zu)", srcSize);

    ZSTD_compressBlock_opt2(ms, seqStore, tmpRep, src, srcSize, ZSTD_noDict);   /* generate stats into ms->opt */

size_t ZSTD_compressBlock_btultra(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                  const void* src, size_t srcSize)
{
    DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
    return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_noDict);
}

size_t ZSTD_compressBlock_btultra2(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                   const void* src, size_t srcSize)
{
    DEBUGLOG(5, "ZSTD_compressBlock_btultra2 (srcSize=%zu)", srcSize);

    ZSTD_initStats_ultra(ms, seqStore, rep, src, srcSize);

    return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_noDict);
}
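/* Illustrative note: btultra2 runs a throwaway first pass
 * (ZSTD_initStats_ultra) over the first block purely to populate the
 * statistics tables, then compresses the same data again with realistic
 * prices. Rep codes mutated by the dry run are discarded via tmpRep, so the
 * real pass starts from the original offset history. */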
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
size_t ZSTD_compressBlock_btopt_dictMatchState(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                               const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
}

size_t ZSTD_compressBlock_btopt_extDict(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                        const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
}
#endif

#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
size_t ZSTD_compressBlock_btultra_dictMatchState(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                                 const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
}

size_t ZSTD_compressBlock_btultra_extDict(ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
                                          const void* src, size_t srcSize)
{
    return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
}
#endif

#endif   /* matches the #if !defined(ZSTD_EXCLUDE_*) guard at the top of the file */