zstdlib 0.14.0-arm64-darwin → 0.15.0-arm64-darwin
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +5 -0
- data/ext/zstdlib_c/extconf.rb +1 -1
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/bits.h +92 -87
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/bitstream.h +26 -29
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/compiler.h +36 -22
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/cpu.h +1 -1
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/debug.h +0 -9
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/error_private.c +1 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/error_private.h +0 -10
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/fse.h +2 -17
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/fse_decompress.c +2 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/huf.h +0 -9
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/mem.h +7 -11
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/pool.h +0 -9
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/portability_macros.h +22 -9
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/threading.h +0 -8
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/xxhash.h +93 -19
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/zstd_deps.h +12 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/zstd_internal.h +1 -69
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/zstd_trace.h +5 -12
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/hist.c +10 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/hist.h +7 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress.c +1057 -367
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_internal.h +227 -125
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_literals.c +1 -1
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_sequences.c +7 -7
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_sequences.h +7 -6
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_superblock.c +17 -17
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_cwksp.h +41 -24
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_double_fast.c +58 -50
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_double_fast.h +4 -12
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_fast.c +91 -74
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_fast.h +4 -12
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_lazy.c +64 -64
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_lazy.h +30 -39
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_ldm.c +48 -33
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_ldm.h +6 -14
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_opt.c +55 -51
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_opt.h +8 -16
- data/ext/zstdlib_c/zstd-1.5.7/lib/compress/zstd_preSplit.c +238 -0
- data/ext/zstdlib_c/zstd-1.5.7/lib/compress/zstd_preSplit.h +33 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstdmt_compress.c +134 -93
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstdmt_compress.h +4 -15
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/huf_decompress_amd64.S +10 -3
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_decompress.c +14 -11
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_decompress_block.c +6 -12
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_decompress_internal.h +5 -5
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/zdict.h +15 -8
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/zstd.h +241 -132
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/zstd_errors.h +1 -8
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzwrite.c +2 -1
- data/lib/2.4/zstdlib_c.bundle +0 -0
- data/lib/2.5/zstdlib_c.bundle +0 -0
- data/lib/2.6/zstdlib_c.bundle +0 -0
- data/lib/2.7/zstdlib_c.bundle +0 -0
- data/lib/3.0/zstdlib_c.bundle +0 -0
- data/lib/3.1/zstdlib_c.bundle +0 -0
- data/lib/3.2/zstdlib_c.bundle +0 -0
- data/lib/3.3/zstdlib_c.bundle +0 -0
- data/lib/3.4/zstdlib_c.bundle +0 -0
- metadata +75 -73
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/allocations.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/debug.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/entropy_common.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/pool.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/threading.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/xxhash.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/common/zstd_common.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/clevels.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/fse_compress.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/huf_compress.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_literals.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_compress_superblock.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_ldm_geartab.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/huf_decompress.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_ddict.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_ddict.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/decompress/zstd_decompress_block.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzclose.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzcompatibility.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzguts.h +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzlib.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/gzread.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/zstd_zlibwrapper.c +0 -0
- data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/zlibWrapper/zstd_zlibwrapper.h +5 -5
data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_opt.c

@@ -408,7 +408,7 @@ MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
    Assumption : always within prefix (i.e. not within extDict) */
 static
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
-U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms,
+U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_MatchState_t* ms,
                                        U32* nextToUpdate3,
                                        const BYTE* const ip)
 {
@@ -440,7 +440,7 @@ U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms,
 static
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 U32 ZSTD_insertBt1(
-                const ZSTD_matchState_t* ms,
+                const ZSTD_MatchState_t* ms,
                 const BYTE* const ip, const BYTE* const iend,
                 U32 const target,
                 U32 const mls, const int extDict)
@@ -560,7 +560,7 @@ U32 ZSTD_insertBt1(
 FORCE_INLINE_TEMPLATE
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 void ZSTD_updateTree_internal(
-                ZSTD_matchState_t* ms,
+                ZSTD_MatchState_t* ms,
                 const BYTE* const ip, const BYTE* const iend,
                 const U32 mls, const ZSTD_dictMode_e dictMode)
 {
@@ -580,7 +580,7 @@ void ZSTD_updateTree_internal(
     ms->nextToUpdate = target;
 }
 
-void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) {
+void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend) {
     ZSTD_updateTree_internal(ms, ip, iend, ms->cParams.minMatch, ZSTD_noDict);
 }
 
@@ -589,7 +589,7 @@ ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 U32
 ZSTD_insertBtAndGetAllMatches (
                 ZSTD_match_t* matches,  /* store result (found matches) in this table (presumed large enough) */
-                ZSTD_matchState_t* ms,
+                ZSTD_MatchState_t* ms,
                 U32* nextToUpdate3,
                 const BYTE* const ip, const BYTE* const iLimit,
                 const ZSTD_dictMode_e dictMode,
@@ -625,7 +625,7 @@ ZSTD_insertBtAndGetAllMatches (
     U32 mnum = 0;
     U32 nbCompares = 1U << cParams->searchLog;
 
-    const ZSTD_matchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
+    const ZSTD_MatchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
     const ZSTD_compressionParameters* const dmsCParams =
                                       dictMode == ZSTD_dictMatchState ? &dms->cParams : NULL;
     const BYTE* const dmsBase = dictMode == ZSTD_dictMatchState ? dms->window.base : NULL;
@@ -664,13 +664,13 @@ ZSTD_insertBtAndGetAllMatches (
                 assert(curr >= windowLow);
                 if ( dictMode == ZSTD_extDict
                   && ( ((repOffset-1) /*intentional overflow*/ < curr - windowLow)  /* equivalent to `curr > repIndex >= windowLow` */
-                     & ((
+                     & (ZSTD_index_overlap_check(dictLimit, repIndex)) )
                   && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                     repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dictEnd, prefixStart) + minMatch;
                 }
                 if (dictMode == ZSTD_dictMatchState
                   && ( ((repOffset-1) /*intentional overflow*/ < curr - (dmsLowLimit + dmsIndexDelta))  /* equivalent to `curr > repIndex >= dmsLowLimit` */
-                     & ((
+                     & (ZSTD_index_overlap_check(dictLimit, repIndex)) )
                   && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                     repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dmsEnd, prefixStart) + minMatch;
             } }
@@ -819,7 +819,7 @@ ZSTD_insertBtAndGetAllMatches (
 
 typedef U32 (*ZSTD_getAllMatchesFn)(
     ZSTD_match_t*,
-    ZSTD_matchState_t*,
+    ZSTD_MatchState_t*,
     U32*,
     const BYTE*,
     const BYTE*,
@@ -831,7 +831,7 @@ FORCE_INLINE_TEMPLATE
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 U32 ZSTD_btGetAllMatches_internal(
         ZSTD_match_t* matches,
-        ZSTD_matchState_t* ms,
+        ZSTD_MatchState_t* ms,
         U32* nextToUpdate3,
         const BYTE* ip,
         const BYTE* const iHighLimit,
@@ -854,7 +854,7 @@ U32 ZSTD_btGetAllMatches_internal(
 #define GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, mls) \
     static U32 ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, mls)( \
             ZSTD_match_t* matches, \
-            ZSTD_matchState_t* ms, \
+            ZSTD_MatchState_t* ms, \
             U32* nextToUpdate3, \
             const BYTE* ip, \
             const BYTE* const iHighLimit, \
@@ -886,7 +886,7 @@ GEN_ZSTD_BT_GET_ALL_MATCHES(dictMatchState)
 }
 
 static ZSTD_getAllMatchesFn
-ZSTD_selectBtGetAllMatches(ZSTD_matchState_t const* ms, ZSTD_dictMode_e const dictMode)
+ZSTD_selectBtGetAllMatches(ZSTD_MatchState_t const* ms, ZSTD_dictMode_e const dictMode)
 {
     ZSTD_getAllMatchesFn const getAllMatchesFns[3][4] = {
         ZSTD_BT_GET_ALL_MATCHES_ARRAY(noDict),
@@ -905,7 +905,7 @@ ZSTD_selectBtGetAllMatches(ZSTD_matchState_t const* ms, ZSTD_dictMode_e const dictMode)
 
 /* Struct containing info needed to make decision about ldm inclusion */
 typedef struct {
-    rawSeqStore_t seqStore;         /* External match candidates store for this block */
+    RawSeqStore_t seqStore;         /* External match candidates store for this block */
     U32 startPosInBlock;            /* Start position of the current match candidate */
     U32 endPosInBlock;              /* End position of the current match candidate */
     U32 offset;                     /* Offset of the match candidate */
@@ -915,7 +915,7 @@ typedef struct {
  * Moves forward in @rawSeqStore by @nbBytes,
  * which will update the fields 'pos' and 'posInSequence'.
  */
-static void ZSTD_optLdm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes)
+static void ZSTD_optLdm_skipRawSeqStoreBytes(RawSeqStore_t* rawSeqStore, size_t nbBytes)
 {
     U32 currPos = (U32)(rawSeqStore->posInSequence + nbBytes);
     while (currPos && rawSeqStore->pos < rawSeqStore->size) {
@@ -972,7 +972,7 @@ ZSTD_opt_getNextMatchAndUpdateSeqStore(ZSTD_optLdm_t* optLdm, U32 currPosInBlock
         return;
     }
 
-    /* Matches may be < MINMATCH by this process. In that case, we will reject them
+    /* Matches may be < minMatch by this process. In that case, we will reject them
        when we are deciding whether or not to add the ldm */
     optLdm->startPosInBlock = currPosInBlock + literalsBytesRemaining;
     optLdm->endPosInBlock = optLdm->startPosInBlock + matchBytesRemaining;
@@ -994,7 +994,8 @@ ZSTD_opt_getNextMatchAndUpdateSeqStore(ZSTD_optLdm_t* optLdm, U32 currPosInBlock
  * into 'matches'. Maintains the correct ordering of 'matches'.
  */
 static void ZSTD_optLdm_maybeAddMatch(ZSTD_match_t* matches, U32* nbMatches,
-                                      const ZSTD_optLdm_t* optLdm, U32 currPosInBlock)
+                                      const ZSTD_optLdm_t* optLdm, U32 currPosInBlock,
+                                      U32 minMatch)
 {
     U32 const posDiff = currPosInBlock - optLdm->startPosInBlock;
     /* Note: ZSTD_match_t actually contains offBase and matchLength (before subtracting MINMATCH) */
@@ -1003,7 +1004,7 @@ static void ZSTD_optLdm_maybeAddMatch(ZSTD_match_t* matches, U32* nbMatches,
     /* Ensure that current block position is not outside of the match */
     if (currPosInBlock < optLdm->startPosInBlock
       || currPosInBlock >= optLdm->endPosInBlock
-      || candidateMatchLength < MINMATCH) {
+      || candidateMatchLength < minMatch) {
         return;
     }
 
@@ -1023,7 +1024,8 @@ static void ZSTD_optLdm_maybeAddMatch(ZSTD_match_t* matches, U32* nbMatches,
 static void
 ZSTD_optLdm_processMatchCandidate(ZSTD_optLdm_t* optLdm,
                                   ZSTD_match_t* matches, U32* nbMatches,
-                                  U32 currPosInBlock, U32 remainingBytes)
+                                  U32 currPosInBlock, U32 remainingBytes,
+                                  U32 minMatch)
 {
     if (optLdm->seqStore.size == 0 || optLdm->seqStore.pos >= optLdm->seqStore.size) {
         return;
@@ -1040,7 +1042,7 @@ ZSTD_optLdm_processMatchCandidate(ZSTD_optLdm_t* optLdm,
         }
         ZSTD_opt_getNextMatchAndUpdateSeqStore(optLdm, currPosInBlock, remainingBytes);
     }
-    ZSTD_optLdm_maybeAddMatch(matches, nbMatches, optLdm, currPosInBlock);
+    ZSTD_optLdm_maybeAddMatch(matches, nbMatches, optLdm, currPosInBlock, minMatch);
 }
 
 
@@ -1072,8 +1074,8 @@ listStats(const U32* table, int lastEltID)
 FORCE_INLINE_TEMPLATE
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 size_t
-ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
-                               seqStore_t* seqStore,
+ZSTD_compressBlock_opt_generic(ZSTD_MatchState_t* ms,
+                               SeqStore_t* seqStore,
                                U32 rep[ZSTD_REP_NUM],
                                const void* src, size_t srcSize,
                                const int optLevel,
@@ -1122,7 +1124,8 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
            U32 const ll0 = !litlen;
            U32 nbMatches = getAllMatches(matches, ms, &nextToUpdate3, ip, iend, rep, ll0, minMatch);
            ZSTD_optLdm_processMatchCandidate(&optLdm, matches, &nbMatches,
-                                             (U32)(ip-istart), (U32)(iend-ip));
+                                             (U32)(ip-istart), (U32)(iend-ip),
+                                             minMatch);
            if (!nbMatches) {
                DEBUGLOG(8, "no match found at cPos %u", (unsigned)(ip-istart));
                ip++;
@@ -1197,7 +1200,7 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
        for (cur = 1; cur <= last_pos; cur++) {
            const BYTE* const inr = ip + cur;
            assert(cur <= ZSTD_OPT_NUM);
-           DEBUGLOG(7, "cPos:%
+           DEBUGLOG(7, "cPos:%i==rPos:%u", (int)(inr-istart), cur);
 
            /* Fix current position with one literal if cheaper */
            {   U32 const litlen = opt[cur-1].litlen + 1;
@@ -1207,8 +1210,8 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
                assert(price < 1000000000); /* overflow check */
                if (price <= opt[cur].price) {
                    ZSTD_optimal_t const prevMatch = opt[cur];
-                   DEBUGLOG(7, "cPos:%
-                               inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price), litlen,
+                   DEBUGLOG(7, "cPos:%i==rPos:%u : better price (%.2f<=%.2f) using literal (ll==%u) (hist:%u,%u,%u)",
+                               (int)(inr-istart), cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price), litlen,
                                opt[cur-1].rep[0], opt[cur-1].rep[1], opt[cur-1].rep[2]);
                    opt[cur] = opt[cur-1];
                    opt[cur].litlen = litlen;
@@ -1227,34 +1230,34 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
                          && (with1literal < opt[cur+1].price) ) {
                        /* update offset history - before it disappears */
                        U32 const prev = cur - prevMatch.mlen;
-                       repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
+                       Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
                        assert(cur >= prevMatch.mlen);
                        DEBUGLOG(7, "==> match+1lit is cheaper (%.2f < %.2f) (hist:%u,%u,%u) !",
                                    ZSTD_fCost(with1literal), ZSTD_fCost(withMoreLiterals),
                                    newReps.rep[0], newReps.rep[1], newReps.rep[2] );
                        opt[cur+1] = prevMatch;  /* mlen & offbase */
-                       ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(repcodes_t));
+                       ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(Repcodes_t));
                        opt[cur+1].litlen = 1;
                        opt[cur+1].price = with1literal;
                        if (last_pos < cur+1) last_pos = cur+1;
                    }
                }
            } else {
-               DEBUGLOG(7, "cPos:%
-                           inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price));
+               DEBUGLOG(7, "cPos:%i==rPos:%u : literal would cost more (%.2f>%.2f)",
+                           (int)(inr-istart), cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price));
            }
        }
 
        /* Offset history is not updated during match comparison.
        * Do it here, now that the match is selected and confirmed.
        */
-       ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(repcodes_t));
+       ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(Repcodes_t));
       assert(cur >= opt[cur].mlen);
       if (opt[cur].litlen == 0) {
           /* just finished a match => alter offset history */
           U32 const prev = cur - opt[cur].mlen;
-           repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
-           ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(repcodes_t));
+           Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
+           ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(Repcodes_t));
       }
 
       /* last match must start at a minimum distance of 8 from oend */
@@ -1276,7 +1279,8 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
            U32 matchNb;
 
            ZSTD_optLdm_processMatchCandidate(&optLdm, matches, &nbMatches,
-                                             (U32)(inr-istart), (U32)(iend-inr));
+                                             (U32)(inr-istart), (U32)(iend-inr),
+                                             minMatch);
 
            if (!nbMatches) {
                DEBUGLOG(7, "rPos:%u : no match found", cur);
@@ -1284,8 +1288,8 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
            }
 
            {   U32 const longestML = matches[nbMatches-1].len;
-               DEBUGLOG(7, "cPos:%
-                           inr-istart, cur, nbMatches, longestML);
+               DEBUGLOG(7, "cPos:%i==rPos:%u, found %u matches, of longest ML=%u",
+                           (int)(inr-istart), cur, nbMatches, longestML);
 
                if ( (longestML > sufficient_len)
                  || (cur + longestML >= ZSTD_OPT_NUM)
@@ -1353,10 +1357,10 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
     /* Update offset history */
     if (lastStretch.litlen == 0) {
         /* finishing on a match : update offset history */
-        repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
-        ZSTD_memcpy(rep, &reps, sizeof(repcodes_t));
+        Repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
+        ZSTD_memcpy(rep, &reps, sizeof(Repcodes_t));
     } else {
-        ZSTD_memcpy(rep, lastStretch.rep, sizeof(repcodes_t));
+        ZSTD_memcpy(rep, lastStretch.rep, sizeof(Repcodes_t));
         assert(cur >= lastStretch.litlen);
         cur -= lastStretch.litlen;
     }
@@ -1411,8 +1415,8 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
        U32 const mlen = opt[storePos].mlen;
        U32 const offBase = opt[storePos].off;
        U32 const advance = llen + mlen;
-       DEBUGLOG(6, "considering seq starting at %
-                   anchor - istart, (unsigned)llen, (unsigned)mlen);
+       DEBUGLOG(6, "considering seq starting at %i, llen=%u, mlen=%u",
+                   (int)(anchor - istart), (unsigned)llen, (unsigned)mlen);
 
        if (mlen==0) {  /* only literals => must be last "sequence", actually starting a new stream of sequences */
            assert(storePos == storeEnd);   /* must be last sequence */
@@ -1440,7 +1444,7 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
 
 #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
 static size_t ZSTD_compressBlock_opt0(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
 {
     return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /* optLevel */, dictMode);
@@ -1449,7 +1453,7 @@ static size_t ZSTD_compressBlock_opt0(
 
 #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
 static size_t ZSTD_compressBlock_opt2(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
 {
     return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /* optLevel */, dictMode);
@@ -1458,7 +1462,7 @@ static size_t ZSTD_compressBlock_opt2(
 
 #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
 size_t ZSTD_compressBlock_btopt(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     DEBUGLOG(5, "ZSTD_compressBlock_btopt");
@@ -1477,8 +1481,8 @@ size_t ZSTD_compressBlock_btopt(
 */
 static
 ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
-void ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
-                          seqStore_t* seqStore,
+void ZSTD_initStats_ultra(ZSTD_MatchState_t* ms,
+                          SeqStore_t* seqStore,
                           U32 rep[ZSTD_REP_NUM],
                           const void* src, size_t srcSize)
 {
@@ -1503,7 +1507,7 @@ void ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
 }
 
 size_t ZSTD_compressBlock_btultra(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
@@ -1511,7 +1515,7 @@ size_t ZSTD_compressBlock_btultra(
 }
 
 size_t ZSTD_compressBlock_btultra2(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     U32 const curr = (U32)((const BYTE*)src - ms->window.base);
@@ -1541,14 +1545,14 @@ size_t ZSTD_compressBlock_btultra2(
 
 #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
 size_t ZSTD_compressBlock_btopt_dictMatchState(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
 }
 
 size_t ZSTD_compressBlock_btopt_extDict(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
@@ -1557,14 +1561,14 @@ size_t ZSTD_compressBlock_btopt_extDict(
 
 #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
 size_t ZSTD_compressBlock_btultra_dictMatchState(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
 }
 
 size_t ZSTD_compressBlock_btultra_extDict(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         const void* src, size_t srcSize)
 {
     return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
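Beyond the type renames, the functional change in these zstd_opt.c hunks is that the optimal parser now threads its current minMatch value into the long-distance-matching (LDM) candidate helpers, which previously compared against a compile-time constant, so LDM candidates shorter than the active minimum match length are rejected. The following is a minimal standalone sketch of that guard for illustration only; every name in it except minMatch is invented here rather than taken from zstd:

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the guard added to ZSTD_optLdm_maybeAddMatch():
 * an LDM candidate is kept only if the current block position lies inside
 * the candidate's range AND the candidate is at least `minMatch` bytes long. */
static int keep_ldm_candidate(uint32_t posInBlock,
                              uint32_t candStart, uint32_t candEnd,
                              uint32_t candLen, uint32_t minMatch)
{
    if (posInBlock < candStart) return 0;   /* candidate not reached yet */
    if (posInBlock >= candEnd)  return 0;   /* candidate already passed  */
    if (candLen < minMatch)     return 0;   /* too short for this parser */
    return 1;
}

int main(void)
{
    /* With minMatch set to 4, a 3-byte candidate is dropped, a 5-byte one kept. */
    printf("%d\n", keep_ldm_candidate(10, 8, 20, 3, 4));  /* prints 0 */
    printf("%d\n", keep_ldm_candidate(10, 8, 20, 5, 4));  /* prints 1 */
    return 0;
}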
data/ext/zstdlib_c/{zstd-1.5.6 → zstd-1.5.7}/lib/compress/zstd_opt.h

@@ -11,28 +11,24 @@
 #ifndef ZSTD_OPT_H
 #define ZSTD_OPT_H
 
-#if defined (__cplusplus)
-extern "C" {
-#endif
-
 #include "zstd_compress_internal.h"
 
 #if !defined(ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR) \
  || !defined(ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR) \
 || !defined(ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR)
 /* used in ZSTD_loadDictionaryContent() */
-void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend);
+void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend);
 #endif
 
 #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
 size_t ZSTD_compressBlock_btopt(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 size_t ZSTD_compressBlock_btopt_dictMatchState(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 size_t ZSTD_compressBlock_btopt_extDict(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 
 #define ZSTD_COMPRESSBLOCK_BTOPT ZSTD_compressBlock_btopt
@@ -46,20 +42,20 @@ size_t ZSTD_compressBlock_btopt_extDict(
 
 #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
 size_t ZSTD_compressBlock_btultra(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 size_t ZSTD_compressBlock_btultra_dictMatchState(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 size_t ZSTD_compressBlock_btultra_extDict(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 
 /* note : no btultra2 variant for extDict nor dictMatchState,
  * because btultra2 is not meant to work with dictionaries
  * and is only specific for the first block (no prefix) */
 size_t ZSTD_compressBlock_btultra2(
-        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+        ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 
 #define ZSTD_COMPRESSBLOCK_BTULTRA ZSTD_compressBlock_btultra
@@ -73,8 +69,4 @@ size_t ZSTD_compressBlock_btultra2(
 #define ZSTD_COMPRESSBLOCK_BTULTRA2 NULL
 #endif
 
-#if defined (__cplusplus)
-}
-#endif
-
 #endif /* ZSTD_OPT_H */