zstd-ruby 1.3.3.0 → 1.3.4.0

Files changed (44)
  1. checksums.yaml +5 -5
  2. data/README.md +1 -1
  3. data/ext/zstdruby/libzstd/BUCK +13 -0
  4. data/ext/zstdruby/libzstd/README.md +32 -25
  5. data/ext/zstdruby/libzstd/common/bitstream.h +1 -1
  6. data/ext/zstdruby/libzstd/common/compiler.h +25 -0
  7. data/ext/zstdruby/libzstd/common/cpu.h +216 -0
  8. data/ext/zstdruby/libzstd/common/error_private.c +1 -0
  9. data/ext/zstdruby/libzstd/common/fse.h +1 -1
  10. data/ext/zstdruby/libzstd/common/fse_decompress.c +2 -2
  11. data/ext/zstdruby/libzstd/common/huf.h +114 -89
  12. data/ext/zstdruby/libzstd/common/pool.c +46 -17
  13. data/ext/zstdruby/libzstd/common/pool.h +18 -9
  14. data/ext/zstdruby/libzstd/common/threading.h +12 -12
  15. data/ext/zstdruby/libzstd/common/zstd_errors.h +16 -7
  16. data/ext/zstdruby/libzstd/common/zstd_internal.h +4 -5
  17. data/ext/zstdruby/libzstd/compress/fse_compress.c +19 -11
  18. data/ext/zstdruby/libzstd/compress/huf_compress.c +160 -62
  19. data/ext/zstdruby/libzstd/compress/zstd_compress.c +973 -644
  20. data/ext/zstdruby/libzstd/compress/zstd_compress_internal.h +281 -34
  21. data/ext/zstdruby/libzstd/compress/zstd_double_fast.c +80 -62
  22. data/ext/zstdruby/libzstd/compress/zstd_double_fast.h +11 -4
  23. data/ext/zstdruby/libzstd/compress/zstd_fast.c +87 -71
  24. data/ext/zstdruby/libzstd/compress/zstd_fast.h +10 -6
  25. data/ext/zstdruby/libzstd/compress/zstd_lazy.c +333 -274
  26. data/ext/zstdruby/libzstd/compress/zstd_lazy.h +33 -16
  27. data/ext/zstdruby/libzstd/compress/zstd_ldm.c +305 -359
  28. data/ext/zstdruby/libzstd/compress/zstd_ldm.h +64 -21
  29. data/ext/zstdruby/libzstd/compress/zstd_opt.c +194 -56
  30. data/ext/zstdruby/libzstd/compress/zstd_opt.h +17 -5
  31. data/ext/zstdruby/libzstd/compress/zstdmt_compress.c +1131 -449
  32. data/ext/zstdruby/libzstd/compress/zstdmt_compress.h +32 -16
  33. data/ext/zstdruby/libzstd/decompress/huf_decompress.c +390 -290
  34. data/ext/zstdruby/libzstd/decompress/zstd_decompress.c +777 -439
  35. data/ext/zstdruby/libzstd/dictBuilder/cover.c +11 -8
  36. data/ext/zstdruby/libzstd/dictBuilder/zdict.c +83 -50
  37. data/ext/zstdruby/libzstd/dictBuilder/zdict.h +44 -43
  38. data/ext/zstdruby/libzstd/legacy/zstd_legacy.h +2 -0
  39. data/ext/zstdruby/libzstd/legacy/zstd_v04.c +42 -118
  40. data/ext/zstdruby/libzstd/legacy/zstd_v06.c +2 -2
  41. data/ext/zstdruby/libzstd/legacy/zstd_v07.c +2 -2
  42. data/ext/zstdruby/libzstd/zstd.h +254 -254
  43. data/lib/zstd-ruby/version.rb +1 -1
  44. metadata +4 -3
data/ext/zstdruby/libzstd/compress/zstd_ldm.h
@@ -22,32 +22,71 @@ extern "C" {
  ***************************************/
 
  #define ZSTD_LDM_DEFAULT_WINDOW_LOG ZSTD_WINDOWLOG_DEFAULTMAX
- #define ZSTD_LDM_HASHEVERYLOG_NOTSET 9999
 
- /** ZSTD_compressBlock_ldm_generic() :
+ /**
+ * ZSTD_ldm_generateSequences():
  *
- * This is a block compressor intended for long distance matching.
+ * Generates the sequences using the long distance match finder.
+ * Generates long range matching sequences in `sequences`, which parse a prefix
+ * of the source. `sequences` must be large enough to store every sequence,
+ * which can be checked with `ZSTD_ldm_getMaxNbSeq()`.
+ * @returns 0 or an error code.
  *
- * The function searches for matches of length at least
- * ldmParams.minMatchLength using a hash table in cctx->ldmState.
- * Matches can be at a distance of up to cParams.windowLog.
+ * NOTE: The user must have called ZSTD_window_update() for all of the input
+ * they have, even if they pass it to ZSTD_ldm_generateSequences() in chunks.
+ * NOTE: This function returns an error if it runs out of space to store
+ * sequences.
+ */
+ size_t ZSTD_ldm_generateSequences(
+ ldmState_t* ldms, rawSeqStore_t* sequences,
+ ldmParams_t const* params, void const* src, size_t srcSize);
+
+ /**
+ * ZSTD_ldm_blockCompress():
+ *
+ * Compresses a block using the predefined sequences, along with a secondary
+ * block compressor. The literals section of every sequence is passed to the
+ * secondary block compressor, and those sequences are interspersed with the
+ * predefined sequences. Returns the length of the last literals.
+ * Updates `rawSeqStore.pos` to indicate how many sequences have been consumed.
+ * `rawSeqStore.seq` may also be updated to split the last sequence between two
+ * blocks.
+ * @return The length of the last literals.
+ *
+ * NOTE: The source must be at most the maximum block size, but the predefined
+ * sequences can be any size, and may be longer than the block. In the case that
+ * they are longer than the block, the last sequences may need to be split into
+ * two. We handle that case correctly, and update `rawSeqStore` appropriately.
+ * NOTE: This function does not return any errors.
+ */
+ size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore,
+ ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams,
+ void const* src, size_t srcSize,
+ int const extDict);
+
+ /**
+ * ZSTD_ldm_skipSequences():
  *
- * Upon finding a match, the unmatched literals are compressed using a
- * ZSTD_blockCompressor (depending on the strategy in the compression
- * parameters), which stores the matched sequences. The "long distance"
- * match is then stored with the remaining literals from the
- * ZSTD_blockCompressor. */
- size_t ZSTD_compressBlock_ldm(ZSTD_CCtx* cctx, const void* src, size_t srcSize);
- size_t ZSTD_compressBlock_ldm_extDict(ZSTD_CCtx* ctx,
- const void* src, size_t srcSize);
+ * Skip past `srcSize` bytes worth of sequences in `rawSeqStore`.
+ * Avoids emitting matches less than `minMatch` bytes.
+ * Must be called for data with is not passed to ZSTD_ldm_blockCompress().
+ */
+ void ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize,
+ U32 const minMatch);
 
- /** ZSTD_ldm_initializeParameters() :
- * Initialize the long distance matching parameters to their default values. */
- size_t ZSTD_ldm_initializeParameters(ldmParams_t* params, U32 enableLdm);
 
  /** ZSTD_ldm_getTableSize() :
- * Estimate the space needed for long distance matching tables. */
- size_t ZSTD_ldm_getTableSize(U32 hashLog, U32 bucketSizeLog);
+ * Estimate the space needed for long distance matching tables or 0 if LDM is
+ * disabled.
+ */
+ size_t ZSTD_ldm_getTableSize(ldmParams_t params);
+
+ /** ZSTD_ldm_getSeqSpace() :
+ * Return an upper bound on the number of sequences that can be produced by
+ * the long distance matcher, or 0 if LDM is disabled.
+ */
+ size_t ZSTD_ldm_getMaxNbSeq(ldmParams_t params, size_t maxChunkSize);
 
  /** ZSTD_ldm_getTableSize() :
  * Return prime8bytes^(minMatchLength-1) */
@@ -58,8 +97,12 @@ U64 ZSTD_ldm_getHashPower(U32 minMatchLength);
  * windowLog and params->hashLog.
  *
  * Ensures that params->bucketSizeLog is <= params->hashLog (setting it to
- * params->hashLog if it is not). */
- void ZSTD_ldm_adjustParameters(ldmParams_t* params, U32 windowLog);
+ * params->hashLog if it is not).
+ *
+ * Ensures that the minMatchLength >= targetLength during optimal parsing.
+ */
+ void ZSTD_ldm_adjustParameters(ldmParams_t* params,
+ ZSTD_compressionParameters const* cParams);
 
  #if defined (__cplusplus)
  }
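The header comments above describe a two-phase flow that replaces the removed ZSTD_compressBlock_ldm*() entry points: the long distance matcher first records raw sequences, which are then replayed through a secondary block compressor. The snippet below is a minimal sketch of that call order, not part of the diff. It assumes it is compiled inside libzstd's compress/ directory, that the caller has already initialized ldms, params, ms, seqStore, rep and cParams, sized `sequences` with ZSTD_ldm_getMaxNbSeq(), and covered the input with ZSTD_window_update(); the helper name example_ldm_block is hypothetical.

#include "zstd_compress_internal.h"  /* ZSTD_matchState_t, seqStore_t, rawSeqStore_t, ldmState_t */
#include "zstd_ldm.h"                /* ZSTD_ldm_generateSequences, ZSTD_ldm_blockCompress */

static size_t example_ldm_block(ldmState_t* ldms, ldmParams_t const* params,
                                rawSeqStore_t* sequences,   /* sized via ZSTD_ldm_getMaxNbSeq() */
                                ZSTD_matchState_t* ms, seqStore_t* seqStore,
                                U32 rep[ZSTD_REP_NUM],
                                ZSTD_compressionParameters const* cParams,
                                void const* src, size_t srcSize)
{
    /* Phase 1: scan the block and record long-distance matches as raw sequences.
     * Fails only if `sequences` is too small to hold them all. */
    size_t const err = ZSTD_ldm_generateSequences(ldms, sequences, params, src, srcSize);
    if (ZSTD_isError(err)) return err;
    /* Phase 2: emit those sequences, handing each sequence's literals to the
     * secondary block compressor chosen by the compression strategy.
     * Returns the length of the last literals; never an error. */
    return ZSTD_ldm_blockCompress(sequences, ms, seqStore, rep, cParams,
                                  src, srcSize, 0 /* extDict */);
}

Per the comments above, any input that bypasses ZSTD_ldm_blockCompress() would instead go through ZSTD_ldm_skipSequences() so that the raw sequence store stays in sync with the source position.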
data/ext/zstdruby/libzstd/compress/zstd_opt.c
@@ -10,7 +10,6 @@
 
  #include "zstd_compress_internal.h"
  #include "zstd_opt.h"
- #include "zstd_lazy.h" /* ZSTD_updateTree, ZSTD_updateTree_extDict */
 
 
  #define ZSTD_LITFREQ_ADD 2 /* scaling factor for litFreq, so that frequencies adapt faster to new stats. Also used for matchSum (?) */
@@ -244,14 +243,15 @@ MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
 
  /* Update hashTable3 up to ip (excluded)
  Assumption : always within prefix (i.e. not within extDict) */
- static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_CCtx* const cctx, const BYTE* const ip)
+ static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms, const BYTE* const ip)
  {
- U32* const hashTable3 = cctx->hashTable3;
- U32 const hashLog3 = cctx->hashLog3;
- const BYTE* const base = cctx->base;
- U32 idx = cctx->nextToUpdate3;
- U32 const target = cctx->nextToUpdate3 = (U32)(ip - base);
+ U32* const hashTable3 = ms->hashTable3;
+ U32 const hashLog3 = ms->hashLog3;
+ const BYTE* const base = ms->window.base;
+ U32 idx = ms->nextToUpdate3;
+ U32 const target = ms->nextToUpdate3 = (U32)(ip - base);
  size_t const hash3 = ZSTD_hash3Ptr(ip, hashLog3);
+ assert(hashLog3 > 0);
 
  while(idx < target) {
  hashTable3[ZSTD_hash3Ptr(base+idx, hashLog3)] = idx;
@@ -265,36 +265,173 @@ static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_CCtx* const cctx, const BYTE*
  /*-*************************************
  * Binary Tree search
  ***************************************/
+ /** ZSTD_insertBt1() : add one or multiple positions to tree.
+ * ip : assumed <= iend-8 .
+ * @return : nb of positions added */
+ static U32 ZSTD_insertBt1(
+ ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
+ const BYTE* const ip, const BYTE* const iend,
+ U32 const mls, U32 const extDict)
+ {
+ U32* const hashTable = ms->hashTable;
+ U32 const hashLog = cParams->hashLog;
+ size_t const h = ZSTD_hashPtr(ip, hashLog, mls);
+ U32* const bt = ms->chainTable;
+ U32 const btLog = cParams->chainLog - 1;
+ U32 const btMask = (1 << btLog) - 1;
+ U32 matchIndex = hashTable[h];
+ size_t commonLengthSmaller=0, commonLengthLarger=0;
+ const BYTE* const base = ms->window.base;
+ const BYTE* const dictBase = ms->window.dictBase;
+ const U32 dictLimit = ms->window.dictLimit;
+ const BYTE* const dictEnd = dictBase + dictLimit;
+ const BYTE* const prefixStart = base + dictLimit;
+ const BYTE* match;
+ const U32 current = (U32)(ip-base);
+ const U32 btLow = btMask >= current ? 0 : current - btMask;
+ U32* smallerPtr = bt + 2*(current&btMask);
+ U32* largerPtr = smallerPtr + 1;
+ U32 dummy32; /* to be nullified at the end */
+ U32 const windowLow = ms->window.lowLimit;
+ U32 matchEndIdx = current+8+1;
+ size_t bestLength = 8;
+ U32 nbCompares = 1U << cParams->searchLog;
+ #ifdef ZSTD_C_PREDICT
+ U32 predictedSmall = *(bt + 2*((current-1)&btMask) + 0);
+ U32 predictedLarge = *(bt + 2*((current-1)&btMask) + 1);
+ predictedSmall += (predictedSmall>0);
+ predictedLarge += (predictedLarge>0);
+ #endif /* ZSTD_C_PREDICT */
+
+ DEBUGLOG(8, "ZSTD_insertBt1 (%u)", current);
+
+ assert(ip <= iend-8); /* required for h calculation */
+ hashTable[h] = current; /* Update Hash Table */
+
+ while (nbCompares-- && (matchIndex > windowLow)) {
+ U32* const nextPtr = bt + 2*(matchIndex & btMask);
+ size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
+ assert(matchIndex < current);
+
+ #ifdef ZSTD_C_PREDICT /* note : can create issues when hlog small <= 11 */
+ const U32* predictPtr = bt + 2*((matchIndex-1) & btMask); /* written this way, as bt is a roll buffer */
+ if (matchIndex == predictedSmall) {
+ /* no need to check length, result known */
+ *smallerPtr = matchIndex;
+ if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop the search */
+ smallerPtr = nextPtr+1; /* new "smaller" => larger of match */
+ matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */
+ predictedSmall = predictPtr[1] + (predictPtr[1]>0);
+ continue;
+ }
+ if (matchIndex == predictedLarge) {
+ *largerPtr = matchIndex;
+ if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop the search */
+ largerPtr = nextPtr;
+ matchIndex = nextPtr[0];
+ predictedLarge = predictPtr[0] + (predictPtr[0]>0);
+ continue;
+ }
+ #endif
+
+ if ((!extDict) || (matchIndex+matchLength >= dictLimit)) {
+ assert(matchIndex+matchLength >= dictLimit); /* might be wrong if extDict is incorrectly set to 0 */
+ match = base + matchIndex;
+ matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
+ } else {
+ match = dictBase + matchIndex;
+ matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);
+ if (matchIndex+matchLength >= dictLimit)
+ match = base + matchIndex; /* to prepare for next usage of match[matchLength] */
+ }
+
+ if (matchLength > bestLength) {
+ bestLength = matchLength;
+ if (matchLength > matchEndIdx - matchIndex)
+ matchEndIdx = matchIndex + (U32)matchLength;
+ }
+
+ if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */
+ break; /* drop , to guarantee consistency ; miss a bit of compression, but other solutions can corrupt tree */
+ }
+
+ if (match[matchLength] < ip[matchLength]) { /* necessarily within buffer */
+ /* match is smaller than current */
+ *smallerPtr = matchIndex; /* update smaller idx */
+ commonLengthSmaller = matchLength; /* all smaller will now have at least this guaranteed common length */
+ if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop searching */
+ smallerPtr = nextPtr+1; /* new "candidate" => larger than match, which was smaller than target */
+ matchIndex = nextPtr[1]; /* new matchIndex, larger than previous and closer to current */
+ } else {
+ /* match is larger than current */
+ *largerPtr = matchIndex;
+ commonLengthLarger = matchLength;
+ if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop searching */
+ largerPtr = nextPtr;
+ matchIndex = nextPtr[0];
+ } }
+
+ *smallerPtr = *largerPtr = 0;
+ if (bestLength > 384) return MIN(192, (U32)(bestLength - 384)); /* speed optimization */
+ assert(matchEndIdx > current + 8);
+ return matchEndIdx - (current + 8);
+ }
+
+ FORCE_INLINE_TEMPLATE
+ void ZSTD_updateTree_internal(
+ ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
+ const BYTE* const ip, const BYTE* const iend,
+ const U32 mls, const U32 extDict)
+ {
+ const BYTE* const base = ms->window.base;
+ U32 const target = (U32)(ip - base);
+ U32 idx = ms->nextToUpdate;
+ DEBUGLOG(7, "ZSTD_updateTree_internal, from %u to %u (extDict:%u)",
+ idx, target, extDict);
+
+ while(idx < target)
+ idx += ZSTD_insertBt1(ms, cParams, base+idx, iend, mls, extDict);
+ ms->nextToUpdate = target;
+ }
+
+ void ZSTD_updateTree(
+ ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
+ const BYTE* ip, const BYTE* iend)
+ {
+ ZSTD_updateTree_internal(ms, cParams, ip, iend, cParams->searchLength, 0 /*extDict*/);
+ }
+
  FORCE_INLINE_TEMPLATE
  U32 ZSTD_insertBtAndGetAllMatches (
- ZSTD_CCtx* zc,
+ ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
  const BYTE* const ip, const BYTE* const iLimit, int const extDict,
- U32 nbCompares, U32 const mls, U32 const sufficient_len,
  U32 rep[ZSTD_REP_NUM], U32 const ll0,
- ZSTD_match_t* matches, const U32 lengthToBeat)
+ ZSTD_match_t* matches, const U32 lengthToBeat, U32 const mls /* template */)
  {
- const BYTE* const base = zc->base;
+ U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
+ const BYTE* const base = ms->window.base;
  U32 const current = (U32)(ip-base);
- U32 const hashLog = zc->appliedParams.cParams.hashLog;
+ U32 const hashLog = cParams->hashLog;
  U32 const minMatch = (mls==3) ? 3 : 4;
- U32* const hashTable = zc->hashTable;
+ U32* const hashTable = ms->hashTable;
  size_t const h = ZSTD_hashPtr(ip, hashLog, mls);
  U32 matchIndex = hashTable[h];
- U32* const bt = zc->chainTable;
- U32 const btLog = zc->appliedParams.cParams.chainLog - 1;
+ U32* const bt = ms->chainTable;
+ U32 const btLog = cParams->chainLog - 1;
  U32 const btMask= (1U << btLog) - 1;
  size_t commonLengthSmaller=0, commonLengthLarger=0;
- const BYTE* const dictBase = zc->dictBase;
- U32 const dictLimit = zc->dictLimit;
+ const BYTE* const dictBase = ms->window.dictBase;
+ U32 const dictLimit = ms->window.dictLimit;
  const BYTE* const dictEnd = dictBase + dictLimit;
  const BYTE* const prefixStart = base + dictLimit;
  U32 const btLow = btMask >= current ? 0 : current - btMask;
- U32 const windowLow = zc->lowLimit;
+ U32 const windowLow = ms->window.lowLimit;
  U32* smallerPtr = bt + 2*(current&btMask);
  U32* largerPtr = bt + 2*(current&btMask) + 1;
  U32 matchEndIdx = current+8+1; /* farthest referenced position of any match => detects repetitive patterns */
  U32 dummy32; /* to be nullified at the end */
  U32 mnum = 0;
+ U32 nbCompares = 1U << cParams->searchLog;
 
  size_t bestLength = lengthToBeat-1;
  DEBUGLOG(7, "ZSTD_insertBtAndGetAllMatches");
@@ -335,7 +472,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
 
  /* HC3 match finder */
  if ((mls == 3) /*static*/ && (bestLength < mls)) {
- U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3 (zc, ip);
+ U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, ip);
  if ((matchIndex3 > windowLow)
  & (current - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {
  size_t mlen;
@@ -359,7 +496,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
  mnum = 1;
  if ( (mlen > sufficient_len) |
  (ip+mlen == iLimit) ) { /* best possible length */
- zc->nextToUpdate = current+1; /* skip insertion */
+ ms->nextToUpdate = current+1; /* skip insertion */
  return 1;
  } } } }
 
@@ -416,30 +553,29 @@ U32 ZSTD_insertBtAndGetAllMatches (
  *smallerPtr = *largerPtr = 0;
 
  assert(matchEndIdx > current+8);
- zc->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */
+ ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */
  return mnum;
  }
 
 
  FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (
- ZSTD_CCtx* zc, /* Index table will be updated */
+ ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
  const BYTE* ip, const BYTE* const iHighLimit, int const extDict,
- U32 const maxNbAttempts, U32 const matchLengthSearch, U32 const sufficient_len,
  U32 rep[ZSTD_REP_NUM], U32 const ll0,
  ZSTD_match_t* matches, U32 const lengthToBeat)
  {
+ U32 const matchLengthSearch = cParams->searchLength;
  DEBUGLOG(7, "ZSTD_BtGetAllMatches");
- if (ip < zc->base + zc->nextToUpdate) return 0; /* skipped area */
- if (extDict) ZSTD_updateTree_extDict(zc, ip, iHighLimit, maxNbAttempts, matchLengthSearch);
- else ZSTD_updateTree(zc, ip, iHighLimit, maxNbAttempts, matchLengthSearch);
+ if (ip < ms->window.base + ms->nextToUpdate) return 0; /* skipped area */
+ ZSTD_updateTree_internal(ms, cParams, ip, iHighLimit, matchLengthSearch, extDict);
  switch(matchLengthSearch)
  {
- case 3 : return ZSTD_insertBtAndGetAllMatches(zc, ip, iHighLimit, extDict, maxNbAttempts, 3, sufficient_len, rep, ll0, matches, lengthToBeat);
+ case 3 : return ZSTD_insertBtAndGetAllMatches(ms, cParams, ip, iHighLimit, extDict, rep, ll0, matches, lengthToBeat, 3);
  default :
- case 4 : return ZSTD_insertBtAndGetAllMatches(zc, ip, iHighLimit, extDict, maxNbAttempts, 4, sufficient_len, rep, ll0, matches, lengthToBeat);
- case 5 : return ZSTD_insertBtAndGetAllMatches(zc, ip, iHighLimit, extDict, maxNbAttempts, 5, sufficient_len, rep, ll0, matches, lengthToBeat);
+ case 4 : return ZSTD_insertBtAndGetAllMatches(ms, cParams, ip, iHighLimit, extDict, rep, ll0, matches, lengthToBeat, 4);
+ case 5 : return ZSTD_insertBtAndGetAllMatches(ms, cParams, ip, iHighLimit, extDict, rep, ll0, matches, lengthToBeat, 5);
  case 7 :
- case 6 : return ZSTD_insertBtAndGetAllMatches(zc, ip, iHighLimit, extDict, maxNbAttempts, 6, sufficient_len, rep, ll0, matches, lengthToBeat);
+ case 6 : return ZSTD_insertBtAndGetAllMatches(ms, cParams, ip, iHighLimit, extDict, rep, ll0, matches, lengthToBeat, 6);
  }
  }
 
@@ -527,36 +663,33 @@ static int ZSTD_literalsContribution_cached(
  }
 
  FORCE_INLINE_TEMPLATE
- size_t ZSTD_compressBlock_opt_generic(ZSTD_CCtx* ctx,
+ size_t ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,seqStore_t* seqStore,
+ U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams,
  const void* src, size_t srcSize,
  const int optLevel, const int extDict)
  {
- seqStore_t* const seqStorePtr = &(ctx->seqStore);
- optState_t* const optStatePtr = &(ctx->optState);
+ optState_t* const optStatePtr = &ms->opt;
  const BYTE* const istart = (const BYTE*)src;
  const BYTE* ip = istart;
  const BYTE* anchor = istart;
  const BYTE* const iend = istart + srcSize;
  const BYTE* const ilimit = iend - 8;
- const BYTE* const base = ctx->base;
- const BYTE* const prefixStart = base + ctx->dictLimit;
+ const BYTE* const base = ms->window.base;
+ const BYTE* const prefixStart = base + ms->window.dictLimit;
 
- U32 const maxSearches = 1U << ctx->appliedParams.cParams.searchLog;
- U32 const sufficient_len = MIN(ctx->appliedParams.cParams.targetLength, ZSTD_OPT_NUM -1);
- U32 const mls = ctx->appliedParams.cParams.searchLength;
- U32 const minMatch = (ctx->appliedParams.cParams.searchLength == 3) ? 3 : 4;
+ U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
+ U32 const minMatch = (cParams->searchLength == 3) ? 3 : 4;
 
  ZSTD_optimal_t* const opt = optStatePtr->priceTable;
  ZSTD_match_t* const matches = optStatePtr->matchTable;
  cachedLiteralPrice_t cachedLitPrice;
- U32 rep[ZSTD_REP_NUM];
 
  /* init */
  DEBUGLOG(5, "ZSTD_compressBlock_opt_generic");
- ctx->nextToUpdate3 = ctx->nextToUpdate;
+ ms->nextToUpdate3 = ms->nextToUpdate;
  ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize);
  ip += (ip==prefixStart);
- { int i; for (i=0; i<ZSTD_REP_NUM; i++) rep[i]=seqStorePtr->rep[i]; }
  memset(&cachedLitPrice, 0, sizeof(cachedLitPrice));
 
  /* Match Loop */
@@ -567,7 +700,7 @@ size_t ZSTD_compressBlock_opt_generic(ZSTD_CCtx* ctx,
  /* find first match */
  { U32 const litlen = (U32)(ip - anchor);
  U32 const ll0 = !litlen;
- U32 const nbMatches = ZSTD_BtGetAllMatches(ctx, ip, iend, extDict, maxSearches, mls, sufficient_len, rep, ll0, matches, minMatch);
+ U32 const nbMatches = ZSTD_BtGetAllMatches(ms, cParams, ip, iend, extDict, rep, ll0, matches, minMatch);
  if (!nbMatches) { ip++; continue; }
 
  /* initialize opt[0] */
@@ -653,7 +786,7 @@ size_t ZSTD_compressBlock_opt_generic(ZSTD_CCtx* ctx,
  U32 const litlen = (opt[cur].mlen == 1) ? opt[cur].litlen : 0;
  U32 const previousPrice = (cur > litlen) ? opt[cur-litlen].price : 0;
  U32 const basePrice = previousPrice + ZSTD_fullLiteralsCost(inr-litlen, litlen, optStatePtr);
- U32 const nbMatches = ZSTD_BtGetAllMatches(ctx, inr, iend, extDict, maxSearches, mls, sufficient_len, opt[cur].rep, ll0, matches, minMatch);
+ U32 const nbMatches = ZSTD_BtGetAllMatches(ms, cParams, inr, iend, extDict, opt[cur].rep, ll0, matches, minMatch);
  U32 matchNb;
  if (!nbMatches) continue;
 
@@ -749,37 +882,42 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
  }
 
  ZSTD_updateStats(optStatePtr, llen, anchor, offset, mlen);
- ZSTD_storeSeq(seqStorePtr, llen, anchor, offset, mlen-MINMATCH);
+ ZSTD_storeSeq(seqStore, llen, anchor, offset, mlen-MINMATCH);
  anchor = ip;
  } }
  ZSTD_setLog2Prices(optStatePtr);
  } /* while (ip < ilimit) */
 
- /* Save reps for next block */
- { int i; for (i=0; i<ZSTD_REP_NUM; i++) seqStorePtr->repToConfirm[i] = rep[i]; }
-
  /* Return the last literals size */
  return iend - anchor;
  }
 
 
- size_t ZSTD_compressBlock_btopt(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
+ size_t ZSTD_compressBlock_btopt(
+ ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize)
  {
  DEBUGLOG(5, "ZSTD_compressBlock_btopt");
- return ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 0 /*optLevel*/, 0 /*extDict*/);
+ return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, cParams, src, srcSize, 0 /*optLevel*/, 0 /*extDict*/);
  }
 
- size_t ZSTD_compressBlock_btultra(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
+ size_t ZSTD_compressBlock_btultra(
+ ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize)
  {
- return ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 2 /*optLevel*/, 0 /*extDict*/);
+ return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, cParams, src, srcSize, 2 /*optLevel*/, 0 /*extDict*/);
  }
 
- size_t ZSTD_compressBlock_btopt_extDict(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
+ size_t ZSTD_compressBlock_btopt_extDict(
+ ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize)
  {
- return ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 0 /*optLevel*/, 1 /*extDict*/);
+ return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, cParams, src, srcSize, 0 /*optLevel*/, 1 /*extDict*/);
  }
 
- size_t ZSTD_compressBlock_btultra_extDict(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
+ size_t ZSTD_compressBlock_btultra_extDict(
+ ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
+ ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize)
  {
- return ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 2 /*optLevel*/, 1 /*extDict*/);
+ return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, cParams, src, srcSize, 2 /*optLevel*/, 1 /*extDict*/);
  }
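After this refactor the btopt/btultra entry points no longer take a ZSTD_CCtx*; all four share the (ms, seqStore, rep, cParams, src, srcSize) shape shown above. The sketch below is illustrative only and not part of the diff: it assumes these functions are declared in zstd_opt.h (also updated in this release) and that all compressor state has been prepared elsewhere inside libzstd; the typedef and helper names are hypothetical.

#include "zstd_compress_internal.h"  /* ZSTD_matchState_t, seqStore_t */
#include "zstd_opt.h"                /* ZSTD_compressBlock_btopt* / btultra* (assumed declarations) */

/* One signature for every optimal-parser block compressor. */
typedef size_t (*exampleBlockCompressor)(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize);

static size_t example_compress_opt_block(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        ZSTD_compressionParameters const* cParams,
        void const* src, size_t srcSize,
        int ultra, int extDict)
{
    /* The uniform signature makes the variants interchangeable behind a
     * plain function pointer. */
    exampleBlockCompressor const fn =
        ultra ? (extDict ? ZSTD_compressBlock_btultra_extDict : ZSTD_compressBlock_btultra)
              : (extDict ? ZSTD_compressBlock_btopt_extDict   : ZSTD_compressBlock_btopt);
    /* Like every block compressor, returns the length of the last literals. */
    return fn(ms, seqStore, rep, cParams, src, srcSize);
}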