zstd-ruby 1.5.5.1 → 1.5.6.0

Files changed (48)
  1. checksums.yaml +4 -4
  2. data/README.md +1 -1
  3. data/ext/zstdruby/libzstd/common/allocations.h +1 -1
  4. data/ext/zstdruby/libzstd/common/bitstream.h +49 -29
  5. data/ext/zstdruby/libzstd/common/compiler.h +114 -22
  6. data/ext/zstdruby/libzstd/common/cpu.h +36 -0
  7. data/ext/zstdruby/libzstd/common/debug.c +6 -0
  8. data/ext/zstdruby/libzstd/common/debug.h +20 -11
  9. data/ext/zstdruby/libzstd/common/error_private.h +45 -36
  10. data/ext/zstdruby/libzstd/common/fse.h +3 -2
  11. data/ext/zstdruby/libzstd/common/fse_decompress.c +19 -17
  12. data/ext/zstdruby/libzstd/common/huf.h +14 -1
  13. data/ext/zstdruby/libzstd/common/mem.h +0 -9
  14. data/ext/zstdruby/libzstd/common/pool.c +1 -1
  15. data/ext/zstdruby/libzstd/common/pool.h +1 -1
  16. data/ext/zstdruby/libzstd/common/portability_macros.h +2 -0
  17. data/ext/zstdruby/libzstd/common/threading.c +8 -2
  18. data/ext/zstdruby/libzstd/common/xxhash.c +5 -11
  19. data/ext/zstdruby/libzstd/common/xxhash.h +2341 -1007
  20. data/ext/zstdruby/libzstd/common/zstd_internal.h +5 -5
  21. data/ext/zstdruby/libzstd/compress/fse_compress.c +8 -7
  22. data/ext/zstdruby/libzstd/compress/huf_compress.c +54 -25
  23. data/ext/zstdruby/libzstd/compress/zstd_compress.c +282 -161
  24. data/ext/zstdruby/libzstd/compress/zstd_compress_internal.h +29 -27
  25. data/ext/zstdruby/libzstd/compress/zstd_compress_superblock.c +224 -113
  26. data/ext/zstdruby/libzstd/compress/zstd_cwksp.h +19 -13
  27. data/ext/zstdruby/libzstd/compress/zstd_double_fast.c +17 -5
  28. data/ext/zstdruby/libzstd/compress/zstd_double_fast.h +11 -0
  29. data/ext/zstdruby/libzstd/compress/zstd_fast.c +14 -6
  30. data/ext/zstdruby/libzstd/compress/zstd_lazy.c +129 -87
  31. data/ext/zstdruby/libzstd/compress/zstd_lazy.h +103 -28
  32. data/ext/zstdruby/libzstd/compress/zstd_ldm.c +8 -2
  33. data/ext/zstdruby/libzstd/compress/zstd_opt.c +216 -112
  34. data/ext/zstdruby/libzstd/compress/zstd_opt.h +31 -7
  35. data/ext/zstdruby/libzstd/compress/zstdmt_compress.c +94 -79
  36. data/ext/zstdruby/libzstd/decompress/huf_decompress.c +188 -126
  37. data/ext/zstdruby/libzstd/decompress/huf_decompress_amd64.S +38 -19
  38. data/ext/zstdruby/libzstd/decompress/zstd_decompress.c +84 -32
  39. data/ext/zstdruby/libzstd/decompress/zstd_decompress_block.c +231 -208
  40. data/ext/zstdruby/libzstd/decompress/zstd_decompress_block.h +1 -1
  41. data/ext/zstdruby/libzstd/decompress/zstd_decompress_internal.h +2 -0
  42. data/ext/zstdruby/libzstd/dictBuilder/cover.c +16 -12
  43. data/ext/zstdruby/libzstd/dictBuilder/cover.h +2 -8
  44. data/ext/zstdruby/libzstd/dictBuilder/fastcover.c +2 -2
  45. data/ext/zstdruby/libzstd/dictBuilder/zdict.c +12 -6
  46. data/ext/zstdruby/libzstd/zstd.h +129 -60
  47. data/lib/zstd-ruby/version.rb +1 -1
  48. metadata +1 -1
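
The hunks reproduced below come from four of the vendored libzstd sources in this list: compress/zstd_cwksp.h, compress/zstd_double_fast.c, compress/zstd_double_fast.h, and compress/zstd_fast.c. Each group of hunks is headed by its file path.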
data/ext/zstdruby/libzstd/compress/zstd_cwksp.h
@@ -192,6 +192,7 @@ MEM_STATIC void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) {
     {
         intptr_t const offset = __msan_test_shadow(ws->initOnceStart,
             (U8*)ZSTD_cwksp_initialAllocStart(ws) - (U8*)ws->initOnceStart);
+        (void)offset;
 #if defined(ZSTD_MSAN_PRINT)
         if(offset!=-1) {
             __msan_print_shadow((U8*)ws->initOnceStart + offset - 8, 32);
@@ -433,7 +434,7 @@ MEM_STATIC void* ZSTD_cwksp_reserve_aligned(ZSTD_cwksp* ws, size_t bytes)
 
 /**
  * Aligned on 64 bytes. These buffers have the special property that
- * their values remain constrained, allowing us to re-use them without
+ * their values remain constrained, allowing us to reuse them without
  * memset()-ing them.
  */
 MEM_STATIC void* ZSTD_cwksp_reserve_table(ZSTD_cwksp* ws, size_t bytes)
@@ -525,7 +526,7 @@ MEM_STATIC void ZSTD_cwksp_mark_tables_dirty(ZSTD_cwksp* ws)
     DEBUGLOG(4, "cwksp: ZSTD_cwksp_mark_tables_dirty");
 
 #if ZSTD_MEMORY_SANITIZER && !defined (ZSTD_MSAN_DONT_POISON_WORKSPACE)
-    /* To validate that the table re-use logic is sound, and that we don't
+    /* To validate that the table reuse logic is sound, and that we don't
      * access table space that we haven't cleaned, we re-"poison" the table
      * space every time we mark it dirty.
      * Since tableValidEnd space and initOnce space may overlap we don't poison
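
The re-"poisoning" this comment describes uses the MemorySanitizer client interface, and the (void)offset added in the first hunk above silences the unused-variable warning when ZSTD_MSAN_PRINT is not defined. A minimal standalone sketch of the poison/probe cycle, assuming clang with -fsanitize=memory (the buffer and sizes are illustrative, not zstd's workspace layout):

    /* Sketch: the MSan poison/probe cycle behind the cwksp checks.
     * Build with: clang -g -fsanitize=memory msan_demo.c */
    #include <sanitizer/msan_interface.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
        size_t const size = 64;
        unsigned char* buf = malloc(size);

        __msan_unpoison(buf, size);   /* as if fully initialized       */
        __msan_poison(buf, size);     /* "mark dirty": reads now trap  */

        /* __msan_test_shadow returns the offset of the first poisoned
         * byte, or -1 if the whole range is clean -- the same probe
         * ZSTD_cwksp_assert_internal_consistency runs on initOnce space. */
        intptr_t const off = __msan_test_shadow(buf, size);
        printf("first poisoned byte at offset: %ld\n", (long)off);  /* 0 */

        __msan_unpoison(buf, size);   /* clean again before free       */
        free(buf);
        return 0;
    }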
@@ -602,9 +603,9 @@ MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) {
     DEBUGLOG(4, "cwksp: clearing!");
 
 #if ZSTD_MEMORY_SANITIZER && !defined (ZSTD_MSAN_DONT_POISON_WORKSPACE)
-    /* To validate that the context re-use logic is sound, and that we don't
+    /* To validate that the context reuse logic is sound, and that we don't
      * access stuff that this compression hasn't initialized, we re-"poison"
-     * the workspace except for the areas in which we expect memory re-use
+     * the workspace except for the areas in which we expect memory reuse
      * without initialization (objects, valid tables area and init once
      * memory). */
     {
@@ -635,6 +636,15 @@ MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) {
     ZSTD_cwksp_assert_internal_consistency(ws);
 }
 
+MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) {
+    return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);
+}
+
+MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) {
+    return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)
+         + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);
+}
+
 /**
  * The provided workspace takes ownership of the buffer [start, start+size).
  * Any existing values in the workspace are ignored (the previously managed
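
ZSTD_cwksp_sizeof and ZSTD_cwksp_used are not new: they are hoisted from their old spot later in the file (removed by the @@ -679,15 +694,6 @@ hunk below) because the MSan branch added to ZSTD_cwksp_free in the next hunk calls ZSTD_cwksp_sizeof, which must be declared before use. ZSTD_cwksp_used sums the arena's two allocation fronts: tables grow upward from workspace to tableEnd, while buffer and aligned allocations grow downward from workspaceEnd to allocStart. For a 1024-byte workspace with tableEnd at offset 256 and allocStart 128 bytes below the end, that is 256 + 128 = 384 bytes used.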
@@ -666,6 +676,11 @@ MEM_STATIC size_t ZSTD_cwksp_create(ZSTD_cwksp* ws, size_t size, ZSTD_customMem
 MEM_STATIC void ZSTD_cwksp_free(ZSTD_cwksp* ws, ZSTD_customMem customMem) {
     void *ptr = ws->workspace;
     DEBUGLOG(4, "cwksp: freeing workspace");
+#if ZSTD_MEMORY_SANITIZER && !defined(ZSTD_MSAN_DONT_POISON_WORKSPACE)
+    if (ptr != NULL && customMem.customFree != NULL) {
+        __msan_unpoison(ptr, ZSTD_cwksp_sizeof(ws));
+    }
+#endif
     ZSTD_memset(ws, 0, sizeof(ZSTD_cwksp));
     ZSTD_customFree(ptr, customMem);
 }
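
The new block unpoisons the workspace before it is handed back, and only when a user-supplied customFree is installed (the default free path is intercepted by MSan's own runtime). A sketch of why that matters, using a hypothetical hook that matches zstd's real ZSTD_freeFunction signature:

    /* "trackingFree" is a hypothetical ZSTD_freeFunction
     * (void (*)(void* opaque, void* address)) a user might plug
     * into ZSTD_customMem; it is not part of the zstd API. */
    #include <stdlib.h>

    static void trackingFree(void* opaque, void* address) {
        (void)opaque;
        /* A pooling allocator might read the returned block here, e.g.
         * to thread a free list through its first bytes. If those bytes
         * were still poisoned from ZSTD_cwksp_clear's re-poisoning,
         * MSan would report a use of uninitialized memory inside user
         * code -- hence the __msan_unpoison before ZSTD_customFree. */
        free(address);
    }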
@@ -679,15 +694,6 @@ MEM_STATIC void ZSTD_cwksp_move(ZSTD_cwksp* dst, ZSTD_cwksp* src) {
     ZSTD_memset(src, 0, sizeof(ZSTD_cwksp));
 }
 
-MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) {
-    return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);
-}
-
-MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) {
-    return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)
-         + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);
-}
-
 MEM_STATIC int ZSTD_cwksp_reserve_failed(const ZSTD_cwksp* ws) {
     return ws->allocFailed;
 }
data/ext/zstdruby/libzstd/compress/zstd_double_fast.c
@@ -11,7 +11,11 @@
 #include "zstd_compress_internal.h"
 #include "zstd_double_fast.h"
 
-static void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms,
+#ifndef ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR
+
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms,
                         void const* end, ZSTD_dictTableLoadMethod_e dtlm)
 {
     const ZSTD_compressionParameters* const cParams = &ms->cParams;
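
Two recurring 1.5.6 themes meet in this hunk. First, the whole double-fast compressor is now guarded by ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR (closed by the #endif in the final hunk of this file) so it can be compiled out to shrink binaries. Second, ZSTD_ALLOW_POINTER_OVERFLOW_ATTR annotates the table-filling and match-finding routines, whose pointer arithmetic intentionally strays outside object bounds and would otherwise trip UBSan's pointer-overflow check. The macro comes from common/compiler.h (also touched in this release, +114 -22); a plausible shape, hedged since the exact feature tests live in that file:

    /* Sketch of the attribute macro's likely shape -- the real
     * definition is in common/compiler.h. */
    #if defined(__has_attribute)
    # if __has_attribute(no_sanitize)
    #  define ZSTD_ALLOW_POINTER_OVERFLOW_ATTR \
              __attribute__((no_sanitize("pointer-overflow")))
    # endif
    #endif
    #ifndef ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
    # define ZSTD_ALLOW_POINTER_OVERFLOW_ATTR /* no-op elsewhere */
    #endif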
@@ -47,7 +51,9 @@ static void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms,
     }   }
 }
 
-static void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms,
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms,
                         void const* end, ZSTD_dictTableLoadMethod_e dtlm)
 {
     const ZSTD_compressionParameters* const cParams = &ms->cParams;
@@ -95,6 +101,7 @@ void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
 
 
 FORCE_INLINE_TEMPLATE
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 size_t ZSTD_compressBlock_doubleFast_noDict_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize, U32 const mls /* template */)
@@ -305,6 +312,7 @@ _match_stored:
 
 
 FORCE_INLINE_TEMPLATE
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize,
@@ -348,8 +356,8 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic(
     if (ms->prefetchCDictTables) {
         size_t const hashTableBytes = (((size_t)1) << dictCParams->hashLog) * sizeof(U32);
         size_t const chainTableBytes = (((size_t)1) << dictCParams->chainLog) * sizeof(U32);
-        PREFETCH_AREA(dictHashLong, hashTableBytes)
-        PREFETCH_AREA(dictHashSmall, chainTableBytes)
+        PREFETCH_AREA(dictHashLong, hashTableBytes);
+        PREFETCH_AREA(dictHashSmall, chainTableBytes);
     }
 
     /* init */
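
The only change at these call sites (and at zstd_fast.c's @@ -502,7 +508,7 @@ hunk below) is the trailing semicolon: upstream reworked PREFETCH_AREA in common/compiler.h so the macro no longer swallows the semicolon itself. The conventional way to make a multi-statement macro demand one is the do { } while (0) idiom; a hedged sketch of that shape, reusing zstd's existing PREFETCH_L2 macro (the real PREFETCH_AREA body may differ):

    /* Sketch: do { } while (0) makes a multi-statement macro act as a
     * single statement and require a trailing semicolon. */
    #define PREFETCH_AREA_SKETCH(p, s)                                  \
        do {                                                            \
            size_t const _size = (size_t)(s);                           \
            size_t _pos;                                                \
            for (_pos = 0; _pos < _size; _pos += 64 /* cache line */) { \
                PREFETCH_L2((const char*)(p) + _pos);                   \
            }                                                           \
        } while (0)

    /* Usage now parses correctly even inside an un-braced if/else:
     *     if (ms->prefetchCDictTables)
     *         PREFETCH_AREA_SKETCH(dictHashLong, hashTableBytes);
     */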
@@ -589,7 +597,9 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState(
 }
 
 
-static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+size_t ZSTD_compressBlock_doubleFast_extDict_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize,
         U32 const mls /* template */)
@@ -756,3 +766,5 @@ size_t ZSTD_compressBlock_doubleFast_extDict(
         return ZSTD_compressBlock_doubleFast_extDict_7(ms, seqStore, rep, src, srcSize);
     }
 }
+
+#endif /* ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR */
data/ext/zstdruby/libzstd/compress/zstd_double_fast.h
@@ -18,9 +18,12 @@ extern "C" {
 #include "../common/mem.h"      /* U32 */
 #include "zstd_compress_internal.h"     /* ZSTD_CCtx, size_t */
 
+#ifndef ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR
+
 void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
                               void const* end, ZSTD_dictTableLoadMethod_e dtlm,
                               ZSTD_tableFillPurpose_e tfp);
+
 size_t ZSTD_compressBlock_doubleFast(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
@@ -31,6 +34,14 @@ size_t ZSTD_compressBlock_doubleFast_extDict(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize);
 
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST ZSTD_compressBlock_doubleFast
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST_DICTMATCHSTATE ZSTD_compressBlock_doubleFast_dictMatchState
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST_EXTDICT ZSTD_compressBlock_doubleFast_extDict
+#else
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST NULL
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST_DICTMATCHSTATE NULL
+#define ZSTD_COMPRESSBLOCK_DOUBLEFAST_EXTDICT NULL
+#endif /* ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR */
 
 #if defined (__cplusplus)
 }
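
These macros let the block-compressor dispatch in zstd_compress.c (the +282 -161 entry above) stay table-driven even when a compressor is compiled out: excluded entries simply collapse to NULL. A sketch of the pattern, assuming zstd's internal types from zstd_compress_internal.h; the table name and layout are illustrative, not zstd_compress.c's actual structure:

    /* Sketch: NULL-able dispatch entries for excluded block compressors. */
    #include "zstd_compress_internal.h"
    #include "zstd_double_fast.h"

    typedef size_t (*blockCompressor_sketch)(
            ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
            void const* src, size_t srcSize);

    static const blockCompressor_sketch kDFastVariants_sketch[] = {
        ZSTD_COMPRESSBLOCK_DOUBLEFAST,                  /* NULL if excluded */
        ZSTD_COMPRESSBLOCK_DOUBLEFAST_DICTMATCHSTATE,   /* NULL if excluded */
        ZSTD_COMPRESSBLOCK_DOUBLEFAST_EXTDICT,          /* NULL if excluded */
    };
    /* A caller must treat a NULL entry as "not compiled in", e.g.
     * assert(kDFastVariants_sketch[i] != NULL) before dispatching. */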
data/ext/zstdruby/libzstd/compress/zstd_fast.c
@@ -11,7 +11,9 @@
 #include "zstd_compress_internal.h"  /* ZSTD_hashPtr, ZSTD_count, ZSTD_storeSeq */
 #include "zstd_fast.h"
 
-static void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms,
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms,
                         const void* const end,
                         ZSTD_dictTableLoadMethod_e dtlm)
 {
@@ -46,7 +48,9 @@ static void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms,
     }   }   }   }
 }
 
-static void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms,
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms,
                         const void* const end,
                         ZSTD_dictTableLoadMethod_e dtlm)
 {
@@ -139,8 +143,9 @@ void ZSTD_fillHashTable(ZSTD_matchState_t* ms,
  *
  * This is also the work we do at the beginning to enter the loop initially.
  */
-FORCE_INLINE_TEMPLATE size_t
-ZSTD_compressBlock_fast_noDict_generic(
+FORCE_INLINE_TEMPLATE
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+size_t ZSTD_compressBlock_fast_noDict_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize,
         U32 const mls, U32 const hasStep)
@@ -456,6 +461,7 @@ size_t ZSTD_compressBlock_fast(
 }
 
 FORCE_INLINE_TEMPLATE
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
 size_t ZSTD_compressBlock_fast_dictMatchState_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize, U32 const mls, U32 const hasStep)
@@ -502,7 +508,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic(
 
     if (ms->prefetchCDictTables) {
         size_t const hashTableBytes = (((size_t)1) << dictCParams->hashLog) * sizeof(U32);
-        PREFETCH_AREA(dictHashTable, hashTableBytes)
+        PREFETCH_AREA(dictHashTable, hashTableBytes);
     }
 
     /* init */
@@ -681,7 +687,9 @@ size_t ZSTD_compressBlock_fast_dictMatchState(
 }
 
 
-static size_t ZSTD_compressBlock_fast_extDict_generic(
+static
+ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
+size_t ZSTD_compressBlock_fast_extDict_generic(
         ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
         void const* src, size_t srcSize, U32 const mls, U32 const hasStep)
 {