thread_safe 0.1.1-java

Files changed (38)
  1. checksums.yaml +7 -0
  2. data/.gitignore +21 -0
  3. data/Gemfile +4 -0
  4. data/LICENSE +144 -0
  5. data/README.md +34 -0
  6. data/Rakefile +36 -0
  7. data/examples/bench_cache.rb +35 -0
  8. data/ext/org/jruby/ext/thread_safe/JRubyCacheBackendLibrary.java +200 -0
  9. data/ext/org/jruby/ext/thread_safe/jsr166e/ConcurrentHashMapV8.java +3842 -0
  10. data/ext/org/jruby/ext/thread_safe/jsr166e/LongAdder.java +204 -0
  11. data/ext/org/jruby/ext/thread_safe/jsr166e/Striped64.java +342 -0
  12. data/ext/org/jruby/ext/thread_safe/jsr166y/ThreadLocalRandom.java +199 -0
  13. data/ext/thread_safe/JrubyCacheBackendService.java +15 -0
  14. data/lib/thread_safe.rb +65 -0
  15. data/lib/thread_safe/atomic_reference_cache_backend.rb +922 -0
  16. data/lib/thread_safe/cache.rb +137 -0
  17. data/lib/thread_safe/mri_cache_backend.rb +62 -0
  18. data/lib/thread_safe/non_concurrent_cache_backend.rb +133 -0
  19. data/lib/thread_safe/synchronized_cache_backend.rb +76 -0
  20. data/lib/thread_safe/synchronized_delegator.rb +35 -0
  21. data/lib/thread_safe/util.rb +16 -0
  22. data/lib/thread_safe/util/adder.rb +59 -0
  23. data/lib/thread_safe/util/atomic_reference.rb +12 -0
  24. data/lib/thread_safe/util/cheap_lockable.rb +105 -0
  25. data/lib/thread_safe/util/power_of_two_tuple.rb +26 -0
  26. data/lib/thread_safe/util/striped64.rb +226 -0
  27. data/lib/thread_safe/util/volatile.rb +62 -0
  28. data/lib/thread_safe/util/volatile_tuple.rb +46 -0
  29. data/lib/thread_safe/util/xor_shift_random.rb +39 -0
  30. data/lib/thread_safe/version.rb +3 -0
  31. data/test/test_array.rb +20 -0
  32. data/test/test_cache.rb +792 -0
  33. data/test/test_cache_loops.rb +453 -0
  34. data/test/test_hash.rb +20 -0
  35. data/test/test_helper.rb +73 -0
  36. data/test/test_synchronized_delegator.rb +42 -0
  37. data/thread_safe.gemspec +21 -0
  38. metadata +100 -0
data/lib/thread_safe/cache.rb
@@ -0,0 +1,137 @@
+ require 'thread'
+
+ module ThreadSafe
+   autoload :JRubyCacheBackend, 'thread_safe/jruby_cache_backend'
+   autoload :MriCacheBackend, 'thread_safe/mri_cache_backend'
+   autoload :NonConcurrentCacheBackend, 'thread_safe/non_concurrent_cache_backend'
+   autoload :AtomicReferenceCacheBackend, 'thread_safe/atomic_reference_cache_backend'
+   autoload :SynchronizedCacheBackend, 'thread_safe/synchronized_cache_backend'
+
+   ConcurrentCacheBackend =
+     case defined?(RUBY_ENGINE) && RUBY_ENGINE
+     when 'jruby'; JRubyCacheBackend
+     when 'ruby';  MriCacheBackend
+     when 'rbx';   AtomicReferenceCacheBackend
+     else
+       warn 'ThreadSafe: unsupported Ruby engine, using a fully synchronized ThreadSafe::Cache implementation' if $VERBOSE
+       SynchronizedCacheBackend
+     end
+
+   class Cache < ConcurrentCacheBackend
+     KEY_ERROR = defined?(KeyError) ? KeyError : IndexError # there is no KeyError in 1.8 mode
+
+     def initialize(options = nil, &block)
+       if options.kind_of?(::Hash)
+         validate_options_hash!(options)
+       else
+         options = nil
+       end
+
+       super(options)
+       @default_proc = block
+     end
+
+     def [](key)
+       if value = super
+         value
+       elsif @default_proc && !key?(key)
+         @default_proc.call(self, key)
+       else
+         value
+       end
+     end
+
+     def fetch(key, default_value = NULL)
+       if NULL != (value = get_or_default(key, NULL))
+         value
+       elsif block_given?
+         yield key
+       elsif NULL != default_value
+         default_value
+       else
+         raise KEY_ERROR, 'key not found'
+       end
+     end
+
+     def put_if_absent(key, value)
+       computed = false
+       result = compute_if_absent(key) do
+         computed = true
+         value
+       end
+       computed ? nil : result
+     end unless method_defined?(:put_if_absent)
+
+     def value?(value)
+       each_value do |v|
+         return true if value.equal?(v)
+       end
+       false
+     end unless method_defined?(:value?)
+
+     def keys
+       arr = []
+       each_pair {|k, v| arr << k}
+       arr
+     end unless method_defined?(:keys)
+
+     def values
+       arr = []
+       each_pair {|k, v| arr << v}
+       arr
+     end unless method_defined?(:values)
+
+     def each_key
+       each_pair {|k, v| yield k}
+     end unless method_defined?(:each_key)
+
+     def each_value
+       each_pair {|k, v| yield v}
+     end unless method_defined?(:each_value)
+
+     def empty?
+       each_pair {|k, v| return false}
+       true
+     end unless method_defined?(:empty?)
+
+     def size
+       count = 0
+       each_pair {|k, v| count += 1}
+       count
+     end unless method_defined?(:size)
+
+     def marshal_dump
+       raise TypeError, "can't dump hash with default proc" if @default_proc
+       h = {}
+       each_pair {|k, v| h[k] = v}
+       h
+     end
+
+     def marshal_load(hash)
+       initialize
+       populate_from(hash)
+     end
+
+     undef :freeze
+
+     private
+     def initialize_copy(other)
+       super
+       populate_from(other)
+     end
+
+     def populate_from(hash)
+       hash.each_pair {|k, v| self[k] = v}
+       self
+     end
+
+     def validate_options_hash!(options)
+       if (initial_capacity = options[:initial_capacity]) && (!initial_capacity.kind_of?(Fixnum) || initial_capacity < 0)
+         raise ArgumentError, ":initial_capacity must be a positive Fixnum"
+       end
+       if (load_factor = options[:load_factor]) && (!load_factor.kind_of?(Numeric) || load_factor <= 0 || load_factor > 1)
+         raise ArgumentError, ":load_factor must be a number between 0 and 1"
+       end
+     end
+   end
+ end
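For orientation, here is a minimal usage sketch of the ThreadSafe::Cache API defined in the hunk above. It is illustrative only, not part of the diff, and assumes the gem is installed and loaded via `require 'thread_safe'`; key ordering may vary by backend.

require 'thread_safe'

# A cache with a default proc, mirroring Hash.new { ... } semantics.
cache = ThreadSafe::Cache.new { |c, key| c[key] = key.to_s.upcase }

cache[:a] = 1
cache[:a]          # => 1
cache[:missing]    # => "MISSING" (computed and stored by the default proc)

# fetch raises KeyError (IndexError in 1.8 mode) when the key is absent and
# no block or default value is given; it never consults the default proc.
cache.fetch(:a)               # => 1
cache.fetch(:nope, 42)        # => 42
cache.fetch(:nope) { |k| k }  # => :nope

# put_if_absent returns nil when the value was stored, or the existing value.
cache.put_if_absent(:a, 2)    # => 1 (already present, not overwritten)
cache.put_if_absent(:b, 2)    # => nil (stored)

cache.keys    # => [:a, :missing, :b]
cache.size    # => 3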
data/lib/thread_safe/mri_cache_backend.rb
@@ -0,0 +1,62 @@
+ module ThreadSafe
+   class MriCacheBackend < NonConcurrentCacheBackend
+     # We can get away with a single global write lock (instead of a per-instance one) because of the GVL/green threads.
+     #
+     # The previous implementation used `Thread.critical` on 1.8 MRI to implement the 4 composed atomic operations (`put_if_absent`, `replace_pair`,
+     # `replace_if_exists`, `delete_pair`). This, however, doesn't work for `compute_if_absent`, because on 1.8 the Mutex class is itself implemented
+     # via `Thread.critical` and a call to `Mutex#lock` does not restore the previous `Thread.critical` value (thus any synchronisation clears the
+     # `Thread.critical` flag and we lose control). This poses a problem as the provided block might use synchronisation of its own.
+     #
+     # NOTE: the otherwise neat idea of writing a C extension to perform an atomic put_if_absent, relying on Ruby not releasing the GVL while inside
+     # a C call, will not work because the `#hash` and `#eql?` key methods may themselves be implemented in Ruby.
+     WRITE_LOCK = Mutex.new
+
+     def []=(key, value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def compute_if_absent(key)
+       if stored_value = _get(key) # fast non-blocking path for the most likely case
+         stored_value
+       else
+         WRITE_LOCK.synchronize { super }
+       end
+     end
+
+     def compute_if_present(key)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def compute(key)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def merge_pair(key, value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def replace_pair(key, old_value, new_value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def replace_if_exists(key, new_value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def get_and_set(key, value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def delete(key)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def delete_pair(key, value)
+       WRITE_LOCK.synchronize { super }
+     end
+
+     def clear
+       WRITE_LOCK.synchronize { super }
+     end
+   end
+ end
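To make the locking strategy above concrete, here is an illustrative sketch (not part of the diff) of `compute_if_absent` under the MRI backend: reads of an already-present key take the lock-free fast path, while the slow path repeats the presence check under the global `WRITE_LOCK`, so the block runs at most once per key even with many racing threads.

require 'thread'
require 'thread_safe'

cache = ThreadSafe::Cache.new   # resolves to MriCacheBackend when RUBY_ENGINE == 'ruby'
block_runs = 0

threads = 10.times.map do
  Thread.new do
    # Fast path: a plain Hash read with no locking once the key is present.
    # Slow path: the block executes inside WRITE_LOCK, and the presence check
    # is repeated there via super, so the block runs at most once per key.
    cache.compute_if_absent(:config) { block_runs += 1; { log_level: :info } }
  end
end
threads.each(&:join)

block_runs      # => 1
cache[:config]  # => { log_level: :info }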
data/lib/thread_safe/non_concurrent_cache_backend.rb
@@ -0,0 +1,133 @@
+ module ThreadSafe
+   class NonConcurrentCacheBackend
+     # WARNING: all public methods of this class must operate on @backend directly without calling each other. This is important
+     # because SynchronizedCacheBackend uses a non-reentrant mutex for performance reasons.
+     def initialize(options = nil)
+       @backend = {}
+     end
+
+     def [](key)
+       @backend[key]
+     end
+
+     def []=(key, value)
+       @backend[key] = value
+     end
+
+     def compute_if_absent(key)
+       if NULL != (stored_value = @backend.fetch(key, NULL))
+         stored_value
+       else
+         @backend[key] = yield
+       end
+     end
+
+     def replace_pair(key, old_value, new_value)
+       if pair?(key, old_value)
+         @backend[key] = new_value
+         true
+       else
+         false
+       end
+     end
+
+     def replace_if_exists(key, new_value)
+       if NULL != (stored_value = @backend.fetch(key, NULL))
+         @backend[key] = new_value
+         stored_value
+       end
+     end
+
+     def compute_if_present(key)
+       if NULL != (stored_value = @backend.fetch(key, NULL))
+         store_computed_value(key, yield(stored_value))
+       end
+     end
+
+     def compute(key)
+       store_computed_value(key, yield(@backend[key]))
+     end
+
+     def merge_pair(key, value)
+       if NULL == (stored_value = @backend.fetch(key, NULL))
+         @backend[key] = value
+       else
+         store_computed_value(key, yield(stored_value))
+       end
+     end
+
+     def get_and_set(key, value)
+       stored_value = @backend[key]
+       @backend[key] = value
+       stored_value
+     end
+
+     def key?(key)
+       @backend.key?(key)
+     end
+
+     def value?(value)
+       @backend.value?(value)
+     end
+
+     def delete(key)
+       @backend.delete(key)
+     end
+
+     def delete_pair(key, value)
+       if pair?(key, value)
+         @backend.delete(key)
+         true
+       else
+         false
+       end
+     end
+
+     def clear
+       @backend.clear
+       self
+     end
+
+     def each_pair
+       dupped_backend.each_pair do |k, v|
+         yield k, v
+       end
+       self
+     end
+
+     def size
+       @backend.size
+     end
+
+     def get_or_default(key, default_value)
+       @backend.fetch(key, default_value)
+     end
+
+     alias_method :_get, :[]
+     alias_method :_set, :[]=
+     private :_get, :_set
+     private
+     def initialize_copy(other)
+       super
+       @backend = {}
+       self
+     end
+
+     def dupped_backend
+       @backend.dup
+     end
+
+     def pair?(key, expected_value)
+       NULL != (stored_value = @backend.fetch(key, NULL)) && expected_value.equal?(stored_value)
+     end
+
+     def store_computed_value(key, new_value)
+       if new_value.nil?
+         @backend.delete(key)
+         nil
+       else
+         @backend[key] = new_value
+       end
+     end
+   end
+ end
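One detail of the backend API above worth illustrating is the nil convention in `store_computed_value`: when a `compute`, `compute_if_present`, or `merge_pair` block returns nil, the key is removed instead of being stored. A short sketch (illustrative only, not part of the diff):

require 'thread_safe'

cache = ThreadSafe::Cache.new
cache[:hits] = 1

# compute yields the current value (or nil) and stores the block's result;
# per store_computed_value above, a nil result deletes the key instead.
cache.compute(:hits) { |v| v + 1 }   # => 2
cache.compute(:hits) { |v| nil }     # => nil, :hits is removed
cache.key?(:hits)                    # => false

# merge_pair stores the given value for an absent key, otherwise yields the
# stored value and applies the same nil-deletes rule to the block's result.
cache.merge_pair(:hits, 1) { |old| old + 1 }   # => 1 (key was absent)
cache.merge_pair(:hits, 1) { |old| old + 1 }   # => 2 (block applied)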
data/lib/thread_safe/synchronized_cache_backend.rb
@@ -0,0 +1,76 @@
+ module ThreadSafe
+   class SynchronizedCacheBackend < NonConcurrentCacheBackend
+     require 'mutex_m'
+     include Mutex_m
+     # WARNING: Mutex_m is a non-reentrant lock, so the synchronized methods are not allowed to call each other.
+
+     def [](key)
+       synchronize { super }
+     end
+
+     def []=(key, value)
+       synchronize { super }
+     end
+
+     def compute_if_absent(key)
+       synchronize { super }
+     end
+
+     def compute_if_present(key)
+       synchronize { super }
+     end
+
+     def compute(key)
+       synchronize { super }
+     end
+
+     def merge_pair(key, value)
+       synchronize { super }
+     end
+
+     def replace_pair(key, old_value, new_value)
+       synchronize { super }
+     end
+
+     def replace_if_exists(key, new_value)
+       synchronize { super }
+     end
+
+     def get_and_set(key, value)
+       synchronize { super }
+     end
+
+     def key?(key)
+       synchronize { super }
+     end
+
+     def value?(value)
+       synchronize { super }
+     end
+
+     def delete(key)
+       synchronize { super }
+     end
+
+     def delete_pair(key, value)
+       synchronize { super }
+     end
+
+     def clear
+       synchronize { super }
+     end
+
+     def size
+       synchronize { super }
+     end
+
+     def get_or_default(key, default_value)
+       synchronize { super }
+     end
+
+     private
+     def dupped_backend
+       synchronize { super }
+     end
+   end
+ end
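`SynchronizedCacheBackend` is the fallback selected for unknown engines, but it can also be instantiated directly, for example in tests. A small hedged sketch (not part of the diff; it assumes `require 'thread_safe'` makes the autoloaded backend constants available):

require 'thread_safe'

cache = ThreadSafe::SynchronizedCacheBackend.new
cache[:k] = 1
# Each public method runs as a single critical section under the Mutex_m lock;
# per the warning above, these methods never call each other internally.
cache.get_and_set(:k, 2)   # => 1
cache[:k]                  # => 2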
data/lib/thread_safe/synchronized_delegator.rb
@@ -0,0 +1,35 @@
+ # This class provides a trivial way to synchronize all calls to a given object
+ # by wrapping it with a Delegator that performs Mutex#lock/unlock calls around
+ # the delegated #send. Example:
+ #
+ #   array = []                              # not thread-safe on many impls
+ #   array = SynchronizedDelegator.new(array) # thread-safe
+ #
+ # A simple Mutex provides a very coarse-grained way to synchronize a given
+ # object, in that it will cause synchronization for methods that have no
+ # need for it, but this is a trivial way to get thread-safety where none may
+ # currently exist on some implementations.
+ #
+ # This class is currently being considered for inclusion into stdlib, via
+ # https://bugs.ruby-lang.org/issues/8556
+
+ require 'delegate'
+
+ unless defined?(SynchronizedDelegator)
+   class SynchronizedDelegator < SimpleDelegator
+     def initialize(*)
+       super
+       @mutex = Mutex.new
+     end
+
+     def method_missing(m, *args, &block)
+       begin
+         mutex = @mutex
+         mutex.lock
+         super
+       ensure
+         mutex.unlock
+       end
+     end
+   end
+ end
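A minimal usage sketch of SynchronizedDelegator (not part of the diff; the require path is assumed from the file layout listed above):

require 'thread_safe/synchronized_delegator'

array = SynchronizedDelegator.new([])   # every delegated call is wrapped in the Mutex

threads = 5.times.map do
  Thread.new { 100.times { |i| array << i } }
end
threads.each(&:join)

array.size   # => 500, no lost updates because each << ran under the lock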
data/lib/thread_safe/util.rb
@@ -0,0 +1,16 @@
+ module ThreadSafe
+   module Util
+     FIXNUM_BIT_SIZE = (0.size * 8) - 2
+     MAX_INT = (2 ** FIXNUM_BIT_SIZE) - 1
+     CPU_COUNT = 16 # is there a way to determine this?
+
+     autoload :AtomicReference, 'thread_safe/util/atomic_reference'
+     autoload :Adder, 'thread_safe/util/adder'
+     autoload :CheapLockable, 'thread_safe/util/cheap_lockable'
+     autoload :PowerOfTwoTuple, 'thread_safe/util/power_of_two_tuple'
+     autoload :Striped64, 'thread_safe/util/striped64'
+     autoload :Volatile, 'thread_safe/util/volatile'
+     autoload :VolatileTuple, 'thread_safe/util/volatile_tuple'
+     autoload :XorShiftRandom, 'thread_safe/util/xor_shift_random'
+   end
+ end
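Regarding the `# is there a way to determine this?` question next to CPU_COUNT above: a hedged sketch of how the value could be detected at load time (not part of the gem; `Etc.nprocessors` is only available on Ruby 2.2+, hence the fallback to the existing hard-coded guess):

require 'etc'

CPU_COUNT =
  if Etc.respond_to?(:nprocessors)
    Etc.nprocessors   # number of online processors (Ruby 2.2+)
  else
    16                # fall back to the gem's hard-coded value
  end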