asynchronic 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. data/lib/asynchronic.rb +0 -2
  2. data/lib/asynchronic/data_store/helper.rb +42 -0
  3. data/lib/asynchronic/data_store/in_memory.rb +18 -22
  4. data/lib/asynchronic/data_store/key.rb +19 -1
  5. data/lib/asynchronic/data_store/lazy_store.rb +17 -0
  6. data/lib/asynchronic/data_store/lazy_value.rb +34 -0
  7. data/lib/asynchronic/data_store/readonly_store.rb +17 -0
  8. data/lib/asynchronic/data_store/redis.rb +16 -27
  9. data/lib/asynchronic/data_store/scoped_store.rb +52 -0
  10. data/lib/asynchronic/environment.rb +7 -27
  11. data/lib/asynchronic/job.rb +15 -27
  12. data/lib/asynchronic/process.rb +105 -76
  13. data/lib/asynchronic/queue_engine/in_memory.rb +5 -1
  14. data/lib/asynchronic/queue_engine/ost.rb +5 -1
  15. data/lib/asynchronic/queue_engine/synchronic.rb +68 -0
  16. data/lib/asynchronic/transparent_proxy.rb +52 -0
  17. data/lib/asynchronic/version.rb +1 -1
  18. data/spec/data_store/data_store_examples.rb +48 -32
  19. data/spec/data_store/in_memory_spec.rb +5 -0
  20. data/spec/data_store/key_spec.rb +36 -12
  21. data/spec/data_store/lazy_value_examples.rb +38 -0
  22. data/spec/data_store/redis_spec.rb +17 -0
  23. data/spec/data_store/scoped_store_spec.rb +60 -0
  24. data/spec/expectations.rb +7 -7
  25. data/spec/facade_spec.rb +15 -13
  26. data/spec/jobs.rb +70 -49
  27. data/spec/minitest_helper.rb +11 -1
  28. data/spec/process/life_cycle_examples.rb +149 -135
  29. data/spec/queue_engine/synchronic_spec.rb +27 -0
  30. data/spec/transparent_proxy_spec.rb +36 -0
  31. data/spec/worker/worker_examples.rb +1 -1
  32. metadata +117 -79
  33. checksums.yaml +0 -7
  34. data/lib/asynchronic/data_store/lookup.rb +0 -27
  35. data/lib/asynchronic/hash.rb +0 -31
  36. data/lib/asynchronic/runtime.rb +0 -40
  37. data/spec/data_store/lookup_spec.rb +0 -92
data/lib/asynchronic.rb CHANGED
@@ -9,8 +9,6 @@ Dir.glob(File.expand_path('asynchronic/**/*.rb', File.dirname(__FILE__))).sort.e
 
 module Asynchronic
 
-  UUID_REGEXP = '[a-z\d]{8}-[a-z\d]{4}-[a-z\d]{4}-[a-z\d]{4}-[a-z\d]{12}'
-
   extend ClassConfig
 
   attr_config :default_queue, :asynchronic
data/lib/asynchronic/data_store/helper.rb ADDED
@@ -0,0 +1,42 @@
+module Asynchronic
+  module DataStore
+    module Helper
+
+      include Enumerable
+
+      def each
+        keys.each { |k| yield [k, self[k]] }
+        nil
+      end
+
+      def merge(hash)
+        hash.each { |k,v| self[k] = v }
+      end
+
+      def clear
+        keys.each { |k| delete k }
+      end
+
+      def scoped(key)
+        ScopedStore.new self, key
+      end
+
+      def readonly?
+        false
+      end
+
+      def readonly
+        ReadonlyStore.new self
+      end
+
+      def lazy?
+        false
+      end
+
+      def lazy
+        LazyStore.new self
+      end
+
+    end
+  end
+end
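The Helper mixin gives every data store one shared interface: Enumerable iteration over key/value pairs plus scoped, readonly and lazy wrappers. A minimal usage sketch, assuming the 0.2.0 InMemory store shown in a later hunk (the worker-side usage itself is not part of this diff):

  require 'asynchronic'

  store = Asynchronic::DataStore::InMemory.new
  store[:name] = 'example'                                # []= comes from the store itself
  store.each { |key, value| puts "#{key} => #{value}" }   # Enumerable, via Helper#each over keys
  store.merge other_key: 1                                 # Helper#merge writes each pair with []=
  store.readonly.readonly?                                 # => true; ReadonlyStore raises on []=
  store.lazy[:name]                                        # LazyValue; the read is deferred until first use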
data/lib/asynchronic/data_store/in_memory.rb CHANGED
@@ -2,44 +2,40 @@ module Asynchronic
   module DataStore
     class InMemory
 
-      def initialize
+      include Helper
+
+      def initialize(hash={})
         @hash = {}
         @mutex = Mutex.new
+        self.class.connections[object_id] = self
       end
 
-      def get(key)
+      def [](key)
         @hash[key.to_s]
       end
 
-      def set(key, value)
+      def []=(key, value)
         @mutex.synchronize { @hash[key.to_s] = value }
       end
 
-      def merge(key, hash)
-        scoped_key = Key.new key
-        hash.each do |k,v|
-          set scoped_key[k].to_s, v
-        end
+      def delete(key)
+        @hash.delete key.to_s
       end
 
-      def to_hash(key)
-        children_key = "#{key}:"
-        keys(children_key).inject({}) do |hash, k|
-          hash[k[children_key.size..-1]] = get k
-          hash
-        end
+      def keys
+        @hash.keys.map { |k| Key.new k }
       end
 
-      def keys(key=nil)
-        key ? keys.select { |k| k.start_with? key.to_s } : @hash.keys
+      alias_method :connection, :object_id
+
+      def self.connect(object_id)
+        connections[object_id]
       end
 
-      def clear(key=nil)
-        if key
-          @hash.delete_if { |k,v| k.start_with? key.to_s }
-        else
-          @hash.clear
-        end
+      private
+
+      def self.connections
+        @connections ||= {}
       end
 
     end
data/lib/asynchronic/data_store/key.rb CHANGED
@@ -2,12 +2,30 @@ module Asynchronic
   module DataStore
     class Key < String
 
+      SEPARATOR = '|'
+
       def initialize(key)
         super key.to_s
       end
 
       def [](key)
-        self.class.new "#{self}:#{key}"
+        self.class.new [self,key].join(SEPARATOR)
+      end
+
+      def sections
+        split SEPARATOR
+      end
+
+      def nested?
+        sections.count > 1
+      end
+
+      def remove_first(count=1)
+        self.class.new sections[count..-1].join(SEPARATOR)
+      end
+
+      def remove_last(count=1)
+        self.class.new sections[0..-count-1].join(SEPARATOR)
       end
 
     end
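Keys now nest with the '|' separator instead of ':' and can be inspected and trimmed section by section. A short sketch of the new behaviour, as implied by the hunk above:

  require 'asynchronic'

  key = Asynchronic::DataStore::Key.new(:process)[:uuid][:status]
  key                 # => "process|uuid|status"
  key.sections        # => ["process", "uuid", "status"]
  key.nested?         # => true
  key.remove_first    # => "uuid|status"
  key.remove_last 2   # => "process"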
data/lib/asynchronic/data_store/lazy_store.rb ADDED
@@ -0,0 +1,17 @@
+require_relative '../transparent_proxy'
+
+module Asynchronic
+  module DataStore
+    class LazyStore < TransparentProxy
+
+      def [](key)
+        LazyValue.new __getobj__, key
+      end
+
+      def lazy?
+        true
+      end
+
+    end
+  end
+end
data/lib/asynchronic/data_store/lazy_value.rb ADDED
@@ -0,0 +1,34 @@
+require_relative '../transparent_proxy'
+
+module Asynchronic
+  module DataStore
+    class LazyValue < TransparentProxy
+
+      def initialize(data_store, key)
+        @data_store_class = data_store.class
+        @data_store_connection = data_store.connection
+        @key = key
+      end
+
+      def reload
+        @value = nil
+        self
+      end
+
+      def inspect
+        "#<#{proxy_class} @data_store_class=#{@data_store_class} @data_store_connection=#{@data_store_connection} @key=#{@key}>"
+      end
+
+      def data_store
+        @data_store_class.connect @data_store_connection
+      end
+
+      private
+
+      def __getobj__
+        @value ||= data_store[@key]
+      end
+
+    end
+  end
+end
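A LazyValue never holds the value itself, only the data store class, its connection and the key; the read happens on first use through TransparentProxy delegation (the proxy lives in data/lib/asynchronic/transparent_proxy.rb, not shown in this section, so that behaviour is assumed here). A sketch under that assumption:

  require 'asynchronic'

  store = Asynchronic::DataStore::InMemory.new
  store[:answer] = 42

  value = store.lazy[:answer]   # LazyValue; nothing has been read yet
  value + 1                     # => 43, fetched from the store on first use
  value.reload                  # drops the memoized value; the next use re-reads it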
data/lib/asynchronic/data_store/readonly_store.rb ADDED
@@ -0,0 +1,17 @@
+require_relative '../transparent_proxy'
+
+module Asynchronic
+  module DataStore
+    class ReadonlyStore < TransparentProxy
+
+      def []=(key, value)
+        raise "Can't modify read only data store"
+      end
+
+      def readonly?
+        true
+      end
+
+    end
+  end
+end
data/lib/asynchronic/data_store/redis.rb CHANGED
@@ -2,49 +2,38 @@ module Asynchronic
   module DataStore
     class Redis
 
-      attr_reader :connection
+      include Helper
 
       def initialize(*args)
         @connection = ::Redis.new *args
       end
 
-      def get(key)
-        value = connection.get root[key]
+      def [](key)
+        value = @connection.get key.to_s
         value ? Marshal.load(value) : nil
+      rescue => ex
+        Asynchronic.logger.warn('Asynchronic') { ex.message }
+        value
       end
 
-      def set(key, value)
-        connection.set root[key], Marshal.dump(value)
+      def []=(key, value)
+        @connection.set key.to_s, Marshal.dump(value)
       end
 
-      def merge(key, hash)
-        scoped_key = Key.new key
-        hash.each do |k,v|
-          set scoped_key[k], v
-        end
+      def delete(key)
+        @connection.del key.to_s
       end
 
-      def to_hash(key)
-        children_key = "#{key}:"
-        keys(children_key).inject({}) do |hash, k|
-          hash[k[children_key.size..-1]] = get k
-          hash
-        end
+      def keys
+        @connection.keys.map { |k| Key.new k }
       end
 
-      def keys(key=nil)
-        keys = key ? connection.keys("#{root[key]}*") : connection.keys
-        keys.map { |k| k[(root.size + 1)..-1] }
+      def connection
+        @connection.client.options
      end
 
-      def clear(key=nil)
-        keys(key).each { |k| connection.del root[k] }
-      end
-
-      private
-
-      def root
-        Key.new :asynchronic
+      def self.connect(options)
+        new options
       end
 
     end
data/lib/asynchronic/data_store/scoped_store.rb ADDED
@@ -0,0 +1,52 @@
+module Asynchronic
+  module DataStore
+    class ScopedStore
+
+      include Helper
+
+      attr_reader :data_store
+      attr_reader :scope
+
+      def initialize(data_store, scope)
+        @data_store = data_store
+        @scope = Key.new scope
+      end
+
+      def [](key)
+        @data_store[@scope[key]]
+      end
+
+      def []=(key, value)
+        @data_store[@scope[key]] = value
+      end
+
+      def delete(key)
+        @data_store.delete @scope[key]
+      end
+
+      def keys
+        @data_store.keys.
+          select { |k| k.start_with? @scope[''] }.
+          map { |k| Key.new(k).remove_first @scope.sections.count }
+      end
+
+      def connection
+        {
+          data_store_class: @data_store.class,
+          data_store_connection: @data_store.connection,
+          scope: @scope
+        }
+      end
+
+      def self.connect(options)
+        data_store = options[:data_store_class].connect options[:data_store_connection]
+        new data_store, options[:scope]
+      end
+
+      def to_s
+        "#<#{self.class} @data_store=#{@data_store} @scope=#{@scope}>"
+      end
+
+    end
+  end
+end
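ScopedStore prefixes every key with its scope, and the connection/connect pair round-trips a store through a plain options hash, which is what lets scoped stores and lazy values be rebuilt in another process. A minimal sketch using the InMemory store from this diff:

  require 'asynchronic'

  store  = Asynchronic::DataStore::InMemory.new
  scoped = Asynchronic::DataStore::ScopedStore.new store, :job_1

  scoped[:input] = 10            # stored in the underlying store as "job_1|input"
  scoped.keys                    # => ["input"]

  copy = Asynchronic::DataStore::ScopedStore.connect scoped.connection
  copy[:input]                   # => 10, same underlying store and scope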
data/lib/asynchronic/environment.rb CHANGED
@@ -6,15 +6,7 @@ module Asynchronic
 
     def initialize(queue_engine, data_store)
       @queue_engine = queue_engine
-      @data_store = data_store
-    end
-
-    def [](key)
-      data_store.get key
-    end
-
-    def []=(key, value)
-      data_store.set key, value
+      @data_store = data_store.scoped :asynchronic
     end
 
     def queue(name)
@@ -29,28 +21,16 @@ module Asynchronic
       queue(queue || queue_engine.default_queue).push msg
     end
 
-    def build_job(job_class, options={})
-      Asynchronic.logger.debug('Asynchronic') { "Building job #{job_class} - #{options}" }
-      job_class.new(options).tap do |job|
-        self[job.lookup.id] = job
-        self[job.lookup.created_at] = Time.now
-      end
+    def create_process(type, params={})
+      Process.create self, type, params
     end
 
-    def build_process(job_class, options={})
-      Process.new build_job(job_class, options), self
+    def load_process(id)
+      Process.new self, id
     end
-
-    def load_process(pid)
-      Process.new self[pid], self
-    end
-
+
     def processes
-      data_store.keys.
-        select { |k| k.match Regexp.new("job:#{Asynchronic::UUID_REGEXP}:created_at$") }.
-        sort_by {|k| data_store.get k }.
-        reverse.
-        map { |k| load_process k.gsub(':created_at', '') }
+      Process.all self
     end
 
   end
data/lib/asynchronic/job.rb CHANGED
@@ -1,45 +1,33 @@
 module Asynchronic
   class Job
 
-    attr_reader :id
-    attr_reader :name
-    attr_reader :queue
-    attr_reader :parent
-    attr_reader :dependencies
-    attr_reader :local
-
-    def initialize(options={})
-      @id = SecureRandom.uuid
-      @name = options.key?(:alias) ? options[:alias].to_s : self.class.to_s
-      @queue = options[:queue] || self.class.queue
-      @parent = options[:parent]
-      @dependencies = Array(options[:dependencies] || options[:dependency]).map(&:to_s)
-      @local = options[:local] || {}
-
-      raise 'Cant have dependencies without parent job' if dependencies.any? && parent.nil?
+    def initialize(process)
+      @process = process
     end
 
-    def lookup
-      DataStore::Lookup.new self
+    def params
+      @process.params
     end
 
-    def self.queue(queue=nil)
-      queue ? @queue = queue : @queue
+    def result(reference)
+      @process[reference].result
     end
 
-    def self.implementation
-      @implementation
+    def self.queue(name=nil)
+      name ? @queue = name : @queue
     end
 
-    def self.enqueue(data={})
-      process = Asynchronic.environment.build_process self
-      process.enqueue data
+    def self.enqueue(params={})
+      process = Asynchronic.environment.create_process self, params
+      process.enqueue
+      process.id
     end
 
     private
 
-    def self.define(&block)
-      @implementation = block
+    def async(type, params={})
+      @process.nest type, params
+      nil
     end
 
   end
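Job stops being a standalone data object (id, name, queue, dependencies) and becomes a thin wrapper around its Process: params and result read through the process, enqueue returns the new process id, and the private async replaces the old Job.define block for nesting child work. A hedged sketch of what a 0.2.0 job class might look like; the method the worker ends up invoking (written here as call) and the ChildJob class are assumptions for illustration, not part of this hunk:

  class ChildJob < Asynchronic::Job
    def call
      params[:value] * 2
    end
  end

  class SumJob < Asynchronic::Job
    queue :math                             # class-level queue, per self.queue above

    def call
      async ChildJob, value: params[:a]     # nests a child process on this one
      params[:a] + params[:b]
    end
  end

  pid = SumJob.enqueue a: 1, b: 2           # creates and enqueues a process, returns its id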