execache 0.1.10 → 0.2.0
- data/README.md +4 -8
- data/execache.gemspec +1 -1
- data/lib/execache.rb +77 -65
- data/spec/execache_spec.rb +10 -27
- data/spec/fixtures/execache.yml +1 -0
- metadata +7 -7
data/README.md
CHANGED
@@ -32,6 +32,7 @@ Configure
 Given the above example, our `execache.yml` looks like this:

 redis: localhost:6379/0
+parallel: 3
 some_binary:
   command: '/bin/some/binary'
   separators:
@@ -52,17 +53,12 @@ Execute Commands
 client = Execache::Client.new("localhost:6379/0")

 results = client.exec(
+  :ttl => 60,
   :some_binary => {
     :args => 'preliminary_arg',
     :groups => [
-      {
-        :args => 'arg1a arg1b',
-        :ttl => 60
-      },
-      {
-        :args => 'arg2a arg2b',
-        :ttl => 60
-      }
+      'arg1a arg1b',
+      'arg2a arg2b'
     ]
   }
 )
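Read together, the two hunks above change the gem's public surface in two ways: `execache.yml` gains a top-level `parallel` setting (the maximum number of commands run concurrently, defaulting to 3 in the server code below), and `Execache::Client#exec` now takes a single request-level `:ttl` while each group shrinks to a plain argument string instead of an `{ :args => ..., :ttl => ... }` hash. A minimal usage sketch of the 0.2.0 shape, assembled from the diff (the binary name and argument strings are the README's example values; the `require` line is an assumption):

    require 'execache'   # assumed entry point for the gem

    client = Execache::Client.new("localhost:6379/0")

    results = client.exec(
      :ttl => 60,                     # one TTL for the whole request (was per-group in 0.1.x)
      :some_binary => {
        :args   => 'preliminary_arg', # prepended to every group's arguments
        :groups => [                  # groups are now plain argument strings
          'arg1a arg1b',
          'arg2a arg2b'
        ]
      }
    )
    # results => { "some_binary" => [ <parsed output for each group> ] }, per the specs below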
data/execache.gemspec
CHANGED
data/lib/execache.rb
CHANGED
@@ -16,6 +16,7 @@ class Execache

   def initialize(yaml)
     options = YAML.load(File.read(yaml))
+    parallel = options['parallel'] || 3

     puts "\nStarting execache server (redis @ #{options['redis']})..."

@@ -24,100 +25,111 @@ class Execache

     begin
       while true
-        request = redis.lpop('execache:request')
-        if request
+        if request = redis.lpop('execache:request')
           Timeout.timeout(60) do
             request = Yajl::Parser.parse(request)
+
+            # Options
+            global_cache_key = request.delete('cache_key')
             channel = request.delete('channel')
-            force = request.delete('
-
+            force = request.delete('force')
+            ttl = request.delete('ttl')
+
             pending = false
+            results = {}

             request.each do |cmd_type, cmd_options|
-
+              cache_keys = []
+              groups = []
+
+              # Binary + preliminary arguments
               command = [
                 options[cmd_type]['command'],
                 cmd_options['args']
-              ]
+              ].join(' ')

-              #
-              cmd_options['groups'].each do |
+              # For each argument group...
+              cmd_options['groups'].each do |args|
                 cache_key = Digest::SHA1.hexdigest(
-                  "#{
+                  "#{global_cache_key || command} #{args}"
                 )
-
+                cache_key = "execache:cache:#{cache_key}"
                 cache = redis.get(cache_key)
-
-
+
+                # If force cache overwrite || no cache || pending cache
+                if force || !cache || cache == '[PENDING]'
                   pending = true
-
-
-                  group['result'] = Yajl::Parser.parse(cache)
+
+                # Else, store cache result
                 else
-
+                  results[cmd_type] ||= []
+                  results[cmd_type] << Yajl::Parser.parse(cache)
+                end
+
+                # If force cache overwrite || no cache
+                if force || !cache
                   redis.set(cache_key, '[PENDING]')
                   redis.expire(cache_key, 60) # Timeout incase execution fails
-
+
+                  cache_keys << cache_key
+                  groups << args
                 end
               end

-              # Add
-
-
+              # Add to command queue if commands present
+              unless groups.empty?
+                command = {
+                  :cache_keys => cache_keys,
+                  :cmd_type => cmd_type,
+                  :command => command,
+                  :groups => groups,
+                  :ttl => ttl
+                }
+                redis.rpush("execache:commands", Yajl::Encoder.encode(command))
               end
             end
+
+            redis.publish(
+              "execache:response:#{channel}",
+              pending ? '[PENDING]' : Yajl::Encoder.encode(results)
+            )
+          end
+        end

-
-
-
-
-
-
-              if cmd_options['cmd']
-                redis.set('execache:wait', '1')
-                redis.expire('execache:wait', 120)
-                separators = options[cmd_type]['separators'] || {}
-                separators['group'] ||= "[END]"
-                separators['result'] ||= "\n"
-                output = `#{cmd_options['cmd']}`
-                output = output.split(separators['group'] + separators['result'])
-                output = output.collect { |r| r.split(separators['result']) }
-                redis.del('execache:wait')
-              end
-
-              cmd_options['groups'].each do |group|
-                unless group['result']
-                  redis.set(
-                    group['cache_key'],
-                    Yajl::Encoder.encode(output.shift)
-                  )
-                  if group['ttl']
-                    redis.expire(group['cache_key'], group['ttl'])
-                  end
-                end
-              end
-            end
-          end
-          end
-        end
-        else
-          response = request.inject({}) do |hash, (cmd_type, cmd_options)|
-            hash[cmd_type] = []
+        # Execute queued commands
+        if redis.get("execache:parallel").to_i <= parallel && cmd = redis.lpop("execache:commands")
+          redis.incr("execache:parallel")
+          Thread.new do
+            Timeout.timeout(60) do
+              cmd = Yajl::Parser.parse(cmd)

-
-
-
+              cache_keys = cmd['cache_keys']
+              cmd_type = cmd['cmd_type']
+              command = cmd['command']
+              groups = cmd['groups']
+              ttl = cmd['ttl']
+
+              separators = options[cmd_type]['separators'] || {}
+              separators['group'] ||= "[END]"
+              separators['result'] ||= "\n"
+
+              results = `#{command} #{groups.join(' ')}`
+              results = results.split(separators['group'] + separators['result'])
+              results = results.collect { |r| r.split(separators['result']) }

-
+              redis.decr("execache:parallel")
+
+              results.each_with_index do |result, i|
+                redis.set(
+                  cache_keys[i],
+                  Yajl::Encoder.encode(result)
+                )
+                redis.expire(cache_keys[i], ttl) if ttl
              end
            end
-
-            redis.publish(
-              "execache:response:#{channel}",
-              pending ? '[PENDING]' : Yajl::Encoder.encode(response)
-            )
          end
        end
+
        sleep(1.0 / 1000.0)
      end
    rescue Interrupt
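The net effect of the rewrite above: execution no longer happens inline while a request is parsed. The request branch only resolves caches (`execache:cache:<sha1>` keys), marks misses as `[PENDING]`, and enqueues one job per command type on the `execache:commands` list; a second branch of the same loop pops that list and shells out in a background thread whenever the `execache:parallel` counter is at or below the configured limit. A condensed sketch of the two halves, using only the keys and payload fields visible in the diff (the literal command and argument values are illustrative, not the gem's code):

    require 'digest/sha1'
    require 'yajl'

    # Request side: one cache key per argument group, derived as in the diff.
    command   = '/bin/some/binary preliminary_arg'
    args      = 'arg1a arg1b'
    cache_key = "execache:cache:#{Digest::SHA1.hexdigest("#{command} #{args}")}"

    # Groups without a fresh cache entry are set to '[PENDING]' and bundled into
    # one job per command type; this payload is what
    # redis.rpush("execache:commands", ...) receives.
    job = {
      :cache_keys => [cache_key],
      :cmd_type   => 'some_binary',
      :command    => command,
      :groups     => [args],
      :ttl        => 60
    }
    payload = Yajl::Encoder.encode(job)

    # Worker side: a thread pops the job, runs `#{command} #{groups.join(' ')}`
    # (here: /bin/some/binary preliminary_arg arg1a arg1b), splits stdout on the
    # group/result separators, writes one cache entry per group (expiring after
    # :ttl seconds), and decrements execache:parallel when done.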
data/spec/execache_spec.rb
CHANGED
@@ -5,17 +5,12 @@ describe Execache do
   def client_exec(options={})
     @client.exec(
       {
+        :ttl => 60,
         :some_binary => {
           :args => 'preliminary_arg',
           :groups => [
-            {
-              :args => 'arg1a arg1b',
-              :ttl => 60
-            },
-            {
-              :args => 'arg2a arg2b',
-              :ttl => 60
-            }
+            'arg1a arg1b',
+            'arg2a arg2b'
           ]
         }
       }.merge(options)
@@ -27,7 +22,7 @@ describe Execache do
       Execache.new("#{$root}/spec/fixtures/execache.yml")
     end
     @client = Execache::Client.new("localhost:6379/0")
-    @client.redis_1.keys("execache
+    @client.redis_1.keys("execache:*").each do |key|
       @client.redis_1.del(key)
     end
   end
@@ -64,14 +59,10 @@ describe Execache do

   it "should read from cache for individual groups" do
     @client.exec(
+      :ttl => 60,
       :some_binary => {
         :args => 'preliminary_arg',
-        :groups => [
-          {
-            :args => 'arg2a arg2b',
-            :ttl => 60
-          }
-        ]
+        :groups => [ 'arg2a arg2b' ]
       }
     ).should == {
       "some_binary" => [
@@ -80,14 +71,10 @@ describe Execache do
     }

     @client.exec(
+      :ttl => 60,
       :some_binary => {
         :args => 'preliminary_arg',
-        :groups => [
-          {
-            :args => 'arg1a arg1b',
-            :ttl => 60
-          }
-        ]
+        :groups => [ 'arg1a arg1b' ]
       }
     ).should == {
       "some_binary" => [
@@ -98,14 +85,10 @@ describe Execache do

   it "should not read cache if preliminary arg changes" do
     @client.exec(
+      :ttl => 60,
       :some_binary => {
         :args => 'preliminary_arg2',
-        :groups => [
-          {
-            :args => 'arg2a arg2b',
-            :ttl => 60
-          }
-        ]
+        :groups => [ 'arg2a arg2b' ]
       }
     ).should == {
       "some_binary" => [
data/spec/fixtures/execache.yml
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: execache
 version: !ruby/object:Gem::Version
-  version: 0.1.10
+  version: 0.2.0
 prerelease:
 platform: ruby
 authors:
@@ -13,7 +13,7 @@ date: 2011-11-18 00:00:00.000000000Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
-  requirement: &
+  requirement: &70159473617000 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -21,10 +21,10 @@ dependencies:
         version: '1.0'
   type: :development
   prerelease: false
-  version_requirements: *
+  version_requirements: *70159473617000
 - !ruby/object:Gem::Dependency
   name: redis
-  requirement: &
+  requirement: &70159473616280 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -32,10 +32,10 @@ dependencies:
         version: 2.2.2
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70159473616280
 - !ruby/object:Gem::Dependency
   name: yajl-ruby
-  requirement: &
+  requirement: &70159473615440 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -43,7 +43,7 @@ dependencies:
         version: 1.0.0
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70159473615440
 description: Run commands in parallel and cache the output. Redis queues jobs and
   stores the result.
 email: