simple_worker 0.3.15 → 0.3.16
- data/README.markdown +90 -10
- data/lib/rails2_init.rb +7 -0
- data/lib/railtie.rb +23 -0
- data/lib/simple_worker.rb +24 -172
- data/lib/simple_worker/base.rb +188 -157
- data/lib/simple_worker/config.rb +19 -13
- data/lib/simple_worker/service.rb +170 -0
- data/lib/simple_worker/used_in_worker.rb +3 -0
- data/rails/init.rb +0 -0
- data/test/requiring_worker.rb +4 -0
- data/test/test_base.rb +26 -22
- data/test/test_simple_worker.rb +32 -88
- metadata +9 -3
data/README.markdown
CHANGED
@@ -1,8 +1,10 @@
-Using Simple Worker
-
 Getting Started
 ===============
 
+[Sign up for a SimpleWorker account][1], it's free to try!
+
+[1]: http://www.simpleworker.com/
+
 Configure SimpleWorker
 ----------------------
 
@@ -68,9 +70,43 @@ action in your application. This is almost the same as queuing your worker.
     worker.body = "This is the body"
     **worker.schedule(:start_at=>1.hours.since)**
 
+Check Status
+------------
+
+If you still have access to the worker object, just call:
+
+    worker.status
+
+If you only have the job ID, call:
+
+    SimpleWorker.status(job_id)
+
+This will return a hash like:
+
+    {"task_id"=>"ece460ce-12d8-11e0-8e15-12313b0440c6",
+     "status"=>"running",
+     "msg"=>nil,
+     "start_time"=>"2010-12-28T23:19:36+00:00",
+     "end_time"=>nil,
+     "duration"=>nil,
+     "progress"=>{"percent"=>25}}
+
+TODO: How to access log.
+
+Logging
+-------
+
+    log "Starting to do something..."
+
+
+Setting Progress
+----------------
+
+    set_progress(:percent => progress, :message => "Server running. Trying to connect...")
+
 
 
-Schedule
+Schedule a Recurring Job - CRON
 ------------------------------
 
 The alternative is when you want to user it like Cron. In this case you'll probably
@@ -80,6 +116,7 @@ app starts or anything so best to keep it external.
 Create a file called 'schedule_email_worker.rb' and add this:
 
     require 'simple_worker'
+    require_relative 'email_worker'
 
     worker = EmailWorker.new
     worker.to = current_user.email
@@ -92,16 +129,59 @@ Now run it and your worker will be scheduled to run every hour.
 SimpleWorker on Rails
 ---------------------
 
-
+Rails 2.X:
 
-
+    config.gem 'simple_worker'
 
-
-- In application.rb, uncomment config.autoload_paths and put:
+Rails 3.X:
 
-
+    gem 'simple_worker'
+
+Now you can use your workers like they're part of your app! We recommend putting your worker classes in
+/app/workers path.
+
+Configuring a Database Connection
+---------------------------------
+
+Although you could easily do this in your worker, this makes it a bit more convenient and more importantly
+it will create the connection for you. If you are using ActiveRecord, you would add the following to your
+SimpleWorker config:
+
+    config.database = {
+      :adapter => "mysql2",
+      :host => "localhost",
+      :database => "appdb",
+      :username => "appuser",
+      :password => "secret"
+    }
+
+Then before you job is run, SimpleWorker will establish the ActiveRecord connection.
+
+Including/Merging other Ruby Classes
+------------------------------------
+
+If you are using the Rails setup above, you can probably skip this as your models will automatically be merged.
+
+    class AvgWorker < SimpleWorker::Base
+
+      attr_accessor :aws_access_key,
+                    :aws_secret_key,
+                    :s3_suffix
+
+      merge File.join(File.dirname(__FILE__), "..", "app", "models", "user.rb")
+      merge File.join(File.dirname(__FILE__), "..", "app", "models", "account")
+
+Or simpler yet, try using relative paths:
+
+    merge "../app/models/user"
+    merge "../app/models/account.rb"
+
+
+Bringing in other Workers
+---------------------
 
-
+    merge_worker
+TODO
 
 
 Configuration Options
@@ -118,7 +198,7 @@ Eg:
     config.global_attributes[:db_user] = "sa"
     config.global_attributes[:db_pass] = "pass"
 
-Then in your worker, you
+Then in your worker, you would have the attributes defined:
 
     attr_accessor :db_user, :db_pass
 
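
Putting the README snippets above together, a minimal end-to-end sketch of the flow this version documents (EmailWorker and its attributes are hypothetical placeholders; configure, queue, status, schedule, log and set_progress are the calls shown in this diff):

    require 'simple_worker'

    SimpleWorker.configure do |config|
      config.access_key = 'ACCESS_KEY'   # placeholder credentials
      config.secret_key = 'SECRET_KEY'
    end

    # A worker is just a subclass of SimpleWorker::Base with plain attributes.
    class EmailWorker < SimpleWorker::Base
      attr_accessor :to, :subject

      def run
        log "Emailing #{to}..."            # goes to the SimpleWorker log
        set_progress(:percent => 100)
      end
    end

    worker = EmailWorker.new
    worker.to = "user@example.com"
    worker.subject = "Welcome"
    worker.queue                           # run once, in the background
    puts worker.status.inspect             # {"task_id"=>..., "status"=>...}
    # worker.schedule(:start_at => 1.hours.since, :run_every => 3600)  # or cron-style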
data/lib/rails2_init.rb
ADDED
data/lib/railtie.rb
ADDED
@@ -0,0 +1,23 @@
+# see http://api.rubyonrails.org/classes/Rails/Railtie.html
+
+require 'simple_worker'
+require 'rails'
+
+module SimpleWorker
+  class Railtie < Rails::Railtie
+    railtie_name :simple_worker
+
+    initializer "simple_worker.configure_rails_initialization" do |app|
+      puts 'railtie'
+      puts "Initializing list of Rails models..."
+      SimpleWorker.configure do |c2|
+        # path = File.join(File.dirname(caller[0]), '..', 'app/models/*.rb')
+        path = File.join(Rails.root, 'app/models/*.rb')
+        puts 'path=' + path
+        c2.models = Dir.glob(path)
+        puts 'config.models=' + c2.models.inspect
+      end
+
+    end
+  end
+end
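
For context, the net effect of this railtie is simply to pre-populate SimpleWorker.config.models with the Rails app's model files, which base.rb later appends to the list of files it merges into an uploaded worker. A rough sketch of the equivalent manual setup (paths are illustrative):

    SimpleWorker.configure do |c|
      c.models = Dir.glob(File.join(Rails.root, 'app/models/*.rb'))
      # e.g. ["<rails_root>/app/models/user.rb", "<rails_root>/app/models/account.rb"]
    end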
data/lib/simple_worker.rb
CHANGED
@@ -1,180 +1,32 @@
 require 'appoxy_api'
 require 'active_support/core_ext'
-
-
-
+require_relative 'simple_worker/service'
+require_relative 'simple_worker/base'
+require_relative 'simple_worker/config'
+require_relative 'simple_worker/used_in_worker'
 
 
 module SimpleWorker
 
-
-
-
+  class << self
+    attr_accessor :config,
+                  :service
 
-
-
-
-
-  end
+    def configure()
+      SimpleWorker.config ||= Config.new
+      yield(config)
+      SimpleWorker.service = Service.new(config.access_key, config.secret_key, :config=>config)
     end
-
-
-
-
-
-
-
-
-
-
-
-
-
-  # - :callback_url
-  # - :merge => array of files to merge in with this file
-  def upload(filename, class_name, options={})
-
-    # check whether it should upload again
-    tmp = Dir.tmpdir()
-    # puts 'tmp=' + tmp.to_s
-    md5file = "simple_workr_#{class_name.gsub("::", ".")}.md5"
-    existing_md5 = nil
-    f = File.join(tmp, md5file)
-    if File.exists?(f)
-      existing_md5 = IO.read(f)
-      # puts 'existing_md5=' + existing_md5
-    end
-
-    filename = build_merged_file(filename, options[:merge]) if options[:merge]
-
-    # sys.classes[subclass].__file__
-    # puts '__FILE__=' + Base.subclass.__file__.to_s
-    md5 = Digest::MD5.hexdigest(File.read(filename))
-    # puts "new md5=" + md5
-
-    if md5 != existing_md5
-      puts "#{class_name}: new code, so uploading"
-      File.open(f, 'w') { |f| f.write(md5) }
-    else
-      puts "#{class_name}: same code, not uploading"
-    end
-
-    mystring = nil
-    file = File.open(filename, "r") do |f|
-      mystring = f.read
-    end
-    options = {"code"=>mystring, "class_name"=>class_name}
-    ret = post("code/put", options)
-    ret
-  end
-
-  def build_merged_file(filename, merge)
-    merge = merge.dup
-    merge << filename
-    fname2 = File.join(Dir.tmpdir(), File.basename(filename))
-    # puts 'fname2=' + fname2
-    # puts 'merged_file_array=' + merge.inspect
-    File.open(fname2, "w") do |f|
-      merge.each do |m|
-        f.write File.open(m, 'r') { |mo| mo.read }
-        f.write "\n\n"
-      end
-    end
-    fname2
-  end
-
-  def add_sw_params(hash_to_send)
-    # todo: remove secret key?? Can use worker service from within a worker without it now
-    hash_to_send["sw_access_key"] = self.access_key
-    hash_to_send["sw_secret_key"] = self.secret_key
-  end
-
-  # class_name: The class name of a previously upload class, eg: MySuperWorker
-  # data: Arbitrary hash of your own data that your task will need to run.
-  def queue(class_name, data={})
-    if !data.is_a?(Array)
-      data = [data]
-    end
-    hash_to_send = {}
-    hash_to_send["payload"] = data
-    hash_to_send["class_name"] = class_name
-    add_sw_params(hash_to_send)
-    if defined?(RAILS_ENV)
-      hash_to_send["rails_env"] = RAILS_ENV
-    end
-    return queue_raw(class_name, hash_to_send)
-
-  end
-
-  def queue_raw(class_name, data={})
-    params = nil
-    hash_to_send = data
-    hash_to_send["class_name"] = class_name
-    ret = post("queue/add", hash_to_send)
-    ret
-
-  end
-
-
-  #
-  # schedule: hash of scheduling options that can include:
-  # Required:
-  # - start_at: Time of first run - DateTime or Time object.
-  # Optional:
-  # - run_every: Time in seconds between runs. If ommitted, task will only run once.
-  # - delay_type: Fixed Rate or Fixed Delay. Default is fixed_delay.
-  # - end_at: Scheduled task will stop running after this date (optional, if ommitted, runs forever or until cancelled)
-  # - run_times: Task will run exactly :run_times. For instance if :run_times is 5, then the task will run 5 times.
-  #
-  def schedule(class_name, data, schedule)
-    raise "Schedule must be a hash." if !schedule.is_a? Hash
-    # if !data.is_a?(Array)
-    # data = [data]
-    # end
-    hash_to_send = {}
-    hash_to_send["payload"] = data
-    hash_to_send["class_name"] = class_name
-    hash_to_send["schedule"] = schedule
-    add_sw_params(hash_to_send)
-    # puts 'about to send ' + hash_to_send.inspect
-    ret = post("scheduler/schedule", hash_to_send)
-    ret
-  end
-
-  def cancel_schedule(scheduled_task_id)
-    raise "Must include a schedule id." if scheduled_task_id.blank?
-    hash_to_send = {}
-    hash_to_send["scheduled_task_id"] = scheduled_task_id
-    ret = post("scheduler/cancel", hash_to_send)
-    ret
-  end
-
-  def get_schedules()
-    hash_to_send = {}
-    ret = get("scheduler/list", hash_to_send)
-    ret
-  end
-
-  def status(task_id)
-    data = {"task_id"=>task_id}
-    ret = get("task/status", data)
-    ret
-  end
-
-  def schedule_status(schedule_id)
-    data = {"schedule_id"=>schedule_id}
-    ret = get("scheduler/status", data)
-    ret
-  end
-
-  def log(task_id)
-    data = {"task_id"=>task_id}
-    ret = get("task/log", data)
-    # puts 'ret=' + ret.inspect
-    # ret["log"] = Base64.decode64(ret["log"])
-    ret
-  end
-
-
-  end
-end
+  end
+
+end
+
+if defined?(Rails)
+  puts 'Rails=' + Rails.inspect
+  puts 'vers=' + Rails::VERSION::MAJOR.inspect
+  if Rails::VERSION::MAJOR == 2
+    require_relative 'rails2_init.rb'
+  else
+    require_relative 'railtie'
+  end
+end
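
The module-level API methods that used to live here (upload, queue, schedule, status, log, and so on) have moved to the new SimpleWorker::Service class; the slimmed-down module now only wires a Config to a Service. A minimal sketch of the new entry point (keys are placeholders):

    require 'simple_worker'

    SimpleWorker.configure do |config|
      config.access_key = 'ACCESS_KEY'
      config.secret_key = 'SECRET_KEY'
    end

    SimpleWorker.config    # => the SimpleWorker::Config built above
    SimpleWorker.service   # => a SimpleWorker::Service client created from that config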
data/lib/simple_worker/base.rb
CHANGED
@@ -4,208 +4,239 @@ require 'digest/md5'
 
 module SimpleWorker
 
-
+  class Base
 
-
+    attr_accessor :task_set_id, :task_id, :schedule_id
 
-
-
-
-
+    class << self
+      attr_accessor :subclass, :caller_file
+      @merged = []
+      @merged_workers = []
 
-
-
-
-
+      def reset!
+        @merged = []
+        @merged_workers = []
+      end
 
-
-
+      def inherited(subclass)
+        subclass.reset!
 
         # puts "subclass.inspect=" + subclass.inspect
         # puts 'existing caller=' + (subclass.instance_variable_defined?(:@caller_file) ? subclass.instance_variable_get(:@caller_file).inspect : "nil")
         # puts "caller=" + caller.inspect
         # splits = caller[0].split(":")
         # caller_file = splits[0] + ":" + splits[1]
-
-
+        caller_file = caller[0][0...(caller[0].index(":in"))]
+        caller_file = caller_file[0...(caller_file.rindex(":"))]
         # puts 'caller_file=' + caller_file
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # don't need these class_variables anymore probably
+        subclass.instance_variable_set(:@caller_file, caller_file)
+
+        super
+      end
+
+      # merges the specified files.
+      # todo: don't allow multiple files per merge, just one like require
+      def merge(*files)
+        files.each do |f|
+          f = f.to_str
+          unless ends_with?(f, ".rb")
+            f << ".rb"
+          end
+          exists = false
+          if File.exist? f
+            exists = true
+          else
+            # try relative
+            f2 = File.join(File.dirname(caller[0]), f)
+            if File.exist? f2
+              exists = true
+              f = f2
             end
-
-
-
-
-
-
-      end
+          end
+          unless exists
+            raise "File not found: " + f
+          end
+          require f
+          @merged << File.expand_path(f)
+        end
+      end
+
+      def ends_with?(s, suffix)
+        suffix = suffix.to_s
+        s[-suffix.length, suffix.length] == suffix
+      end
+
+      def merge_worker(file, class_name)
+        puts 'merge_worker in ' + self.name
+        merge(file)
+        @merged_workers << [File.expand_path(file), class_name]
+      end
+    end
 
 
-
-
-
+    def log(str)
+      puts str.to_s
+    end
 
-
-
-
+    def user_dir
+      "."
+    end
 
-
-
-
+    def set_progress(hash)
+      puts 'set_progress: ' + hash.inspect
+    end
 
-
-
-
+    def who_am_i?
+      return self.class.name
+    end
 
-
-
-
+    def uploaded?
+      self.class.instance_variable_defined?(:@uploaded) && self.class.instance_variable_get(:@uploaded)
+    end
 
-
-
+    # Call this if you want to run locally and get some extra features from this gem like global attributes.
+    def run_local
       # puts 'run_local'
-
-
-
+      set_auto_attributes
+      run
+    end
+
+    def set_auto_attributes
+      set_global_attributes
+    end
 
-
-
-
-
+    def set_global_attributes
+      ga = SimpleWorker.config.global_attributes
+      if ga && ga.size > 0
+        ga.each_pair do |k, v|
           # puts "k=#{k} v=#{v}"
-
-
-
-      end
-    end
+          if self.respond_to?(k)
+            self.send("#{k}=", v)
+          end
         end
+      end
+    end
 
-
-
+    # Will send in all instance_variables.
+    def queue
       # puts 'in queue'
-
-
+      set_auto_attributes
+      upload_if_needed
 
-
+      response = SimpleWorker.service.queue(self.class.name, sw_get_data)
       # puts 'queue response=' + response.inspect
-
-
-
-
+      @task_set_id = response["task_set_id"]
+      @task_id = response["tasks"][0]["task_id"]
+      response
+    end
 
-
-
-
+    def status
+      SimpleWorker.service.status(task_id)
+    end
 
-
-
-
+    def upload
+      upload_if_needed
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    #
+    # schedule: hash of scheduling options that can include:
+    # Required:
+    # - start_at: Time of first run - DateTime or Time object.
+    # Optional:
+    # - run_every: Time in seconds between runs. If ommitted, task will only run once.
+    # - delay_type: Fixed Rate or Fixed Delay. Default is fixed_delay.
+    # - end_at: Scheduled task will stop running after this date (optional, if ommitted, runs forever or until cancelled)
+    # - run_times: Task will run exactly :run_times. For instance if :run_times is 5, then the task will run 5 times.
+    #
+    def schedule(schedule)
+      set_global_attributes
+      upload_if_needed
+
+      response = SimpleWorker.service.schedule(self.class.name, sw_get_data, schedule)
       # puts 'schedule response=' + response.inspect
-
-
-
+      @schedule_id = response["schedule_id"]
+      response
+    end
 
-
-
-
+    def schedule_status
+      SimpleWorker.service.schedule_status(schedule_id)
+    end
 
-
-
+    # Callbacks for developer
+    def before_upload
 
-
+    end
 
-
+    def after_upload
 
-
+    end
 
-
+    def before_run
 
-
+    end
 
-
+    def after_run
 
-
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    private
+
+    def upload_if_needed
+
+      before_upload
+
+      puts 'upload_if_needed ' + self.class.name
+      # Todo, watch for this file changing or something so we can reupload
+      unless uploaded?
+        merged = self.class.instance_variable_get(:@merged)
+        puts 'merged1=' + merged.inspect
+
+        subclass = self.class
+        rfile = subclass.instance_variable_get(:@caller_file) # Base.caller_file # File.expand_path(Base.subclass)
+        puts 'subclass file=' + rfile.inspect
+        puts 'subclass.name=' + subclass.name
+        superclass = subclass
+        # Also get merged from subclasses up to SimpleWorker::Base
+        while (superclass = superclass.superclass)
+          puts 'superclass=' + superclass.name
+          break if superclass.name == SimpleWorker::Base.name
+          super_merged = superclass.instance_variable_get(:@merged)
           # puts 'merging caller file: ' + superclass.instance_variable_get(:@caller_file).inspect
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          super_merged << superclass.instance_variable_get(:@caller_file)
+          merged = super_merged + merged
+          puts 'merged with superclass=' + merged.inspect
+        end
+        merged += SimpleWorker.config.models if SimpleWorker.config.models
+        SimpleWorker.service.upload(rfile, subclass.name, :merge=>merged)
+        self.class.instance_variable_set(:@uploaded, true)
+      else
+        puts 'already uploaded for ' + self.class.name
+      end
+      merged_workers = self.class.instance_variable_get(:@merged_workers)
+      puts 'now uploading merged WORKERS ' + merged_workers.inspect
+      merged_workers.each do |mw|
+        # to support merges in the secondary worker, we should instantiate it here, then call "upload"
+        puts 'instantiating and uploading ' + mw[1]
+        Kernel.const_get(mw[1]).new.upload
         # SimpleWorker.service.upload(mw[0], mw[1])
-
+      end
 
-
-
-
-  def sw_get_data
-    data = {}
-    self.instance_variables.each do |iv|
-      data[iv] = instance_variable_get(iv)
-    end
-    return data
-  end
+      after_upload
+    end
 
+    def sw_get_data
+      data = {}
+      self.instance_variables.each do |iv|
+        data[iv] = instance_variable_get(iv)
+      end
 
+      config_data = {}
+      config_data['database'] = SimpleWorker.config.database if SimpleWorker.config.database
+      data[:sw_config] = config_data
+      return data
     end
+
+
+  end
 end
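
To illustrate the class-level DSL added here (merge, merge_worker) together with the instance API (queue, schedule, status, run_local and the before/after callbacks), a hypothetical worker might look like this (file paths, class names and attributes are made up):

    class ReportWorker < SimpleWorker::Base
      attr_accessor :account_id

      merge "../app/models/account"                       # merged into the uploaded code
      merge_worker "./cleanup_worker.rb", "CleanupWorker"  # a second worker uploaded alongside

      def before_upload
        log "about to upload #{who_am_i?}"
      end

      def run
        set_progress(:percent => 50, :message => "crunching account #{account_id}")
      end
    end

    w = ReportWorker.new
    w.account_id = 42
    w.queue        # uploads the (merged) code if it changed, then queues a task
    w.status       # delegates to SimpleWorker.service.status(task_id)
    # w.schedule(:start_at => Time.now + 60, :run_every => 3600)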
data/lib/simple_worker/config.rb
CHANGED
@@ -1,21 +1,27 @@
 module SimpleWorker
 
 
-
-
-
-
-
-
-
-
-
-
+  # Config is used to setup the SimpleWorker client.
+  # You must set the access_key and secret_key.
+  #
+  # config.global_attributes allows you to specify attributes that will automatically be set on every worker,
+  # this is good for database connection information or things that will be used across the board.
+  #
+  # config.database configures a database connection. If specified like ActiveRecord, SimpleWorker will automatically establish a connection
+  # for you before running your worker.
+  class Config
+    attr_accessor :access_key,
+                  :secret_key,
+                  :host,
+                  :global_attributes,
+                  :models,
+                  :database
 
-
-
-  end
+    def initialize
+      @global_attributes = {}
     end
 
+  end
+
 end
 
data/lib/simple_worker/service.rb
ADDED
@@ -0,0 +1,170 @@
+module SimpleWorker
+
+  class Service < Appoxy::Api::Client
+
+    attr_accessor :config
+
+    def initialize(access_key, secret_key, options={})
+      puts 'Starting SimpleWorker::Service...'
+      self.config = options[:config] if options[:config]
+      super("http://api.simpleworker.com/api/", access_key, secret_key, options)
+      self.host = self.config.host if self.config && self.config.host
+    end
+
+    # Options:
+    # - :callback_url
+    # - :merge => array of files to merge in with this file
+    def upload(filename, class_name, options={})
+      puts "Uploading #{class_name}"
+      # check whether it should upload again
+      tmp = Dir.tmpdir()
+      md5file = "simple_worker_#{class_name.gsub("::", ".")}.md5"
+      existing_md5 = nil
+      f = File.join(tmp, md5file)
+      if File.exists?(f)
+        existing_md5 = IO.read(f)
+      end
+
+      filename = build_merged_file(filename, options[:merge]) if options[:merge]
+
+      # sys.classes[subclass].__file__
+      # puts '__FILE__=' + Base.subclass.__file__.to_s
+      md5 = Digest::MD5.hexdigest(File.read(filename))
+      # puts "new md5=" + md5
+
+      new_code = false
+      if md5 != existing_md5
+        puts "#{class_name}: new code, uploading"
+        File.open(f, 'w') { |f| f.write(md5) }
+        new_code = true
+      else
+        puts "#{class_name}: same code, not uploading"
+      end
+
+      if new_code
+        mystring = nil
+        file = File.open(filename, "r") do |f|
+          mystring = f.read
+        end
+        options = {"code"=>mystring, "class_name"=>class_name}
+        ret = post("code/put", options)
+        ret
+      end
+    end
+
+    def build_merged_file(filename, merge)
+      merge = merge.dup
+      merge << filename
+      merge.uniq!
+      fname2 = File.join(Dir.tmpdir(), File.basename(filename))
+      # puts 'fname2=' + fname2
+      # puts 'merged_file_array=' + merge.inspect
+      File.open(fname2, "w") do |f|
+        merge.each do |m|
+          puts "merging #{m} into #{filename}"
+          f.write File.open(m, 'r') { |mo| mo.read }
+          f.write "\n\n"
+        end
+      end
+      fname2
+    end
+
+    def add_sw_params(hash_to_send)
+      # todo: remove secret key?? Can use worker service from within a worker without it now
+      hash_to_send["sw_access_key"] = self.access_key
+      hash_to_send["sw_secret_key"] = self.secret_key
+    end
+
+    # class_name: The class name of a previously upload class, eg: MySuperWorker
+    # data: Arbitrary hash of your own data that your task will need to run.
+    def queue(class_name, data={})
+      puts "Queuing #{class_name}"
+      if !data.is_a?(Array)
+        data = [data]
+      end
+      p data
+      hash_to_send = {}
+      hash_to_send["payload"] = data
+      hash_to_send["class_name"] = class_name
+      add_sw_params(hash_to_send)
+      if defined?(RAILS_ENV)
+        hash_to_send["rails_env"] = RAILS_ENV
+      end
+      return queue_raw(class_name, hash_to_send)
+
+    end
+
+    def queue_raw(class_name, data={})
+      params = nil
+      hash_to_send = data
+      hash_to_send["class_name"] = class_name
+      ret = post("queue/add", hash_to_send)
+      ret
+
+    end
+
+
+    #
+    # schedule: hash of scheduling options that can include:
+    # Required:
+    # - start_at: Time of first run - DateTime or Time object.
+    # Optional:
+    # - run_every: Time in seconds between runs. If ommitted, task will only run once.
+    # - delay_type: Fixed Rate or Fixed Delay. Default is fixed_delay.
+    # - end_at: Scheduled task will stop running after this date (optional, if ommitted, runs forever or until cancelled)
+    # - run_times: Task will run exactly :run_times. For instance if :run_times is 5, then the task will run 5 times.
+    #
+    def schedule(class_name, data, schedule)
+      puts "Scheduling #{class_name}"
+      raise "Schedule must be a hash." if !schedule.is_a? Hash
+      # if !data.is_a?(Array)
+      # data = [data]
+      # end
+      hash_to_send = {}
+      hash_to_send["payload"] = data
+      hash_to_send["class_name"] = class_name
+      hash_to_send["schedule"] = schedule
+      add_sw_params(hash_to_send)
+      # puts 'about to send ' + hash_to_send.inspect
+      ret = post("scheduler/schedule", hash_to_send)
+      ret
+    end
+
+    def cancel_schedule(scheduled_task_id)
+      raise "Must include a schedule id." if scheduled_task_id.blank?
+      hash_to_send = {}
+      hash_to_send["scheduled_task_id"] = scheduled_task_id
+      ret = post("scheduler/cancel", hash_to_send)
+      ret
+    end
+
+    def get_schedules()
+      hash_to_send = {}
+      ret = get("scheduler/list", hash_to_send)
+      ret
+    end
+
+    def status(task_id)
+      data = {"task_id"=>task_id}
+      ret = get("task/status", data)
+      ret
+    end
+
+    def schedule_status(schedule_id)
+      data = {"schedule_id"=>schedule_id}
+      ret = get("scheduler/status", data)
+      ret
+    end
+
+    def log(task_id)
+      data = {"task_id"=>task_id}
+      ret = get("task/log", data)
+      # puts 'ret=' + ret.inspect
+      # ret["log"] = Base64.decode64(ret["log"])
+      ret
+    end
+
+
+  end
+
+end
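
Since Service exposes the raw API calls directly, it can also be driven without a Base subclass; a rough sketch (file name, class name, keys and IDs are placeholders):

    service = SimpleWorker::Service.new('ACCESS_KEY', 'SECRET_KEY')
    service.upload('my_worker.rb', 'MyWorker', :merge => ['helper.rb'])
    res = service.queue('MyWorker', {'foo' => 'bar'})
    task_id = res['tasks'][0]['task_id']
    puts service.status(task_id).inspect
    puts service.log(task_id).inspect
    service.get_schedules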
data/lib/simple_worker/used_in_worker.rb
CHANGED
@@ -1,3 +1,6 @@
+# UsedInWorker can be included in classes that you are merging in order to get some of the SimpleWorker features into that class.
+# For instance, you can use the log() method and it will be logged to the SimpleWorker logs.
+
 module SimpleWorker
   module UsedInWorker
 
data/rails/init.rb
ADDED
File without changes
data/test/test_base.rb
CHANGED
@@ -1,34 +1,38 @@
 require 'test/unit'
 require 'yaml'
 begin
-
+  require File.join(File.dirname(__FILE__), '../lib/simple_worker')
 rescue Exception => ex
-
-
+  puts ex.message
+  require 'simple_worker'
 end
-
-
-
+require_relative "test_worker"
+require_relative "test_worker_2"
+require_relative "test_worker_3"
 
 class TestBase < Test::Unit::TestCase
 
-
-
-
-
-
+  def setup
+    @config = YAML::load(File.open(File.expand_path("~/.test_configs/simple_worker.yml")))
+    #puts @config.inspect
+    @access_key = @config['simple_worker']['access_key']
+    @secret_key = @config['simple_worker']['secret_key']
 
-
-
+    # new style
+    SimpleWorker.configure do |config|
+      config.access_key = @access_key
+      config.secret_key = @secret_key
+      # config.host = "http://localhost:3000/api/"
+      config.global_attributes["db_user"] = "sa"
+      config.global_attributes["db_pass"] = "pass"
+      config.database = {
+        :adapter => "mysql2",
+        :host => "localhost",
+        :database => "appdb",
+        :username => "appuser",
+        :password => "secret"
+      }
 
-    # new style
-    SimpleWorker.configure do |config|
-      config.access_key = @access_key
-      config.secret_key = @secret_key
-      config.host = "http://localhost:3000/api/"
-      config.global_attributes["db_user"] = "sa"
-      config.global_attributes["db_pass"] = "pass"
-
-    end
     end
+  end
 end
data/test/test_simple_worker.rb
CHANGED
@@ -1,18 +1,16 @@
-
-
 require_relative 'test_base'
 
 class SimpleWorkerTests < TestBase
 
 
-
-
-
-
-
-
+  def test_new_worker_style
+    # Add something to queue, get task ID back
+    tw = TestWorker2.new
+    tw.s3_key = "active style runner"
+    tw.times = 3
+    tw.x = true
 
-
+    # schedule up a task
     # start_at = 10.seconds.since
     # response_hash_single = tw.schedule(:start_at=>start_at, :run_every=>30, :run_times=>3)
     # puts 'response_hash=' + response_hash_single.inspect
@@ -21,93 +19,39 @@ class SimpleWorkerTests < TestBase
     # puts "status #{i}: " + tw.schedule_status.inspect
     # end
 
-
-
-
+    # queue up a task
+    puts 'queuing ' + tw.inspect
+    response_hash_single = tw.queue
 
 
-
-
-
-
-
-
-
-
+    puts 'response_hash=' + response_hash_single.inspect
+    puts 'task_set_id=' + tw.task_set_id
+    puts 'task_id=' + tw.task_id
+    10.times do |i|
+      puts "status #{i}: " + tw.status.inspect
+      break if tw.status["status"] == "complete"
+      sleep 2
+    end
 
-
+    assert tw.status["status"] == "complete"
 
-
+  end
 
-
-
-
+  def test_global_attributes
+    worker = TestWorker3.new
+    worker.run_local
 
-
+    puts 'worker=' + worker.inspect
 
-
-
-
+    assert_equal "sa", worker.db_user
+    assert_equal "pass", worker.db_pass
+    assert_equal 123, worker.x
+
+  end
+
+  def test_require_relative_merge
 
-  end
-  #
-  #
-  # def test_queue
-  #
-  #
-  #   # Upload latest runner code
-  #   @worker.upload(File.join(File.dirname(__FILE__), "./test_worker.rb"), "TestWorker")
-  #
-  #   # Add something to queue, get task ID back
-  #   # Single task
-  #   response_hash_single = @worker.queue("TestWorker", {"s3_key"=>"single runner", "times"=>10})
-  #
-  #   # task set
-  #   response_hash = @worker.queue("TestWorker", [{"id"=>"local_id", "s3_key"=>"some key", "times"=>4}, {"s3_key"=>"some key2", "times"=>3}, {"s3_key"=>"some key", "times"=>2}])
-  #
-  #   # Check status
-  #   tasks = response_hash["tasks"]
-  #   puts 'tasks.size=' + tasks.size.to_s
-  #   while tasks.size > 0
-  #     tasks.each do |t|
-  #       puts "t=" + t.inspect
-  #       status_response = @worker.status(t["task_id"])
-  #       puts 'status for ' + t["task_id"] + ' = ' + status_response["status"]
-  #       if status_response["status"] == "complete" || status_response["status"] == "error" || status_response["status"] == "cancelled"
-  #         tasks.delete(t)
-  #       end
-  #     end
-  #   end
-  #
-  #   # lets try to get the log now too
-  #   task_id = response_hash_single["tasks"][0]["task_id"]
-  #   puts 'task_id=' + task_id
-  #   status_with_log = @worker.log(task_id)
-  #   puts 'log=' + status_with_log.inspect
-  #
-  # end
-  #
-  # def test_scheduled
-  #
-  #   # Upload latest runner code
-  #   @worker.upload(File.join(File.dirname(__FILE__), "./scheduled_worker.rb"), "ScheduledWorker")
-  #
-  #   start_at = 10.seconds.since
-  #   #start_at = start_at.gmtime # testing different timezone
-  #   puts 'start_at =' + start_at.inspect
-  #   response_hash = @worker.schedule("ScheduledWorker", {"msg"=>"One time test."}, {:start_at=>start_at})
-  #   puts 'response_hash=' + response_hash.inspect
-  #
-  #   start_at = 10.seconds.since
-  #   response_hash = @worker.schedule("ScheduledWorker", {"msg"=>"Run times test"}, {:start_at=>start_at, :run_every=>30, :run_times=>3})
-  #   puts 'response_hash=' + response_hash.inspect
-  #
-  #   start_at = 10.seconds.since
-  #   end_at = 2.minutes.since
-  #   response_hash = @worker.schedule("ScheduledWorker", {"msg"=>"End at test"}, {:start_at=>start_at, :run_every=>30, :end_at=>end_at, :run_times=>20})
-  #   puts 'response_hash=' + response_hash.inspect
-  #
-  # end
 
+  end
 end
 
metadata
CHANGED
@@ -5,8 +5,8 @@ version: !ruby/object:Gem::Version
 segments:
 - 0
 - 3
-- 15
-version: 0.3.15
+- 16
+version: 0.3.16
 platform: ruby
 authors:
 - Travis Reeder
@@ -14,7 +14,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2010-
+date: 2010-12-29 00:00:00 -08:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -39,12 +39,17 @@ extensions: []
 extra_rdoc_files:
 - README.markdown
 files:
+- lib/rails2_init.rb
+- lib/railtie.rb
 - lib/simple_worker.rb
 - lib/simple_worker/base.rb
 - lib/simple_worker/config.rb
+- lib/simple_worker/service.rb
 - lib/simple_worker/used_in_worker.rb
+- rails/init.rb
 - README.markdown
 - test/models/model_1.rb
+- test/requiring_worker.rb
 - test/scheduled_worker.rb
 - test/second_worker.rb
 - test/test_base.rb
@@ -87,6 +92,7 @@ specification_version: 3
 summary: Classified
 test_files:
 - test/models/model_1.rb
+- test/requiring_worker.rb
 - test/scheduled_worker.rb
 - test/second_worker.rb
 - test/test_base.rb