thinking-sphinx-099 1.2.13 → 1.3.0

This diff shows the changes between two publicly released versions of this package, as they appear in its public registry. It is provided for informational purposes only.
data/VERSION.yml CHANGED
@@ -1,5 +1,5 @@
  ---
- :patch: 13
+ :patch: 0
  :major: 1
  :build:
- :minor: 2
+ :minor: 3
@@ -17,4 +17,4 @@ Feature: Searching across multiple model
    Scenario: Retrieving results from multiple models
      Given Sphinx is running
      When I search for ten
-     Then I should get 6 results
+     Then I should get 4 results
@@ -1,6 +1,5 @@
  Before do
    $queries_executed = []
-   ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs

    @model = nil
    @method = :search
@@ -1,6 +1,4 @@
  require 'thinking_sphinx/deltas/default_delta'
- require 'thinking_sphinx/deltas/delayed_delta'
- require 'thinking_sphinx/deltas/datetime_delta'

  module ThinkingSphinx
    module Deltas
@@ -65,14 +65,12 @@ namespace :thinking_sphinx do

    desc "Index data for Sphinx using Thinking Sphinx's settings"
    task :index => :app_env do
-     ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs
-
      config = ThinkingSphinx::Configuration.instance
      unless ENV["INDEX_ONLY"] == "true"
        puts "Generating Configuration to #{config.config_file}"
        config.build
      end
-
+
      FileUtils.mkdir_p config.searchd_file_path
      cmd = "#{config.bin_path}#{config.indexer_binary_name} --config \"#{config.config_file}\" --all"
      cmd << " --rotate" if sphinx_running?
@@ -86,30 +84,6 @@ namespace :thinking_sphinx do
      Rake::Task["thinking_sphinx:index"].invoke
      Rake::Task["thinking_sphinx:start"].invoke
    end
-
-   namespace :index do
-     task :delta => :app_env do
-       ThinkingSphinx.indexed_models.select { |model|
-         model.constantize.sphinx_indexes.any? { |index| index.delta? }
-       }.each do |model|
-         model.constantize.sphinx_indexes.select { |index|
-           index.delta? && index.delta_object.respond_to?(:delayed_index)
-         }.each { |index|
-           index.delta_object.delayed_index(index.model)
-         }
-       end
-     end
-   end
-
-   desc "Process stored delta index requests"
-   task :delayed_delta => :app_env do
-     require 'delayed/worker'
-
-     Delayed::Worker.new(
-       :min_priority => ENV['MIN_PRIORITY'],
-       :max_priority => ENV['MAX_PRIORITY']
-     ).start
-   end
  end

  namespace :ts do
@@ -123,10 +97,6 @@ namespace :ts do
    task :stop => "thinking_sphinx:stop"
    desc "Index data for Sphinx using Thinking Sphinx's settings"
    task :in => "thinking_sphinx:index"
-   namespace :in do
-     desc "Index Thinking Sphinx datetime delta indexes"
-     task :delta => "thinking_sphinx:index:delta"
-   end
    task :index => "thinking_sphinx:index"
    desc "Restart Sphinx"
    task :restart => "thinking_sphinx:restart"
@@ -136,8 +106,6 @@ namespace :ts do
    task :config => "thinking_sphinx:configure"
    desc "Stop Sphinx (if it's running), rebuild the indexes, and start Sphinx"
    task :rebuild => "thinking_sphinx:rebuild"
-   desc "Process stored delta index requests"
-   task :dd => "thinking_sphinx:delayed_delta"
  end

  def sphinx_pid
@@ -93,6 +93,10 @@ describe ThinkingSphinx do
      ::ActiveRecord::Base.stub!(
        :connection => @connection
      )
+
+     ThinkingSphinx.module_eval do
+       class_variable_set :@@use_group_by_shortcut, nil
+     end
    end

    it "should return true if no ONLY_FULL_GROUP_BY" do
@@ -33,21 +33,8 @@ Jeweler::Tasks.new do |gem|
    gem.add_dependency 'activerecord', '>= 1.15.6'

    gem.post_install_message = <<-MESSAGE
- With the release of Thinking Sphinx 1.1.18, there is one important change to
- note: previously, the default morphology for indexing was 'stem_en'. The new
- default is nil, to avoid any unexpected behavior. If you wish to keep the old
- value though, you will need to add the following settings to your
- config/sphinx.yml file:
-
-   development:
-     morphology: stem_en
-   test:
-     morphology: stem_en
-   production:
-     morphology: stem_en
-
- To understand morphologies/stemmers better, visit the following link:
- http://www.sphinxsearch.com/docs/manual-0.9.8.html#conf-morphology
+ If you're upgrading, you should read this:
+ http://freelancing-god.github.com/ts/en/upgrading.html

  MESSAGE
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: thinking-sphinx-099
  version: !ruby/object:Gem::Version
-   version: 1.2.13
+   version: 1.3.0
  platform: ruby
  authors:
  - Pat Allan
@@ -9,7 +9,7 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2009-11-01 00:00:00 +11:00
+ date: 2009-11-04 00:00:00 +11:00
  default_executable:
  dependencies:
  - !ruby/object:Gem::Dependency
@@ -52,12 +52,7 @@ files:
  - lib/thinking_sphinx/core/array.rb
  - lib/thinking_sphinx/core/string.rb
  - lib/thinking_sphinx/deltas.rb
- - lib/thinking_sphinx/deltas/datetime_delta.rb
  - lib/thinking_sphinx/deltas/default_delta.rb
- - lib/thinking_sphinx/deltas/delayed_delta.rb
- - lib/thinking_sphinx/deltas/delayed_delta/delta_job.rb
- - lib/thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job.rb
- - lib/thinking_sphinx/deltas/delayed_delta/job.rb
  - lib/thinking_sphinx/deploy/capistrano.rb
  - lib/thinking_sphinx/excerpter.rb
  - lib/thinking_sphinx/facet.rb
@@ -86,10 +81,6 @@ files:
  - vendor/after_commit/lib/after_commit/active_record.rb
  - vendor/after_commit/lib/after_commit/connection_adapters.rb
  - vendor/after_commit/test/after_commit_test.rb
- - vendor/delayed_job/lib/delayed/job.rb
- - vendor/delayed_job/lib/delayed/message_sending.rb
- - vendor/delayed_job/lib/delayed/performable_method.rb
- - vendor/delayed_job/lib/delayed/worker.rb
  - vendor/riddle/lib/riddle.rb
  - vendor/riddle/lib/riddle/client.rb
  - vendor/riddle/lib/riddle/client/filter.rb
@@ -111,21 +102,8 @@ homepage: http://ts.freelancing-gods.com
  licenses: []

  post_install_message: |+
-   With the release of Thinking Sphinx 1.1.18, there is one important change to
-   note: previously, the default morphology for indexing was 'stem_en'. The new
-   default is nil, to avoid any unexpected behavior. If you wish to keep the old
-   value though, you will need to add the following settings to your
-   config/sphinx.yml file:
-
-     development:
-       morphology: stem_en
-     test:
-       morphology: stem_en
-     production:
-       morphology: stem_en
-
-   To understand morphologies/stemmers better, visit the following link:
-   http://www.sphinxsearch.com/docs/manual-0.9.8.html#conf-morphology
+   If you're upgrading, you should read this:
+   http://freelancing-god.github.com/ts/en/upgrading.html

  rdoc_options:
  - --charset=UTF-8
@@ -154,8 +132,6 @@ test_files:
  - features/alternate_primary_key.feature
  - features/attribute_transformation.feature
  - features/attribute_updates.feature
- - features/datetime_deltas.feature
- - features/delayed_delta_indexing.feature
  - features/deleting_instances.feature
  - features/direct_attributes.feature
  - features/excerpts.feature
@@ -172,8 +148,6 @@ test_files:
  - features/step_definitions/alpha_steps.rb
  - features/step_definitions/beta_steps.rb
  - features/step_definitions/common_steps.rb
- - features/step_definitions/datetime_delta_steps.rb
- - features/step_definitions/delayed_delta_indexing_steps.rb
  - features/step_definitions/extensible_delta_indexing_steps.rb
  - features/step_definitions/facet_steps.rb
  - features/step_definitions/find_arguments_steps.rb
@@ -193,7 +167,6 @@ test_files:
  - features/support/db/fixtures/categories.rb
  - features/support/db/fixtures/cats.rb
  - features/support/db/fixtures/comments.rb
- - features/support/db/fixtures/delayed_betas.rb
  - features/support/db/fixtures/developers.rb
  - features/support/db/fixtures/dogs.rb
  - features/support/db/fixtures/extensible_betas.rb
@@ -202,7 +175,6 @@ test_files:
  - features/support/db/fixtures/posts.rb
  - features/support/db/fixtures/robots.rb
  - features/support/db/fixtures/tags.rb
- - features/support/db/fixtures/thetas.rb
  - features/support/db/migrations/create_alphas.rb
  - features/support/db/migrations/create_animals.rb
  - features/support/db/migrations/create_authors.rb
@@ -211,7 +183,6 @@ test_files:
  - features/support/db/migrations/create_boxes.rb
  - features/support/db/migrations/create_categories.rb
  - features/support/db/migrations/create_comments.rb
- - features/support/db/migrations/create_delayed_betas.rb
  - features/support/db/migrations/create_developers.rb
  - features/support/db/migrations/create_extensible_betas.rb
  - features/support/db/migrations/create_gammas.rb
@@ -220,7 +191,6 @@ test_files:
  - features/support/db/migrations/create_robots.rb
  - features/support/db/migrations/create_taggings.rb
  - features/support/db/migrations/create_tags.rb
- - features/support/db/migrations/create_thetas.rb
  - features/support/db/mysql.rb
  - features/support/db/postgresql.rb
  - features/support/env.rb
@@ -233,7 +203,6 @@ test_files:
  - features/support/models/cat.rb
  - features/support/models/category.rb
  - features/support/models/comment.rb
- - features/support/models/delayed_beta.rb
  - features/support/models/developer.rb
  - features/support/models/dog.rb
  - features/support/models/extensible_beta.rb
@@ -243,7 +212,6 @@ test_files:
  - features/support/models/robot.rb
  - features/support/models/tag.rb
  - features/support/models/tagging.rb
- - features/support/models/theta.rb
  - features/support/post_database.rb
  - spec/lib/thinking_sphinx/active_record/delta_spec.rb
  - spec/lib/thinking_sphinx/active_record/has_many_association_spec.rb
@@ -254,7 +222,6 @@ test_files:
  - spec/lib/thinking_sphinx/configuration_spec.rb
  - spec/lib/thinking_sphinx/core/array_spec.rb
  - spec/lib/thinking_sphinx/core/string_spec.rb
- - spec/lib/thinking_sphinx/deltas/job_spec.rb
  - spec/lib/thinking_sphinx/excerpter_spec.rb
  - spec/lib/thinking_sphinx/facet_search_spec.rb
  - spec/lib/thinking_sphinx/facet_spec.rb
@@ -1,66 +0,0 @@
- Feature: Datetime Delta Indexing
-   In order to have delta indexing on frequently-updated sites
-   Developers
-   Should be able to use an existing datetime column to track changes
-
-   Scenario: Delta Index should not fire automatically
-     Given Sphinx is running
-     And I am searching on thetas
-     When I search for one
-     Then I should get 1 result
-
-     When I change the name of theta one to eleven
-     And I wait for Sphinx to catch up
-     And I search for one
-     Then I should get 1 result
-
-     When I search for eleven
-     Then I should get 0 results
-
-   Scenario: Delta Index should fire when jobs are run
-     Given Sphinx is running
-     And I am searching on thetas
-     When I search for two
-     Then I should get 1 result
-
-     When I change the name of theta two to twelve
-     And I wait for Sphinx to catch up
-     And I search for twelve
-     Then I should get 0 results
-
-     When I index the theta datetime delta
-     And I wait for Sphinx to catch up
-     And I search for twelve
-     Then I should get 1 result
-
-     When I search for two
-     Then I should get 0 results
-
-   Scenario: New records should be merged into the core index
-     Given Sphinx is running
-     And I am searching on thetas
-     When I search for thirteen
-     Then I should get 0 results
-
-     When I create a new theta named thirteen
-     And I search for thirteen
-     Then I should get 0 results
-
-     When I index the theta datetime delta
-     And I wait for Sphinx to catch up
-     And I search for thirteen
-     Then I should get 1 result
-
-     When I search for the document id of theta thirteen in the theta_core index
-     Then it should exist
-
-   Scenario: Deleting records
-     Given Sphinx is running
-     And I am searching on thetas
-     When I search for three
-     Then I should get 1 result
-
-     When I delete the theta named three
-     And I wait for Sphinx to catch up
-     And I search for three
-     Then I should get 0 results
@@ -1,37 +0,0 @@
- Feature: Delayed Delta Indexing
-   In order to have delta indexing on frequently-updated sites
-   Developers
-   Should be able to use delayed_job to handle delta indexes to lower system load
-
-   Scenario: Delta Index should not fire automatically
-     Given Sphinx is running
-     And I am searching on delayed betas
-     When I search for one
-     Then I should get 1 result
-
-     When I change the name of delayed beta one to eleven
-     And I wait for Sphinx to catch up
-     And I search for one
-     Then I should get 1 result
-
-     When I search for eleven
-     Then I should get 0 results
-
-   Scenario: Delta Index should fire when jobs are run
-     Given Sphinx is running
-     And I am searching on delayed betas
-     When I search for one
-     Then I should get 1 result
-
-     When I change the name of delayed beta two to twelve
-     And I wait for Sphinx to catch up
-     And I search for twelve
-     Then I should get 0 results
-
-     When I run the delayed jobs
-     And I wait for Sphinx to catch up
-     And I search for twelve
-     Then I should get 1 result
-
-     When I search for two
-     Then I should get 0 results
@@ -1,15 +0,0 @@
- When /^I index the theta datetime delta$/ do
-   Theta.sphinx_indexes.first.delta_object.delayed_index(Theta)
- end
-
- When /^I change the name of theta (\w+) to (\w+)$/ do |current, replacement|
-   Theta.find_by_name(current).update_attributes(:name => replacement)
- end
-
- When /^I create a new theta named (\w+)$/ do |name|
-   Theta.create(:name => name)
- end
-
- When /^I delete the theta named (\w+)$/ do |name|
-   Theta.find_by_name(name).destroy
- end
@@ -1,7 +0,0 @@
- When /^I run the delayed jobs$/ do
-   Delayed::Job.work_off.inspect
- end
-
- When /^I change the name of delayed beta (\w+) to (\w+)$/ do |current, replacement|
-   DelayedBeta.find_by_name(current).update_attributes(:name => replacement)
- end
@@ -1,10 +0,0 @@
- DelayedBeta.create :name => "one"
- DelayedBeta.create :name => "two"
- DelayedBeta.create :name => "three"
- DelayedBeta.create :name => "four"
- DelayedBeta.create :name => "five"
- DelayedBeta.create :name => "six"
- DelayedBeta.create :name => "seven"
- DelayedBeta.create :name => "eight"
- DelayedBeta.create :name => "nine"
- DelayedBeta.create :name => "ten"
@@ -1,10 +0,0 @@
- Theta.create :name => "one"
- Theta.create :name => "two"
- Theta.create :name => "three"
- Theta.create :name => "four"
- Theta.create :name => "five"
- Theta.create :name => "six"
- Theta.create :name => "seven"
- Theta.create :name => "eight"
- Theta.create :name => "nine"
- Theta.create :name => "ten"
@@ -1,17 +0,0 @@
- ActiveRecord::Base.connection.create_table :delayed_betas, :force => true do |t|
-   t.column :name, :string, :null => false
-   t.column :delta, :boolean, :null => false, :default => false
- end
-
- ActiveRecord::Base.connection.create_table :delayed_jobs, :force => true do |t|
-   t.column :priority, :integer, :default => 0
-   t.column :attempts, :integer, :default => 0
-   t.column :handler, :text
-   t.column :last_error, :string
-   t.column :run_at, :datetime
-   t.column :locked_at, :datetime
-   t.column :failed_at, :datetime
-   t.column :locked_by, :string
-   t.column :created_at, :datetime
-   t.column :updated_at, :datetime
- end
@@ -1,5 +0,0 @@
- ActiveRecord::Base.connection.create_table :thetas, :force => true do |t|
-   t.column :name, :string, :null => false
-   t.column :created_at, :datetime, :null => false
-   t.column :updated_at, :datetime, :null => false
- end
@@ -1,7 +0,0 @@
- class DelayedBeta < ActiveRecord::Base
-   define_index do
-     indexes :name, :sortable => true
-
-     set_property :delta => :delayed
-   end
- end
@@ -1,7 +0,0 @@
- class Theta < ActiveRecord::Base
-   define_index do
-     indexes :name, :sortable => true
-
-     set_property :delta => :datetime, :threshold => 1.hour
-   end
- end
@@ -1,50 +0,0 @@
- module ThinkingSphinx
-   module Deltas
-     class DatetimeDelta < ThinkingSphinx::Deltas::DefaultDelta
-       attr_accessor :column, :threshold
-
-       def initialize(index, options)
-         @index = index
-         @column = options.delete(:delta_column) || :updated_at
-         @threshold = options.delete(:threshold) || 1.day
-       end
-
-       def index(model, instance = nil)
-         # do nothing
-         true
-       end
-
-       def delayed_index(model)
-         config = ThinkingSphinx::Configuration.instance
-         rotate = ThinkingSphinx.sphinx_running? ? "--rotate" : ""
-
-         output = `#{config.bin_path}#{config.indexer_binary_name} --config #{config.config_file} #{rotate} #{delta_index_name model}`
-         output += `#{config.bin_path}#{config.indexer_binary_name} --config #{config.config_file} #{rotate} --merge #{core_index_name model} #{delta_index_name model} --merge-dst-range sphinx_deleted 0 0`
-         puts output unless ThinkingSphinx.suppress_delta_output?
-
-         true
-       end
-
-       def toggle(instance)
-         # do nothing
-       end
-
-       def toggled(instance)
-         instance.send(@column) > @threshold.ago
-       end
-
-       def reset_query(model)
-         nil
-       end
-
-       def clause(model, toggled)
-         if toggled
-           "#{model.quoted_table_name}.#{model.connection.quote_column_name(@column.to_s)}" +
-             " > #{adapter.time_difference(@threshold)}"
-         else
-           nil
-         end
-       end
-     end
-   end
- end
@@ -1,30 +0,0 @@
- require 'delayed/job'
-
- require 'thinking_sphinx/deltas/delayed_delta/delta_job'
- require 'thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job'
- require 'thinking_sphinx/deltas/delayed_delta/job'
-
- module ThinkingSphinx
-   module Deltas
-     class DelayedDelta < ThinkingSphinx::Deltas::DefaultDelta
-       def index(model, instance = nil)
-         return true unless ThinkingSphinx.updates_enabled? && ThinkingSphinx.deltas_enabled?
-         return true if instance && !toggled(instance)
-
-         ThinkingSphinx::Deltas::Job.enqueue(
-           ThinkingSphinx::Deltas::DeltaJob.new(delta_index_name(model)),
-           ThinkingSphinx::Configuration.instance.delayed_job_priority
-         )
-
-         Delayed::Job.enqueue(
-           ThinkingSphinx::Deltas::FlagAsDeletedJob.new(
-             core_index_name(model), instance.sphinx_document_id
-           ),
-           ThinkingSphinx::Configuration.instance.delayed_job_priority
-         ) if instance
-
-         true
-       end
-     end
-   end
- end
@@ -1,24 +0,0 @@
- module ThinkingSphinx
-   module Deltas
-     class DeltaJob
-       attr_accessor :index
-
-       def initialize(index)
-         @index = index
-       end
-
-       def perform
-         return true unless ThinkingSphinx.updates_enabled? &&
-           ThinkingSphinx.deltas_enabled?
-
-         config = ThinkingSphinx::Configuration.instance
-         client = Riddle::Client.new config.address, config.port
-
-         output = `#{config.bin_path}#{config.indexer_binary_name} --config #{config.config_file} --rotate #{index}`
-         puts output unless ThinkingSphinx.suppress_delta_output?
-
-         true
-       end
-     end
-   end
- end
@@ -1,27 +0,0 @@
- module ThinkingSphinx
-   module Deltas
-     class FlagAsDeletedJob
-       attr_accessor :index, :document_id
-
-       def initialize(index, document_id)
-         @index, @document_id = index, document_id
-       end
-
-       def perform
-         return true unless ThinkingSphinx.updates_enabled?
-
-         config = ThinkingSphinx::Configuration.instance
-         client = Riddle::Client.new config.address, config.port
-
-         client.update(
-           @index,
-           ['sphinx_deleted'],
-           {@document_id => [1]}
-         ) if ThinkingSphinx.sphinx_running? &&
-           ThinkingSphinx::Search.search_for_id(@document_id, @index)
-
-         true
-       end
-     end
-   end
- end
@@ -1,26 +0,0 @@
- module ThinkingSphinx
-   module Deltas
-     class Job < Delayed::Job
-       def self.enqueue(object, priority = 0)
-         super unless duplicates_exist(object)
-       end
-
-       def self.cancel_thinking_sphinx_jobs
-         if connection.tables.include?("delayed_jobs")
-           delete_all("handler LIKE '--- !ruby/object:ThinkingSphinx::Deltas::%'")
-         end
-       end
-
-       private
-
-       def self.duplicates_exist(object)
-         count(
-           :conditions => {
-             :handler => object.to_yaml,
-             :locked_at => nil
-           }
-         ) > 0
-       end
-     end
-   end
- end
@@ -1,32 +0,0 @@
- require 'spec/spec_helper'
-
- describe ThinkingSphinx::Deltas::Job do
-   describe '.cancel_thinking_sphinx_jobs' do
-     before :each do
-       ThinkingSphinx::Deltas::Job.stub!(:delete_all => true)
-     end
-
-     it "should not delete any rows if the delayed_jobs table does not exist" do
-       ThinkingSphinx::Deltas::Job.connection.stub!(:tables => [])
-       ThinkingSphinx::Deltas::Job.should_not_receive(:delete_all)
-
-       ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs
-     end
-
-     it "should delete rows if the delayed_jobs table does exist" do
-       ThinkingSphinx::Deltas::Job.connection.stub!(:tables => ['delayed_jobs'])
-       ThinkingSphinx::Deltas::Job.should_receive(:delete_all)
-
-       ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs
-     end
-
-     it "should delete only Thinking Sphinx jobs" do
-       ThinkingSphinx::Deltas::Job.connection.stub!(:tables => ['delayed_jobs'])
-       ThinkingSphinx::Deltas::Job.should_receive(:delete_all) do |sql|
-         sql.should match(/handler LIKE '--- !ruby\/object:ThinkingSphinx::Deltas::\%'/)
-       end
-
-       ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs
-     end
-   end
- end
@@ -1,251 +0,0 @@
- module Delayed
-
-   class DeserializationError < StandardError
-   end
-
-   class Job < ActiveRecord::Base
-     MAX_ATTEMPTS = 25
-     MAX_RUN_TIME = 4.hours
-     set_table_name :delayed_jobs
-
-     # By default failed jobs are destroyed after too many attempts.
-     # If you want to keep them around (perhaps to inspect the reason
-     # for the failure), set this to false.
-     cattr_accessor :destroy_failed_jobs
-     self.destroy_failed_jobs = true
-
-     # Every worker has a unique name which by default is the pid of the process.
-     # There are some advantages to overriding this with something which survives worker retarts:
-     # Workers can safely resume working on tasks which are locked by themselves. The worker will assume that it crashed before.
-     cattr_accessor :worker_name
-     self.worker_name = "host:#{Socket.gethostname} pid:#{Process.pid}" rescue "pid:#{Process.pid}"
-
-     NextTaskSQL = '(run_at <= ? AND (locked_at IS NULL OR locked_at < ?) OR (locked_by = ?)) AND failed_at IS NULL'
-     NextTaskOrder = 'priority DESC, run_at ASC'
-
-     ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/
-
-     cattr_accessor :min_priority, :max_priority
-     self.min_priority = nil
-     self.max_priority = nil
-
-     class LockError < StandardError
-     end
-
-     def self.clear_locks!
-       update_all("locked_by = null, locked_at = null", ["locked_by = ?", worker_name])
-     end
-
-     def failed?
-       failed_at
-     end
-     alias_method :failed, :failed?
-
-     def payload_object
-       @payload_object ||= deserialize(self['handler'])
-     end
-
-     def name
-       @name ||= begin
-         payload = payload_object
-         if payload.respond_to?(:display_name)
-           payload.display_name
-         else
-           payload.class.name
-         end
-       end
-     end
-
-     def payload_object=(object)
-       self['handler'] = object.to_yaml
-     end
-
-     def reschedule(message, backtrace = [], time = nil)
-       if self.attempts < MAX_ATTEMPTS
-         time ||= Job.db_time_now + (attempts ** 4) + 5
-
-         self.attempts += 1
-         self.run_at = time
-         self.last_error = message + "\n" + backtrace.join("\n")
-         self.unlock
-         save!
-       else
-         logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consequetive failures."
-         destroy_failed_jobs ? destroy : update_attribute(:failed_at, Time.now)
-       end
-     end
-
-     def self.enqueue(*args, &block)
-       object = block_given? ? EvaledJob.new(&block) : args.shift
-
-       unless object.respond_to?(:perform) || block_given?
-         raise ArgumentError, 'Cannot enqueue items which do not respond to perform'
-       end
-
-       priority = args[0] || 0
-       run_at = args[1]
-
-       Job.create(:payload_object => object, :priority => priority.to_i, :run_at => run_at)
-     end
-
-     def self.find_available(limit = 5, max_run_time = MAX_RUN_TIME)
-
-       time_now = db_time_now
-
-       sql = NextTaskSQL.dup
-
-       conditions = [time_now, time_now - max_run_time, worker_name]
-
-       if self.min_priority
-         sql << ' AND (priority >= ?)'
-         conditions << min_priority
-       end
-
-       if self.max_priority
-         sql << ' AND (priority <= ?)'
-         conditions << max_priority
-       end
-
-       conditions.unshift(sql)
-
-       records = ActiveRecord::Base.silence do
-         find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit)
-       end
-
-       records.sort_by { rand() }
-     end
-
-     # Get the payload of the next job we can get an exclusive lock on.
-     # If no jobs are left we return nil
-     def self.reserve(max_run_time = MAX_RUN_TIME, &block)
-
-       # We get up to 5 jobs from the db. In face we cannot get exclusive access to a job we try the next.
-       # this leads to a more even distribution of jobs across the worker processes
-       find_available(5, max_run_time).each do |job|
-         begin
-           logger.info "* [JOB] aquiring lock on #{job.name}"
-           job.lock_exclusively!(max_run_time, worker_name)
-           runtime = Benchmark.realtime do
-             invoke_job(job.payload_object, &block)
-             job.destroy
-           end
-           logger.info "* [JOB] #{job.name} completed after %.4f" % runtime
-
-           return job
-         rescue LockError
-           # We did not get the lock, some other worker process must have
-           logger.warn "* [JOB] failed to aquire exclusive lock for #{job.name}"
-         rescue StandardError => e
-           job.reschedule e.message, e.backtrace
-           log_exception(job, e)
-           return job
-         end
-       end
-
-       nil
-     end
-
-     # This method is used internally by reserve method to ensure exclusive access
-     # to the given job. It will rise a LockError if it cannot get this lock.
-     def lock_exclusively!(max_run_time, worker = worker_name)
-       now = self.class.db_time_now
-       affected_rows = if locked_by != worker
-         # We don't own this job so we will update the locked_by name and the locked_at
-         self.class.update_all(["locked_at = ?, locked_by = ?", now, worker], ["id = ? and (locked_at is null or locked_at < ?)", id, (now - max_run_time.to_i)])
-       else
-         # We already own this job, this may happen if the job queue crashes.
-         # Simply resume and update the locked_at
-         self.class.update_all(["locked_at = ?", now], ["id = ? and locked_by = ?", id, worker])
-       end
-       raise LockError.new("Attempted to aquire exclusive lock failed") unless affected_rows == 1
-
-       self.locked_at = now
-       self.locked_by = worker
-     end
-
-     def unlock
-       self.locked_at = nil
-       self.locked_by = nil
-     end
-
-     # This is a good hook if you need to report job processing errors in additional or different ways
-     def self.log_exception(job, error)
-       logger.error "* [JOB] #{job.name} failed with #{error.class.name}: #{error.message} - #{job.attempts} failed attempts"
-       logger.error(error)
-     end
-
-     def self.work_off(num = 100)
-       success, failure = 0, 0
-
-       num.times do
-         job = self.reserve do |j|
-           begin
-             j.perform
-             success += 1
-           rescue
-             failure += 1
-             raise
-           end
-         end
-
-         break if job.nil?
-       end
-
-       return [success, failure]
-     end
-
-     # Moved into its own method so that new_relic can trace it.
-     def self.invoke_job(job, &block)
-       block.call(job)
-     end
-
-     private
-
-     def deserialize(source)
-       handler = YAML.load(source) rescue nil
-
-       unless handler.respond_to?(:perform)
-         if handler.nil? && source =~ ParseObjectFromYaml
-           handler_class = $1
-         end
-         attempt_to_load(handler_class || handler.class)
-         handler = YAML.load(source)
-       end
-
-       return handler if handler.respond_to?(:perform)
-
-       raise DeserializationError,
-         'Job failed to load: Unknown handler. Try to manually require the appropiate file.'
-     rescue TypeError, LoadError, NameError => e
-       raise DeserializationError,
-         "Job failed to load: #{e.message}. Try to manually require the required file."
-     end
-
-     # Constantize the object so that ActiveSupport can attempt
-     # its auto loading magic. Will raise LoadError if not successful.
-     def attempt_to_load(klass)
-       klass.constantize
-     end
-
-     def self.db_time_now
-       (ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.now
-     end
-
-     protected
-
-     def before_save
-       self.run_at ||= self.class.db_time_now
-     end
-
-   end
-
-   class EvaledJob
-     def initialize
-       @job = yield
-     end
-
-     def perform
-       eval(@job)
-     end
-   end
- end
@@ -1,7 +0,0 @@
- module Delayed
-   module MessageSending
-     def send_later(method, *args)
-       Delayed::Job.enqueue Delayed::PerformableMethod.new(self, method.to_sym, args)
-     end
-   end
- end
@@ -1,55 +0,0 @@
- module Delayed
-   class PerformableMethod < Struct.new(:object, :method, :args)
-     CLASS_STRING_FORMAT = /^CLASS\:([A-Z][\w\:]+)$/
-     AR_STRING_FORMAT = /^AR\:([A-Z][\w\:]+)\:(\d+)$/
-
-     def initialize(object, method, args)
-       raise NoMethodError, "undefined method `#{method}' for #{self.inspect}" unless object.respond_to?(method)
-
-       self.object = dump(object)
-       self.args = args.map { |a| dump(a) }
-       self.method = method.to_sym
-     end
-
-     def display_name
-       case self.object
-       when CLASS_STRING_FORMAT then "#{$1}.#{method}"
-       when AR_STRING_FORMAT then "#{$1}##{method}"
-       else "Unknown##{method}"
-       end
-     end
-
-     def perform
-       load(object).send(method, *args.map{|a| load(a)})
-     rescue ActiveRecord::RecordNotFound
-       # We cannot do anything about objects which were deleted in the meantime
-       true
-     end
-
-     private
-
-     def load(arg)
-       case arg
-       when CLASS_STRING_FORMAT then $1.constantize
-       when AR_STRING_FORMAT then $1.constantize.find($2)
-       else arg
-       end
-     end
-
-     def dump(arg)
-       case arg
-       when Class then class_to_string(arg)
-       when ActiveRecord::Base then ar_to_string(arg)
-       else arg
-       end
-     end
-
-     def ar_to_string(obj)
-       "AR:#{obj.class}:#{obj.id}"
-     end
-
-     def class_to_string(obj)
-       "CLASS:#{obj.name}"
-     end
-   end
- end
@@ -1,54 +0,0 @@
- module Delayed
-   class Worker
-     SLEEP = 5
-
-     cattr_accessor :logger
-     self.logger = if defined?(Merb::Logger)
-       Merb.logger
-     elsif defined?(RAILS_DEFAULT_LOGGER)
-       RAILS_DEFAULT_LOGGER
-     end
-
-     def initialize(options={})
-       @quiet = options[:quiet]
-       Delayed::Job.min_priority = options[:min_priority] if options.has_key?(:min_priority)
-       Delayed::Job.max_priority = options[:max_priority] if options.has_key?(:max_priority)
-     end
-
-     def start
-       say "*** Starting job worker #{Delayed::Job.worker_name}"
-
-       trap('TERM') { say 'Exiting...'; $exit = true }
-       trap('INT') { say 'Exiting...'; $exit = true }
-
-       loop do
-         result = nil
-
-         realtime = Benchmark.realtime do
-           result = Delayed::Job.work_off
-         end
-
-         count = result.sum
-
-         break if $exit
-
-         if count.zero?
-           sleep(SLEEP)
-         else
-           say "#{count} jobs processed at %.4f j/s, %d failed ..." % [count / realtime, result.last]
-         end
-
-         break if $exit
-       end
-
-     ensure
-       Delayed::Job.clear_locks!
-     end
-
-     def say(text)
-       puts text unless @quiet
-       logger.info text if logger
-     end
-
-   end
- end