batches_task_processor 0.3.2 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 9e3ca93ca15247aafcea0afc6873b11bc26d8519e7b5ef477ceefd9174552888
-  data.tar.gz: e2669c5ed9c3f8afdefa0d5e59ee7750437560d95b78792dfa5c43da2edbbf1e
+  metadata.gz: 69f248153b65e49d24ebbebaa064cb3f5e03aec503cb2d2b9add636c96e8aad4
+  data.tar.gz: d020b38e7e8fea12d7a7df841ca976881ab86b7dee326f3befd4ccc94d205033
 SHA512:
-  metadata.gz: 140fa98d409be395a422f2687d64d0690b1b65ae1ded1cd7e00e2b0d3217680c65b8629b7aea8da5b38dd6b62fb79acd8631ae250bfe262121b1107466b65f16
-  data.tar.gz: e594672fb7e890d0e475f7480f6b503c34d64fc4525c50c6bfe47909b1c4b573ab6af62db13140d847cd6a5ce49332e42d7b3ea8db75d5931d27b284dea41b99
+  metadata.gz: 0bfdfae31beed3d6bb7ea8c3004f842bd5936a94d9f188ebfaeb573c44f38207aeef2ea343bc9db848ce66c50e1ff27d9da4c5a261f98b783c95551e7a3f7059
+  data.tar.gz: 4219b4010280239086ee4a9dabe50876a495f599f2d4450227cd8c8dac105ae9e798aa7e2348838b52b8e9c7f78a61aded822e334f9674cca5c1edb92cd8784a
data/README.md CHANGED
@@ -18,8 +18,12 @@ And then execute: `bundle install && bundle exec rake db:migrate`
   key: 'my_process',
   data: Article.all.limit(200000).pluck(:id),
   qty_jobs: 10,
+  queue_name: 'default',
   preload_job_items: 'Article.where(id: items)',
-  process_item: 'puts "my article ID: #{item.id}"'
+  process_item: %{
+    puts "Processing article #{item.id}..."
+    HugeArticleProcessor.new(item).call
+  }
 )
 task.start!
 ```
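The README hunk above only shows the argument list. A minimal end-to-end sketch follows; the `BatchesTaskProcessor::Model.create!` entry point is an assumption (the actual call sits above the visible hunk, but `Model` is an ActiveRecord model), `Article` and `HugeArticleProcessor` come from the README example, and a non-interpolating heredoc is used so that `#{item.id}` is evaluated by the processor rather than at creation time:

```ruby
# Sketch only: create! is an assumed entry point, not confirmed by this hunk.
task = BatchesTaskProcessor::Model.create!(
  key: 'my_process',
  data: Article.all.limit(200_000).pluck(:id),    # IDs to split across jobs
  qty_jobs: 10,                                   # number of parallel jobs
  queue_name: 'default',                          # new option in this release
  preload_job_items: 'Article.where(id: items)',  # code string run per job to batch-load records
  process_item: <<~'RUBY'                         # code string the gem runs for each item
    puts "Processing article #{item.id}..."
    HugeArticleProcessor.new(item).call
  RUBY
)
task.start!
```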
@@ -4,11 +4,11 @@ require 'csv'
 module BatchesTaskProcessor
   class Model < ActiveRecord::Base
     self.table_name = 'batches_task_processors'
-    has_many :items, class_name: 'BatchesTaskProcessor::ModelItem', dependent: :destroy, foreign_key: :batches_task_processors_id
+    has_many :items, class_name: 'BatchesTaskProcessor::ModelItem', dependent: :delete_all, foreign_key: :batches_task_processors_id
     validates :process_item, presence: true
     validates :key, presence: true
-    before_create :apply_data_uniqueness
-    before_create :check_qty_jobs
+    before_save :apply_data_uniqueness
+    before_save :check_qty_jobs
     # state: :pending, :processing, :finished, :canceled

     def qty_items_job
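Two behavioural notes on this hunk: `dependent: :delete_all` removes a task's items with a single SQL DELETE instead of instantiating and destroying each row, and moving the callbacks to `before_save` means the data-uniqueness and job-count checks also run when an existing task is updated, not only on create. A rough illustration (the `find_by(key:)` lookup is just an assumed way to fetch a task):

```ruby
task = BatchesTaskProcessor::Model.find_by(key: 'my_process') # assumed lookup

# before_save (previously before_create): the apply_data_uniqueness and
# check_qty_jobs callbacks now also run when an existing task is updated, e.g.:
task.update!(qty_jobs: 20)

# dependent: :delete_all: destroying the task removes its items with a single
#   DELETE FROM batches_task_processor_items WHERE batches_task_processors_id = ...
# instead of loading and destroying each ModelItem (no per-item callbacks run).
task.destroy
```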
@@ -38,11 +38,16 @@ module BatchesTaskProcessor
     end

     def status
-      Rails.logger.info "Process status: #{task_model.items.count}/#{task_model.data.count}"
+      Rails.logger.info "Process status: #{items.count}/#{data.count}"
+    end
+
+    def retry_failures
+      start!
     end

     def export
-      path = Rails.root.join('tmp/batches_task_processor_result.csv')
+      filename = (key || 'batches_task_processor_result').try(:parameterize)
+      path = Rails.root.join("tmp/#{filename}.csv")
       data = items.pluck(:key, :result, :error_details)
       data = [['Key', 'Result', 'Error details']] + data
       File.write(path, data.map(&:to_csv).join)
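`retry_failures` is simply a second call to `start!`; combined with the reworked `already_processed?` further down (which treats only items without `error_details` as done), a rerun re-executes the failed keys only. `export` now derives the CSV filename from the task key. A hedged usage sketch, assuming these are instance methods on the task record (as the use of `items`, `data` and `key` suggests):

```ruby
task = BatchesTaskProcessor::Model.find_by(key: 'my_process') # assumed lookup
task.status         # logs "Process status: <items processed>/<total data entries>"
task.retry_failures # calls start! again; only items with error_details are re-run
task.export         # writes tmp/<parameterized key>.csv with Key, Result, Error details columns
```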
@@ -52,10 +52,10 @@ module BatchesTaskProcessor
     def run_job(job)
       log "Running ##{job} job..."
       items = job_items(job)
-      (items.try(:find_each) || items.each).with_index do |item, index|
+      (items.try(:find_each) || items.each).with_index(1) do |item, index|
         key = item.try(:id) || item
         break log('Process cancelled') if process_cancelled?
-        next log("Skipping #{key}...") if already_processed?(key)
+        next if already_processed?(key)

         start_process_item(item, job, key, index)
       end
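`with_index(1)` only changes the logged counter to be 1-based, so the final item reports `N/N` instead of `N-1/N`; already-processed items are also skipped silently now. The counter offset in isolation:

```ruby
%w[a b c].each.with_index.to_a    # => [["a", 0], ["b", 1], ["c", 2]]
%w[a b c].each.with_index(1).to_a # => [["a", 1], ["b", 2], ["c", 3]]
```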
@@ -72,14 +72,14 @@ module BatchesTaskProcessor
     def start_process_item(item, job, key, index)
       log "Processing key: #{key}, job: #{job}, counter: #{index}/#{task_model.qty_items_job}"
       result = process_item(item)
-      task_model.items.create!(key: key, result: result.to_s[0..255])
+      task_model.items.where(key: key).first_or_initialize.update!(result: result, error_details: nil)
     rescue => e
-      task_model.items.create!(key: key, error_details: e.message)
+      task_model.items.where(key: key).first_or_initialize.update!(result: nil, error_details: e.message)
       log "Process failed #{job}/#{key}: #{e.message}"
     end

     def already_processed?(key)
-      task_model.items.where(key: key).exists?
+      task_model.items.where(key: key, error_details: nil).exists?
     end

     def process_cancelled?
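Item rows are now upserted by key (`first_or_initialize.update!`), clearing `error_details` on success and `result` on failure, and `already_processed?` only skips keys whose item has no `error_details`. That is what lets `retry_failures` above pick up exactly the failed keys without duplicating rows. A small inspection sketch under the same assumptions as before:

```ruby
task = BatchesTaskProcessor::Model.find_by(key: 'my_process') # assumed lookup
failed = task.items.where.not(error_details: nil)             # rows a rerun will reprocess
puts "#{failed.count} failed item(s), e.g. #{failed.limit(5).pluck(:key).join(', ')}"
task.retry_failures if failed.exists?
```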
@@ -1,3 +1,3 @@
 module BatchesTaskProcessor
-  VERSION = "0.3.2"
+  VERSION = "0.3.5"
 end
@@ -6,7 +6,7 @@ class AddBatchesTaskProcessor < ActiveRecord::Migration[5.0]
       t.string :key
       t.string :state, default: :pending
       t.json :data, default: [] if support_json?
-      t.text :data unless support_json?
+      t.text :data, limit: 999999 unless support_json?
       t.integer :qty_jobs, default: 10
       t.datetime :finished_at
       t.text :preload_job_items
@@ -18,7 +18,7 @@ class AddBatchesTaskProcessor < ActiveRecord::Migration[5.0]
     create_table :batches_task_processor_items do |t|
       t.belongs_to :batches_task_processors, foreign_key: true, index: { name: 'index_batches_task_processors_parent_id' }
       t.string :key
-      t.text :result
+      t.text :result, limit: 999999
       t.text :error_details
       t.timestamps
     end
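`limit: 999999` on the fallback `text` columns asks the adapter for a larger column type (on MySQL, a text limit above 65,535 bytes is typically mapped to MEDIUMTEXT), so long ID lists and results are not truncated. Since this diff edits the original migration in place, databases that already ran it would need an equivalent change; a hypothetical app-side follow-up, built only from the table and column names shown above and not part of the gem:

```ruby
# Hypothetical migration an application could add; only relevant where the
# json column type is unsupported and the text fallback columns are in use.
class WidenBatchesTaskProcessorTextColumns < ActiveRecord::Migration[5.0]
  def up
    change_column :batches_task_processors, :data, :text, limit: 999999
    change_column :batches_task_processor_items, :result, :text, limit: 999999
  end

  def down
    change_column :batches_task_processors, :data, :text
    change_column :batches_task_processor_items, :result, :text
  end
end
```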
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: batches_task_processor
 version: !ruby/object:Gem::Version
-  version: 0.3.2
+  version: 0.3.5
 platform: ruby
 authors:
 - Owen Peredo
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-08-02 00:00:00.000000000 Z
+date: 2022-08-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails