postjob 0.4.4 → 0.4.5

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 3db8f37270f64683adc0ef25167f8198cb93e681
4
- data.tar.gz: 323f1a09174b3364e8776c2e59e8a9fad9e22708
3
+ metadata.gz: 78a70bee1c76f0285da58ecc0e1f38f4d420d9f1
4
+ data.tar.gz: 4fe406b37ebe91e317c00a6c8cb84fceb6b5ce55
5
5
  SHA512:
6
- metadata.gz: d22d0304488080812e9f3b967462867b45d1b51c8eb7733bcace9f4f5bfb2a60db95a70c335f71c393e23077a0a30bc0bbca80731783dac5aeebf93f18536b1f
7
- data.tar.gz: a261688942b25f1c1c07870d500138d4a601663e18e081ae4a4d09e756ff0e30f246da60fe2669f7b40adaf9dc271998e6d80a130c0dde5001bbaa87f5300ce1
6
+ metadata.gz: 3e55f152396f288ee5f51ccbc08ab23cb54e82f8afac7b8b69a248d38ff2ada757c075292008810ed749ba585f01591d87cb2780ab49289b46948a83b7360296
7
+ data.tar.gz: b7ccda0358d5377e8d4bcb089e736fd3f06efe5149f82a6ee792870dcfc05f5966f739313fd53834f501fa9901afdd76473b75be22fd70a780b751cc621a0241
data/lib/postjob/job.rb CHANGED
@@ -9,6 +9,11 @@ class Postjob::Job < Hash
9
9
  replace hsh.dup
10
10
  end
11
11
 
12
+ def self.find(job_id)
13
+ scope = Postjob::Queue.search(id: job_id)
14
+ Simple::SQL.ask(scope, into: Postjob::Job)
15
+ end
16
+
12
17
  def self.attribute(sym)
13
18
  eval <<~RUBY
14
19
  define_method(:#{sym}) { self[:#{sym}] }
@@ -55,6 +60,11 @@ class Postjob::Job < Hash
55
60
  end
56
61
  end
57
62
 
63
+ def resolved?
64
+ expect! status => STATUSES
65
+ %w(ok timeout failed).include?(status)
66
+ end
67
+
58
68
  def result
59
69
  results && results.first
60
70
  end
@@ -1,37 +1,66 @@
1
1
  CREATE TABLE IF NOT EXISTS {SCHEMA_NAME}.postjobs (
2
- -- id values, readonly once created
2
+ -- identifiers ------------------------------------------------------------------------------------------
3
+ -- These are set when enqueueing a workflow
3
4
  id BIGSERIAL PRIMARY KEY, -- process id
4
- parent_id BIGINT REFERENCES {SCHEMA_NAME}.postjobs ON DELETE CASCADE, -- parent process id
5
+ parent_id BIGINT REFERENCES {SCHEMA_NAME}.postjobs ON DELETE CASCADE, -- parent process id
5
6
  full_id VARCHAR, -- full process id
6
7
  root_id BIGINT, -- root process id
7
8
 
9
+ -- timestamps -------------------------------------------------------------------------------------------
10
+ -- managed automatically
8
11
  created_at timestamp NOT NULL DEFAULT (now() at time zone 'utc'), -- creation timestamp
9
12
  updated_at timestamp NOT NULL DEFAULT (now() at time zone 'utc'), -- update timestamp
10
13
 
14
+ -- workflow specification -------------------------------------------------------------------------------
15
+ -- Set when enqueueing the workflow, readonly
16
+
11
17
  queue VARCHAR, -- queue name. (readonly)
12
18
  workflow VARCHAR NOT NULL, -- e.g. "MyJobModule" (readonly)
13
19
  workflow_method VARCHAR NOT NULL DEFAULT 'run', -- e.g. "run" (readonly)
14
- workflow_version VARCHAR NOT NULL DEFAULT '', -- e.g. "1.0"
15
20
  args JSONB, -- args
21
+ timing_out_at timestamp, -- job times out after this timestamp
22
+ max_attempts INTEGER NOT NULL DEFAULT 1, -- maximum attempts before failing
23
+
24
+ -- process state ----------------------------------------------------------------------------------------
25
+ -- Managed automatically.
16
26
 
17
- -- process state ----------------------------------------------------
27
+ -- The workflow version is pinned as soon as the workflow is running for the first time.
28
+ -- It is readonly afterwards. Note that the default is not NULL, but '', because this allows
29
+ -- querying via (workflow, workflow_version) IN (('Foo', ''), ('Foo', '1.0'))
30
+ workflow_version VARCHAR NOT NULL DEFAULT '', -- e.g. "1.0"
18
31
 
32
+ -- The workflow status, one of 'ready', 'sleep', 'failed', 'err', 'timeout', 'ok'
19
33
  status {SCHEMA_NAME}.statuses DEFAULT 'ready',
20
- next_run_at timestamp DEFAULT (now() at time zone 'utc'), -- when possible to run next?
21
- timing_out_at timestamp, -- job times out after this timestamp
22
- failed_attempts INTEGER NOT NULL DEFAULT 0, -- failed how often?
23
- max_attempts INTEGER NOT NULL DEFAULT 1, -- maximum attempts before failing
24
34
 
25
- -- process result ---------------------------------------------------
35
+ -- Timestamp of when to next consider running this job.
36
+ next_run_at timestamp DEFAULT (now() at time zone 'utc'),
37
+
38
+ -- Number of failed attempts so far.
39
+ failed_attempts INTEGER NOT NULL DEFAULT 0,
26
40
 
27
- results JSONB,
28
- error VARCHAR,
29
- error_message VARCHAR,
30
- error_backtrace JSONB,
41
+ -- process result ---------------------------------------------------------------------------------------
31
42
 
32
- -- custom fields
43
+ results JSONB, -- The process result, if any. Only valid when status == 'ok'
44
+ error VARCHAR, -- The last error; usually set to the klass of the error.
45
+ error_message VARCHAR, -- The human readable error message, for displaying purposes
46
+ error_backtrace JSONB, -- additional error information, for debugging purposes
47
+
48
+ -- custom fields ----------------------------------------------------------------------------------------
33
49
  workflow_status VARCHAR,
34
- tags JSONB
50
+ tags JSONB,
51
+
52
+ -- processing_client information ------------------------------------------------------------------------
53
+ -- This information is passed along from workers during processing. These columns are only valid
54
+ -- when status == 'processing'
55
+ --
56
+ -- Initially these columns didn't exist, and have been created via another migration
57
+ -- (003b_processing_columns.sql). They are listed here for documentation purposes.
58
+ processing_client varchar, -- host:port of client (taken from pg_stat_activity)
59
+ processing_client_identifier varchar, -- free text info, set via set_client_identifier()
60
+ processing_started_at timestamp, -- when did processing start?
61
+ processing_max_duration float -- maximum expected duration of processing. Afterwards the
62
+ -- processing is considered failed for unknown reasons, and
63
+ -- potentially restarted.
35
64
  );
36
65
 
37
66
  -- [TODO] check indices
@@ -1,3 +1,6 @@
1
+ -- The tokens table contains tokens for externally resolvable workflows. They are considered
2
+ -- secret, and therefore not part of the main table.
3
+
1
4
  CREATE TABLE IF NOT EXISTS {SCHEMA_NAME}.tokens (
2
5
  id BIGSERIAL PRIMARY KEY,
3
6
  postjob_id BIGINT REFERENCES {SCHEMA_NAME}.postjobs ON DELETE CASCADE,
@@ -1,19 +1,18 @@
1
1
  module Postjob::Queue
2
2
  DEFAULT_ATTRIBUTES = [
3
- "id",
4
- "full_id",
5
3
  "workflow || COALESCE('@' || workflow_version, '') || args AS job",
6
- "workflow_status",
7
- "status",
8
- "error",
9
4
  "COALESCE((results->0)::varchar, error_message) AS result",
10
- "next_run_at",
11
- "error_backtrace",
12
5
  "(now() at time zone 'utc') - created_at AS age",
13
- "updated_at - created_at AS runtime",
14
- "tags"
6
+ "updated_at - created_at AS runtime"
15
7
  ]
16
8
 
9
+ def default_attributes
10
+ @default_attributes ||= begin
11
+ column_names = Simple::SQL::Reflection.columns("#{SCHEMA_NAME}.postjobs")
12
+ column_names + DEFAULT_ATTRIBUTES
13
+ end
14
+ end
15
+
17
16
  # Builds a search scope (see Simple::SQL::Scope) for the passed in filter criteria.
18
17
  def search(filter = {})
19
18
  expect! filter => Hash
@@ -21,7 +20,7 @@ module Postjob::Queue
21
20
  # extract options
22
21
  filter = filter.dup
23
22
  root_only = filter.delete(:root_only) || false
24
- attributes = filter.delete(:attributes) || DEFAULT_ATTRIBUTES
23
+ attributes = filter.delete(:attributes) || default_attributes
25
24
  expect! attributes => Array
26
25
 
27
26
  # build Scope
@@ -38,17 +37,46 @@ module Postjob::Queue
38
37
  expect! key => [Symbol, String]
39
38
  end
40
39
 
41
- column_names = Simple::SQL::Reflection.columns("#{SCHEMA_NAME}.postjobs")
42
- column_names += column_names.map(&:to_sym)
40
+ converted_filters = filter.inject({}) do |hsh, (key, value)|
41
+ hsh.update key => convert_filter_value(value, key: key)
42
+ end
43
43
 
44
- column_filters, tags_filters = filter.partition { |key, _| column_names.include?(key) }
44
+ column_filters, tags_filters = converted_filters.partition { |key, _| !column_types[key].nil? }
45
45
 
46
46
  scope = scope.where(Hash[column_filters])
47
47
  scope = scope.where(tags: Hash[tags_filters])
48
48
  scope
49
49
  end
50
50
 
51
- def apply_tag_filters(scope, filters)
52
- scope.where(tags: Hash[filters])
51
+ class << self
52
+ def column_types
53
+ @column_types ||= _column_types
54
+ end
55
+
56
+ private
57
+
58
+ def _column_types
59
+ column_info = ::Simple::SQL::Reflection.column_info("#{SCHEMA_NAME}.postjobs")
60
+ hsh = {}
61
+ column_info.each do |column, rec|
62
+ hsh[column.to_sym] = hsh[column.to_s] = rec.data_type
63
+ end
64
+ hsh
65
+ end
66
+ end
67
+
68
+ def convert_filter_value(value, key:)
69
+ value = Array(value)
70
+
71
+ case Postjob::Queue.column_types[key]
72
+ when "bigint" then simplify_array(value.map(&:to_i))
73
+ when "integer" then simplify_array(value.map(&:to_i))
74
+ when nil then simplify_array(value.map(&:to_i) + value.map(&:to_s))
75
+ else simplify_array(value.map(&:to_s))
76
+ end
77
+ end
78
+
79
+ def simplify_array(ary)
80
+ ary.length == 1 ? ary.first : ary
53
81
  end
54
82
  end
@@ -107,6 +107,13 @@ module Postjob::Runner
107
107
  private
108
108
 
109
109
  # runs a job. Returns a [ status, value, shutdown ] tuple.
110
+ #
111
+ # The shutdown value is used by a worker in run mode (i.e. process
112
+ # indefinitely) to determine whether or not it should cancel
113
+ # processing. It is usually nil; but if the worker received a
114
+ # SIGINT it will be :shutdown instead.
115
+ #
116
+ # We are catching SIGINT to allow the job status to be updated.
110
117
  def invoke_workflow(workflow, job)
111
118
  value = catch(:pending) {
112
119
  expect! job.args => [Array, nil]
data/spec/spec_helper.rb CHANGED
@@ -7,7 +7,7 @@ require "awesome_print"
7
7
 
8
8
  unless ENV["SKIP_SIMPLE_COV"]
9
9
  SimpleCov.start do
10
- minimum_coverage 88
10
+ minimum_coverage 86
11
11
  add_filter "/spec/"
12
12
  end
13
13
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: postjob
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.4.4
4
+ version: 0.4.5
5
5
  platform: ruby
6
6
  authors:
7
7
  - radiospiel
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-06-20 00:00:00.000000000 Z
11
+ date: 2018-07-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rspec
@@ -123,7 +123,7 @@ dependencies:
123
123
  version: '0.4'
124
124
  - - ">="
125
125
  - !ruby/object:Gem::Version
126
- version: 0.4.7
126
+ version: 0.4.8
127
127
  type: :runtime
128
128
  prerelease: false
129
129
  version_requirements: !ruby/object:Gem::Requirement
@@ -133,7 +133,7 @@ dependencies:
133
133
  version: '0.4'
134
134
  - - ">="
135
135
  - !ruby/object:Gem::Version
136
- version: 0.4.7
136
+ version: 0.4.8
137
137
  - !ruby/object:Gem::Dependency
138
138
  name: simple-cli
139
139
  requirement: !ruby/object:Gem::Requirement