postjob 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. checksums.yaml +7 -0
  2. data/README.md +23 -0
  3. data/bin/postjob +11 -0
  4. data/lib/postjob/cli/db.rb +39 -0
  5. data/lib/postjob/cli/job.rb +67 -0
  6. data/lib/postjob/cli/ps.rb +110 -0
  7. data/lib/postjob/cli/run.rb +19 -0
  8. data/lib/postjob/cli.rb +31 -0
  9. data/lib/postjob/error.rb +16 -0
  10. data/lib/postjob/job.rb +66 -0
  11. data/lib/postjob/migrations.rb +97 -0
  12. data/lib/postjob/queue/encoder.rb +40 -0
  13. data/lib/postjob/queue/notifications.rb +72 -0
  14. data/lib/postjob/queue/search.rb +82 -0
  15. data/lib/postjob/queue.rb +331 -0
  16. data/lib/postjob/registry.rb +52 -0
  17. data/lib/postjob/runner.rb +153 -0
  18. data/lib/postjob/workflow.rb +60 -0
  19. data/lib/postjob.rb +170 -0
  20. data/spec/postjob/enqueue_spec.rb +86 -0
  21. data/spec/postjob/full_workflow_spec.rb +86 -0
  22. data/spec/postjob/job_control/manual_spec.rb +45 -0
  23. data/spec/postjob/job_control/max_attempts_spec.rb +70 -0
  24. data/spec/postjob/job_control/timeout_spec.rb +31 -0
  25. data/spec/postjob/job_control/workflow_status_spec.rb +52 -0
  26. data/spec/postjob/process_job_spec.rb +25 -0
  27. data/spec/postjob/queue/encoder_spec.rb +46 -0
  28. data/spec/postjob/queue/search_spec.rb +141 -0
  29. data/spec/postjob/run_spec.rb +69 -0
  30. data/spec/postjob/step_spec.rb +26 -0
  31. data/spec/postjob/sub_workflow_spec.rb +27 -0
  32. data/spec/spec_helper.rb +35 -0
  33. data/spec/support/configure_active_record.rb +18 -0
  34. data/spec/support/configure_database.rb +19 -0
  35. data/spec/support/configure_simple_sql.rb +17 -0
  36. data/spec/support/connect_active_record.rb +6 -0
  37. data/spec/support/test_helper.rb +53 -0
  38. metadata +269 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 93400c11de9310d973cb74ef6912d5e4190d5950
+   data.tar.gz: b505261dc9cb3a2aacf905e8f37027c07ff78946
+ SHA512:
+   metadata.gz: fe6794cfc9b47039aa6f57381f9465b30a15e7b7369b6eb0ec051acfa9c2786ae8a52fbc0cd71b1e450824e61238e18e892e69b7ef9611247eb7c5a16f0ea0b3
+   data.tar.gz: eeb6632e7cebbd95f96d801c8b67a3aea53e5a5c8960801f7292cf22aa62c2986fbcb60eb8564cf545a29f63c35f311057bfbb36887a298a4db5cabfd6be17e3
data/README.md ADDED
@@ -0,0 +1,23 @@
+ # Postjob
+
+ The `postjob` gem provides a simple way to build restartable, asynchronous, and distributed processes.
+
+ ## Development
+
+ After checking out the repo, run `bin/setup` to install dependencies. Make sure you have a local PostgreSQL installation, version 9.5 or later. Add a `postqueue` user with a `postqueue` password, and create a `postqueue_test` database for it. The script `./scripts/prepare_pg` can help with that setup.
+
+ Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that lets you experiment.
+
+ To install this gem onto your local machine, run `bundle exec rake install`.
+
+ To release a new version, run `./scripts/release`, which will bump the version number, create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+
+ ## Contributing
+
+ Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/postqueue.
+
+
+ ## License
+
+ The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
+
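For orientation, here is a minimal usage sketch assembled from the CLI code further down in this diff. The workflow name, argument, and tag are hypothetical, and how workflows are defined and registered is not shown in this excerpt (see `data/lib/postjob/registry.rb` and `data/lib/postjob/runner.rb` in the file list above).

```ruby
# Minimal sketch, assuming a workflow named "MyWorkflow" is already registered
# and the database schema has been migrated. Mirrors cli/job.rb and cli/run.rb below.
require "postjob"

Simple::SQL.connect!                     # what the CLI's connect_to_database! does
Postjob.enqueue! "MyWorkflow", "42",     # CLI arguments arrive in the workflow as strings
                 queue: "ruby", tags: { "customer" => "acme" }

processed = Postjob.run(count: 1) { |job| puts job if job }
puts "processed #{processed} jobs"
```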
data/bin/postjob ADDED
@@ -0,0 +1,11 @@
+ #!/usr/bin/env ruby
+ $: << "lib"
+
+ require "bundler"
+ Bundler.setup
+
+ require "postjob"
+ require "postjob/cli"
+
+ Simple::SQL.logger = Simple::CLI.logger
+ Postjob::CLI.run!(*ARGV)
data/lib/postjob/cli/db.rb ADDED
@@ -0,0 +1,39 @@
+ require "postjob/cli"
+
+ module Postjob::CLI
+   def db_migrate
+     require "postjob/migrations"
+
+     connect_to_database!
+     Postjob::Migrations.migrate!
+   end
+
+   def db_unmigrate
+     require "postjob/migrations"
+
+     connect_to_database!
+     Postjob::Migrations.unmigrate!
+   end
+
+   def db_remigrate
+     require "postjob/migrations"
+
+     connect_to_database!
+     Postjob::Migrations.unmigrate!
+     Postjob::Migrations.migrate!
+   end
+
+   private
+
+   USE_ACTIVE_RECORD = false
+
+   def connect_to_database!
+     if USE_ACTIVE_RECORD
+       require "active_record"
+       abc = ::Simple::SQL::Config.read_database_yml
+       ::ActiveRecord::Base.establish_connection abc
+     else
+       ::Simple::SQL.connect!
+     end
+   end
+ end
data/lib/postjob/cli/job.rb ADDED
@@ -0,0 +1,67 @@
+ module Postjob::CLI
+   # Enqueues a workflow
+   #
+   # Adds a workflow to the job table, with name <workflow> and the given
+   # arguments.
+   #
+   # Note that the workflow will receive the arguments as strings and must be
+   # prepared to handle these.
+   def job_enqueue(workflow, *args, queue: "ruby", tags: nil)
+     connect_to_database!
+
+     Postjob.enqueue! workflow, *args, queue: queue, tags: parse_tags(tags)
+   end
+
+   # Reset failed jobs
+   #
+   # This resets all failed jobs within the job tree below the passed-in
+   # job id.
+   def job_reset(job_id)
+     job_id = Integer(job_id)
+     full_job_id = Simple::SQL.ask "SELECT full_id FROM postjob.postjobs WHERE id=$1", job_id
+     full_job_id || logger.error("No such job: #{job_id}")
+
+     job_ids = Simple::SQL.all <<~SQL
+       SELECT id FROM postjob.postjobs
+       WHERE (full_id LIKE '#{full_job_id}.%' OR full_id='#{full_job_id}')
+         AND status IN ('failed', 'err', 'timeout')
+     SQL
+
+     logger.warn "Affected jobs: #{job_ids.count}"
+     return if job_ids.empty?
+
+     Simple::SQL.ask <<~SQL, job_ids
+       UPDATE postjob.postjobs
+       SET
+         status='ready', next_run_at=now(),
+         results=null, failed_attempts=0, error=NULL, error_message=NULL, error_backtrace=NULL
+       WHERE id = ANY($1);
+     SQL
+
+     Simple::SQL.ask <<~SQL
+       NOTIFY postjob_notifications
+     SQL
+
+     logger.warn "The following jobs have been reset: #{job_ids.join(", ")}"
+   end
+
+   private
+
+   # Parses "foo:bar,baz:quibble" into { "foo" => "bar", "baz" => "quibble" }
+   def parse_tags(tags)
+     return nil unless tags
+     tags.split(",").inject({}) do |hsh, tag|
+       expect! tag => /\A[^:]+:[^:]+\z/
+       k, v = tag.split(":", 2)
+       hsh.update k => v
+     end
+   end
+
+   public
+
+   def registry
+     workflows = Postjob::Registry.workflows
+     names = workflows.map(&:workflow_name)
+     puts names.sort.join("\n")
+   end
+ end
data/lib/postjob/cli/ps.rb ADDED
@@ -0,0 +1,110 @@
+ module Postjob::CLI
+   private
+
+   def ps_query(conditions = [])
+     conditions.compact!
+
+     conditions << "TRUE"
+     condition_fragment = conditions
+                          .compact
+                          .map { |s| "(#{s})" }
+                          .join(" AND ")
+
+     <<~SQL
+       SELECT
+         id,
+         full_id,
+         workflow
+           || (CASE WHEN workflow_version != '' THEN '@' ELSE '' END)
+           || workflow_version
+           || (CASE WHEN workflow_method != 'run' THEN '.' || workflow_method ELSE '' END)
+           || args AS job,
+         workflow_status,
+         status,
+         error,
+         COALESCE((results->0)::varchar, error_message) AS result,
+         next_run_at,
+         error_backtrace,
+         (now() at time zone 'utc') - created_at AS age,
+         updated_at - created_at AS runtime,
+         tags
+       FROM postjob.postjobs
+       WHERE #{condition_fragment}
+       ORDER BY root_id DESC, id ASC
+     SQL
+   end
+
+   def tags_condition(tags)
+     return nil unless tags
+
+     kv = parse_tags(tags)
+     "tags @> '#{Postjob::Queue::Encoder.encode(kv)}'"
+   end
+
+   public
+
+   # Show job status
+   #
+   # This command lists the statuses of all jobs that are either root jobs,
+   # i.e. enqueued workflows, or that have failed.
+   #
+   # Example:
+   #
+   #   postjob ps --tags=foo:bar,bar:baz --limit=100
+   #
+   # For a listing of all jobs in the system use ps:full, see 'postjob help ps:full'
+   # for details.
+   def ps(*ids, limit: "100", tags: nil)
+     expect! limit => /\A\d+\z/
+     limit = Integer(limit)
+
+     unless ids.empty?
+       ps_full *ids, limit: limit, tags: tags
+       return
+     end
+
+     conditions = []
+     conditions << "root_id=id OR status NOT IN ('ready', 'sleep', 'ok')"
+     conditions << tags_condition(tags)
+     conditions << ids_condition(ids)
+
+     query = ps_query(conditions)
+     print_sql limit: limit, query: query
+   end
+
+   def ps_full(*ids, limit: 100, tags: nil)
+     conditions = []
+     conditions << tags_condition(tags)
+     conditions << ids_condition(ids)
+
+     query = ps_query(conditions)
+
+     limit = Integer(limit)
+     print_sql limit: limit, query: query
+   end
+
+   private
+
+   def parse_ids(*ids)
+     return [] if ids.empty?
+     ids.flatten.inject([]) { |a, ids_string| a.concat ids_string.split(",") }
+        .map { |p| Integer(p) }
+        .uniq
+   end
+
+   def ids_condition(ids)
+     ids = parse_ids(ids)
+     return nil if ids.empty?
+     "root_id IN (#{ids.join(',')})"
+   end
+
+   def print_sql(limit:, query:)
+     connect_to_database!
+     records = Simple::SQL.records("#{query} LIMIT $1+1", limit)
+
+     tp records[0, limit]
+     if records.length > limit
+       logger.warn "Output limited to #{limit} entries. Use the --limit command line option for a different limit."
+     end
+   end
+ end
data/lib/postjob/cli/run.rb ADDED
@@ -0,0 +1,19 @@
+ module Postjob::CLI
+   def step
+     run count: 1
+   end
+
+   def run(count: nil, fast: false, quiet: false)
+     count = Integer(count) if count
+     Postjob.fast_mode = fast
+
+     connect_to_database!
+
+     processed = Postjob.run(count: count) do |job|
+       logger.info "Processed job w/id #{job.id}" if job
+       STDERR.print "." unless quiet
+     end
+
+     logger.info "Processed #{processed} jobs"
+   end
+ end
data/lib/postjob/cli.rb ADDED
@@ -0,0 +1,31 @@
+ require "simple/cli"
+ require "table_print"
+
+ Dir.glob("#{File.dirname(__FILE__)}/cli/**/*.rb").sort.each do |path|
+   load(path)
+ end
+
+ module Postjob::CLI
+   include ::Simple::CLI
+
+   def run!(command, *args)
+     Postjob.logger = logger
+     load_environment!
+
+     super
+   end
+
+   private
+
+   def load_environment(path)
+     return unless File.exist?(path)
+
+     logger.warn "#{path}: loading Postjob configuration"
+     load path
+   end
+
+   def load_environment!
+     load_environment("config/environment.rb")
+     load_environment("config/postjob.rb")
+   end
+ end
data/lib/postjob/error.rb ADDED
@@ -0,0 +1,16 @@
+ module Postjob
+   class Error < RuntimeError
+     def initialize(job)
+       @job = job
+     end
+
+     def message
+       msg = "Failing child job"
+       msg += " [#{@job.result}]" if @job.result
+       msg
+     end
+   end
+
+   class Error::Nonrecoverable < Error
+   end
+ end
data/lib/postjob/job.rb ADDED
@@ -0,0 +1,66 @@
+ #
+ # An in-memory representation of a job.
+ #
+ class Postjob::Job < Hash
+   def initialize(hsh)
+     replace hsh.dup
+   end
+
+   def self.attribute(sym)
+     eval <<~RUBY
+       define_method(:#{sym}) { self[:#{sym}] }
+     RUBY
+   end
+
+   attribute :id
+   attribute :parent_id
+   attribute :full_id
+   attribute :root_id
+   attribute :created_at
+   attribute :queue
+   attribute :workflow
+   attribute :workflow_method
+   attribute :workflow_version
+   attribute :args
+   attribute :next_run_at
+   attribute :timing_out_at
+   attribute :failed_attempts
+   attribute :max_attempts
+   attribute :status
+   attribute :results
+   attribute :error
+   attribute :error_message
+   attribute :error_backtrace
+   attribute :recipients
+   attribute :workflow_status
+   attribute :timed_out
+   attribute :tags
+
+   STATUSES = %w(ok ready sleep err failed timeout)
+
+   def resolve
+     expect! status => STATUSES
+
+     case status
+     when "ok"      then result
+     when "ready"   then :pending
+     when "sleep"   then :pending
+     when "timeout" then raise Timeout::Error
+     when "err"     then :pending
+     when "failed"  then raise Postjob::Error::Nonrecoverable, self
+     end
+   end
+
+   def result
+     results && results.first
+   end
+
+   def to_s
+     full_workflow = workflow
+     full_workflow += "@#{workflow_version}" if workflow_version != ""
+     full_workflow += ".#{workflow_method}" if workflow_method != "run"
+
+     args = (self.args || []).map(&:inspect).join(", ")
+     "Postjob##{full_id}: #{full_workflow}(#{args}) (#{status})"
+   end
+ end
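The `attribute` macro above defines Hash-backed readers, and `resolve` translates the queue status into either a return value or an exception. A small illustration with made-up values; it assumes the gem's runtime helpers (such as `expect!`) are loaded:

```ruby
# Illustration only: the attribute values below are invented.
job = Postjob::Job.new(
  id: 1, full_id: "1", status: "ok",
  workflow: "MyWorkflow", workflow_version: "", workflow_method: "run",
  args: ["42"], results: ["done"]
)

job.status   # => "ok"      (reader generated by the `attribute` macro)
job.result   # => "done"    (first element of `results`)
job.resolve  # => "done"    (an "ok" job resolves to its result)
job.to_s     # => 'Postjob#1: MyWorkflow("42") (ok)'
```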
data/lib/postjob/migrations.rb ADDED
@@ -0,0 +1,97 @@
+ module Postjob
+   module Migrations
+     extend self
+
+     SQL = ::Simple::SQL
+     SCHEMA_NAME = Postjob::Queue::SCHEMA_NAME
+
+     def unmigrate!
+       if SCHEMA_NAME != "public"
+         SQL.exec <<~SQL
+           DROP SCHEMA IF EXISTS #{SCHEMA_NAME} CASCADE;
+         SQL
+       end
+     end
+
+     PG_TYPES = <<~SQL
+       SELECT pg_namespace.nspname AS schema, pg_type.typname AS name
+       FROM pg_type
+       LEFT JOIN pg_namespace on pg_namespace.oid=pg_type.typnamespace
+     SQL
+
+     def migrate!
+       SQL.exec <<~SQL
+         CREATE SCHEMA IF NOT EXISTS #{SCHEMA_NAME};
+       SQL
+
+       unless SQL.ask("SELECT 1 FROM (#{PG_TYPES}) sq WHERE (schema,name) = ($1, $2)", SCHEMA_NAME, "statuses")
+         SQL.exec <<~SQL
+           CREATE TYPE #{SCHEMA_NAME}.statuses AS ENUM (
+             'ready',   -- process can run
+             'sleep',   -- process has external dependencies to wait for.
+             'failed',  -- process failed, with nonrecoverable error
+             'err',     -- process errored (with recoverable error)
+             'timeout', -- process timed out
+             'ok'       -- process succeeded
+           );
+         SQL
+       end
+
+       SQL.exec <<~SQL
+         CREATE TABLE IF NOT EXISTS #{SCHEMA_NAME}.postjobs (
+           -- id values, readonly once created
+           id BIGSERIAL PRIMARY KEY,                                              -- process id
+           parent_id BIGINT REFERENCES #{SCHEMA_NAME}.postjobs ON DELETE CASCADE, -- parent process id
+           full_id VARCHAR,                                                       -- full process id
+           root_id BIGINT,                                                        -- root process id
+
+           created_at timestamp NOT NULL DEFAULT (now() at time zone 'utc'),      -- creation timestamp
+           updated_at timestamp NOT NULL DEFAULT (now() at time zone 'utc'),      -- update timestamp
+
+           queue VARCHAR,                                                         -- queue name. (readonly)
+           workflow VARCHAR NOT NULL,                                             -- e.g. "MyJobModule" (readonly)
+           workflow_method VARCHAR NOT NULL DEFAULT 'run',                        -- e.g. "run" (readonly)
+           workflow_version VARCHAR NOT NULL DEFAULT '',                          -- e.g. "1.0"
+           args JSONB,                                                            -- args
+
+           -- process state ----------------------------------------------------
+
+           status #{SCHEMA_NAME}.statuses DEFAULT 'ready',
+           next_run_at timestamp DEFAULT (now() at time zone 'utc'),              -- when possible to run next?
+           timing_out_at timestamp,                                               -- job times out after this timestamp
+           failed_attempts INTEGER NOT NULL DEFAULT 0,                            -- failed how often?
+           max_attempts INTEGER NOT NULL DEFAULT 1,                               -- maximum attempts before failing
+
+           -- process result ---------------------------------------------------
+
+           results JSONB,
+           error VARCHAR,
+           error_message VARCHAR,
+           error_backtrace JSONB,
+
+           -- custom fields
+           workflow_status VARCHAR,
+           tags JSONB
+         );
+
+         -- [TODO] check indices
+         CREATE INDEX IF NOT EXISTS postjobs_tags_idx
+           ON #{SCHEMA_NAME}.postjobs USING GIN (tags jsonb_path_ops);
+         CREATE INDEX IF NOT EXISTS postjobs_parent_id_idx
+           ON #{SCHEMA_NAME}.postjobs(parent_id);
+       SQL
+
+       SQL.exec <<~SQL
+         CREATE TABLE IF NOT EXISTS #{SCHEMA_NAME}.tokens (
+           id BIGSERIAL PRIMARY KEY,
+           postjob_id BIGINT REFERENCES #{SCHEMA_NAME}.postjobs ON DELETE CASCADE,
+           token UUID NOT NULL,
+           created_at timestamp NOT NULL DEFAULT (now() at time zone 'utc')
+         );
+
+         CREATE INDEX IF NOT EXISTS tokens_postjob_id_idx ON #{SCHEMA_NAME}.tokens(postjob_id);
+         CREATE INDEX IF NOT EXISTS tokens_token_idx ON #{SCHEMA_NAME}.tokens(token);
+       SQL
+     end
+   end
+ end
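Every statement above is guarded by `IF NOT EXISTS` (plus an explicit existence check for the enum type), so `migrate!` can be run repeatedly. The CLI's `db:migrate` command boils down to roughly this:

```ruby
# What `postjob db:migrate` amounts to (cf. data/lib/postjob/cli/db.rb above):
require "postjob"
require "postjob/migrations"

Simple::SQL.connect!           # connect via simple-sql
Postjob::Migrations.migrate!   # idempotent: creates schema, enum, tables, indices
```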
data/lib/postjob/queue/encoder.rb ADDED
@@ -0,0 +1,40 @@
+ # rubocop:disable Style/Documentation
+ module Postjob::Queue
+ end
+
+ require "json"
+
+ #
+ # The Postjob::Queue::Encoder module wraps the JSON encoder, to ensure that only
+ # *our* data is encoded.
+ #
+ # Workflows should exclusively use Numbers, true, false, nil, Strings, Times and
+ # Dates, and Arrays and Hashes built of those.
+ #
+ # postjob does not support all data types that Ruby's "json" library can handle.
+ # Symbols are not supported, and neither are things like ActiveRecord objects.
+ module Postjob::Queue::Encoder
+   extend self
+
+   def encode(data)
+     verify_encodable!(data)
+     JSON.generate(data)
+   end
+
+   private
+
+   def verify_encodable!(obj)
+     case obj
+     when nil, true, false then :ok
+     when String then :ok
+     when Numeric then :ok
+     when Time, Date, DateTime then :ok
+     when Hash then verify_encodable!(obj.keys) && verify_encodable!(obj.values)
+     when Array then obj.each { |entry| verify_encodable!(entry) }
+     else
+       msg = "Unencodable #{obj.class.name} object: #{obj.inspect}"
+       raise ArgumentError, msg
+     end
+   end
+ end
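In practice the encoder accepts JSON-friendly values plus Time/Date objects and rejects everything else before it reaches the database, for example:

```ruby
Postjob::Queue::Encoder.encode("attempt" => 1, "tags" => %w(a b))
# => '{"attempt":1,"tags":["a","b"]}'

Postjob::Queue::Encoder.encode(foo: :bar)
# raises ArgumentError ("Unencodable Symbol object: :foo") -- Symbol keys and values are rejected
```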
data/lib/postjob/queue/notifications.rb ADDED
@@ -0,0 +1,72 @@
+ #
+ # Postjob::Queue::Notifications implements LISTEN/NOTIFY signalling between queue clients.
+ module Postjob::Queue::Notifications
+   extend self
+
+   SQL = ::Postjob::Queue::SQL
+   TABLE_NAME = ::Postjob::Queue::TABLE_NAME
+   CHANNEL = "postjob_notifications"
+
+   def notify_listeners
+     SQL.ask "NOTIFY #{CHANNEL}"
+   end
+
+   def wait_for_new_job
+     started_at = Time.now
+
+     start_listening
+
+     loop do
+       wait_time = time_to_next_job
+       break if wait_time && wait_time <= 0
+
+       wait_time ||= 120
+       Postjob.logger.debug "postjob: waiting for notification for up to #{wait_time} seconds"
+       break if Simple::SQL.wait_for_notify(wait_time)
+     end
+
+     # Flush notifications. It is possible that a huge number of notifications
+     # piled up while we have been waiting. The following loop takes care of
+     # those.
+     while Simple::SQL.wait_for_notify(0.000001)
+       :nop
+     end
+
+     Postjob.logger.debug "postjob: awoke after #{format('%.03f secs', (Time.now - started_at))}"
+   end
+
+   private
+
+   def start_listening
+     return if @is_listening
+
+     Simple::SQL.ask "LISTEN #{CHANNEL}"
+     @is_listening = true
+   end
+
+   # Returns the maximum number of seconds to wait until the next runnable
+   # job, or the next job that can time out, comes up.
+   def time_to_next_job
+     queries = []
+
+     escaped_workflows_and_versions = Postjob::Registry.sql_escaped_workflows_and_versions
+     if escaped_workflows_and_versions != ""
+       queries.push <<~SQL
+         SELECT
+           EXTRACT(EPOCH FROM MIN(next_run_at) - (now() at time zone 'utc'))
+         FROM #{TABLE_NAME}
+         WHERE status = 'ready' AND ((workflow, workflow_version) IN (#{escaped_workflows_and_versions}))
+       SQL
+     end
+
+     queries.push <<~SQL
+       SELECT
+         EXTRACT(EPOCH FROM MIN(timing_out_at) - (now() at time zone 'utc'))
+       FROM #{TABLE_NAME}
+       WHERE status IN ('ready', 'sleep')
+     SQL
+
+     timestamps = Simple::SQL.all(queries.join(" UNION "))
+     timestamps.compact.min
+   end
+ end
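Other parts of the gem use the same channel directly; the bare `NOTIFY postjob_notifications` issued by `job_reset` in `cli/job.rb` above is equivalent to calling:

```ruby
# Wake any runners blocked in wait_for_new_job:
Postjob::Queue::Notifications.notify_listeners
```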
data/lib/postjob/queue/search.rb ADDED
@@ -0,0 +1,82 @@
+ # rubocop:disable Style/Documentation
+ module Postjob::Queue
+ end
+
+ #
+ # The Postjob::Queue::Search module is able to inspect the Postjob queue.
+ module Postjob::Queue::Search
+   extend self
+
+   def one(id, filter: {}, into: nil)
+     query = query(page: 0, per: 1, filter: filter, id: id)
+     Simple::SQL.record(query, into: into)
+   end
+
+   def all(page: 0, per: 100, filter: {}, into: nil)
+     query = query(page: page, per: per, filter: filter)
+     Simple::SQL.records(query, into: into)
+   end
+
+   private
+
+   def query(page: nil, per: nil, filter: {}, root_only: true, id: nil)
+     expect! id => [Integer, nil]
+     expect! page => [Integer, nil]
+     expect! per => [Integer, nil]
+     expect! { page >= 0 && per > 0 }
+     expect! filter => Hash
+
+     conditions = []
+     conditions << "id=#{id}" if id
+     conditions << "root_id=id" if root_only
+     conditions << tags_condition(filter)
+
+     query = base_query(conditions)
+     query = paginated_query query, per: per, page: page
+     query
+   end
+
+   def paginated_query(query, per:, page:)
+     expect! per => Integer
+     expect! page => Integer
+
+     <<~SQL
+       SELECT * FROM (#{query}) sq LIMIT #{per} OFFSET #{per * page}
+     SQL
+   end
+
+   def base_query(conditions = [])
+     conditions.compact!
+     conditions << "TRUE"
+     condition_fragment = conditions
+                          .compact
+                          .map { |s| "(#{s})" }
+                          .join(" AND ")
+
+     <<~SQL
+       SELECT
+         id,
+         full_id,
+         workflow || COALESCE('@' || workflow_version, '') || args AS job,
+         workflow_status,
+         status,
+         error,
+         COALESCE((results->0)::varchar, error_message) AS result,
+         next_run_at,
+         error_backtrace,
+         (now() at time zone 'utc') - created_at AS age,
+         updated_at - created_at AS runtime,
+         tags
+       FROM postjob.postjobs
+       WHERE #{condition_fragment}
+       ORDER BY id
+     SQL
+   end
+
+   def tags_condition(keys_and_values)
+     expect! keys_and_values => Hash
+     return nil if keys_and_values.empty?
+
+     "tags @> '#{Postjob::Queue::Encoder.encode(keys_and_values)}'"
+   end
+ end
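Both public entry points wrap the paginated base query above and, by default, only consider root jobs (`root_id=id`). A hedged usage sketch, with an illustrative tag filter:

```ruby
# Returns up to 25 root jobs whose tags contain {"customer": "acme"}.
Postjob::Queue::Search.all(page: 0, per: 25, filter: { "customer" => "acme" })

# Returns the root job with id 123, or nil if it does not exist or is not a root job.
Postjob::Queue::Search.one(123)
```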