gush 0.0.1

@@ -0,0 +1,146 @@
+ module Gush
+   class Client
+     attr_reader :configuration
+
+     def initialize(config = Gush.configuration)
+       @configuration = config
+       @sidekiq = build_sidekiq
+       @redis = build_redis
+       load_gushfile
+     end
+
+     def configure
+       yield configuration
+       @sidekiq = build_sidekiq
+       @redis = build_redis
+     end
+
+     def create_workflow(name)
+       id = SecureRandom.uuid.split("-").first
+
+       begin
+         workflow = name.constantize.new(id)
+       rescue NameError
+         raise WorkflowNotFound.new("Workflow with given name doesn't exist")
+       end
+
+       persist_workflow(workflow)
+       workflow
+     end
+
+     def start_workflow(id, jobs = [])
+       workflow = find_workflow(id)
+       workflow.start!
+       persist_workflow(workflow)
+
+       jobs = if jobs.empty?
+         workflow.next_jobs
+       else
+         jobs.map {|name| workflow.find_job(name) }
+       end
+
+       jobs.each do |job|
+         job.enqueue!
+         persist_job(workflow.id, job)
+         enqueue_job(workflow.id, job)
+       end
+     end
+
+     def stop_workflow(id)
+       workflow = find_workflow(id)
+       workflow.stop!
+       persist_workflow(workflow)
+     end
+
+     def all_workflows
+       redis.keys("gush.workflows.*").map do |key|
+         id = key.sub("gush.workflows.", "")
+         find_workflow(id)
+       end
+     end
+
+     def find_workflow(id)
+       data = redis.get("gush.workflows.#{id}")
+       unless data.nil?
+         hash = Yajl::Parser.parse(data, symbolize_keys: true)
+         keys = redis.keys("gush.jobs.#{id}.*")
+         nodes = redis.mget(*keys).map { |json| Yajl::Parser.parse(json, symbolize_keys: true) }
+         workflow_from_hash(hash, nodes)
+       else
+         raise WorkflowNotFound.new("Workflow with given id doesn't exist")
+       end
+     end
+
+     def persist_workflow(workflow)
+       redis.set("gush.workflows.#{workflow.id}", workflow.to_json)
+       workflow.nodes.each {|job| persist_job(workflow.id, job) }
+     end
+
+     def persist_job(workflow_id, job)
+       redis.set("gush.jobs.#{workflow_id}.#{job.class.to_s}", job.to_json)
+     end
+
+     def destroy_workflow(workflow)
+       redis.del("gush.workflows.#{workflow.id}")
+       workflow.nodes.each {|job| destroy_job(workflow.id, job) }
+     end
+
+     def destroy_job(workflow_id, job)
+       redis.del("gush.jobs.#{workflow_id}.#{job.class.to_s}")
+     end
+
+     def worker_report(message)
+       report("gush.workers.status", message)
+     end
+
+     def workflow_report(message)
+       report("gush.workflows.status", message)
+     end
+
+     private
+
+     attr_reader :sidekiq, :redis
+
+     def workflow_from_hash(hash, nodes = nil)
+       flow = hash[:klass].constantize.new(hash[:id], configure: false)
+       flow.logger_builder(hash.fetch(:logger_builder, 'Gush::LoggerBuilder').constantize)
+       flow.stopped = hash.fetch(:stopped, false)
+
+       (nodes || hash[:nodes]).each do |node|
+         flow.nodes << Gush::Job.from_hash(node)
+       end
+
+       flow
+     end
+
+     def report(key, message)
+       redis.publish(key, Yajl::Encoder.new.encode(message))
+     end
+
+     def enqueue_job(workflow_id, job)
+       sidekiq.push(
+         'class' => Gush::Worker,
+         'queue' => configuration.namespace,
+         'args' => [workflow_id, job.class.to_s, configuration.to_json]
+       )
+     end
+
+     def build_sidekiq
+       Sidekiq::Client.new(connection_pool)
+     end
+
+     def build_redis
+       Redis.new(url: configuration.redis_url)
+     end
+
+     def connection_pool
+       ConnectionPool.new(size: configuration.concurrency, timeout: 1) { build_redis }
+     end
+
+     def load_gushfile
+       require configuration.gushfile
+     rescue LoadError
+       raise Thor::Error, "failed to load #{configuration.gushfile.basename}".colorize(:red)
+     end
+   end
+ end
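
A short usage sketch of the client API above. SampleWorkflow and FetchJob are hypothetical names, not part of this release; the sketch relies on Gush.configuration (the default argument of Client#initialize) and on a Gushfile.rb being present, since the constructor requires it via load_gushfile.

# Hypothetical workflow/job names; assumes Gushfile.rb exists in the working directory.
client = Gush::Client.new

workflow = client.create_workflow("SampleWorkflow")  # constantizes the name, assigns a short random id
client.start_workflow(workflow.id)                   # marks it started and enqueues workflow.next_jobs via Sidekiq
client.start_workflow(workflow.id, ["FetchJob"])     # or enqueue only the named jobs

client.all_workflows.map(&:id)                       # scans the gush.workflows.* keys in Redis
client.stop_workflow(workflow.id)                    # flips the stopped flag and persists the workflow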
@@ -0,0 +1,42 @@
+ require 'yajl'
+
+ module Gush
+   class Configuration
+     attr_accessor :concurrency, :namespace, :redis_url, :environment
+
+     def self.from_json(json)
+       new(Yajl::Parser.parse(json, symbolize_keys: true))
+     end
+
+     def initialize(hash = {})
+       self.concurrency = hash.fetch(:concurrency, 5)
+       self.namespace = hash.fetch(:namespace, 'gush')
+       self.redis_url = hash.fetch(:redis_url, 'redis://localhost:6379')
+       self.gushfile = hash.fetch(:gushfile, 'Gushfile.rb')
+       self.environment = hash.fetch(:environment, 'development')
+     end
+
+     def gushfile=(path)
+       @gushfile = Pathname(path)
+     end
+
+     def gushfile
+       raise Thor::Error, "#{@gushfile} not found, please add it to your project".colorize(:red) unless @gushfile.exist?
+       @gushfile.realpath
+     end
+
+     def to_hash
+       {
+         concurrency: concurrency,
+         namespace: namespace,
+         redis_url: redis_url,
+         environment: environment,
+         gushfile: gushfile.to_path
+       }
+     end
+
+     def to_json
+       Yajl::Encoder.new.encode(to_hash)
+     end
+   end
+ end
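
Gush::Configuration exists mainly so the client settings can travel to Sidekiq workers as a JSON string (see enqueue_job in the client above and Worker#configure_client below). A sketch of that round trip with made-up values; note that to_hash resolves the Gushfile path, so a Gushfile.rb (or an explicit :gushfile pointing at an existing file) is assumed.

config = Gush::Configuration.new(concurrency: 10,
                                 namespace: "gush_test",
                                 redis_url: "redis://localhost:6379/1")

json = config.to_json                          # encodes concurrency, namespace, redis_url, environment, gushfile
restored = Gush::Configuration.from_json(json)
restored.concurrency                           # => 10
restored.redis_url                             # => "redis://localhost:6379/1"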
@@ -0,0 +1,3 @@
+ class WorkflowNotFound < StandardError; end
+ class DependencyLevelTooDeep < StandardError; end
+
data/lib/gush/job.rb ADDED
@@ -0,0 +1,161 @@
+ require 'gush/metadata'
+
+ module Gush
+   class Job
+     include Gush::Metadata
+
+     RECURSION_LIMIT = 1000
+
+     DEFAULTS = {
+       finished: false,
+       enqueued: false,
+       failed: false,
+       running: false
+     }
+
+     attr_accessor :finished, :enqueued, :failed, :workflow_id, :incoming, :outgoing,
+       :finished_at, :failed_at, :started_at, :jid, :running
+
+     attr_reader :name
+
+     attr_writer :logger
+
+     def initialize(opts = {})
+       options = DEFAULTS.dup.merge(opts)
+       assign_variables(options)
+     end
+
+     def as_json
+       {
+         name: @name,
+         klass: self.class.to_s,
+         finished: @finished,
+         enqueued: @enqueued,
+         failed: @failed,
+         incoming: @incoming,
+         outgoing: @outgoing,
+         finished_at: @finished_at,
+         started_at: @started_at,
+         failed_at: @failed_at,
+         running: @running
+       }
+     end
+
+     def to_json(options = {})
+       Yajl::Encoder.new.encode(as_json)
+     end
+
+     def self.from_hash(hash)
+       hash[:klass].constantize.new(
+         name: hash[:name],
+         finished: hash[:finished],
+         enqueued: hash[:enqueued],
+         failed: hash[:failed],
+         incoming: hash[:incoming],
+         outgoing: hash[:outgoing],
+         failed_at: hash[:failed_at],
+         finished_at: hash[:finished_at],
+         started_at: hash[:started_at],
+         running: hash[:running]
+       )
+     end
+
+     def before_work
+     end
+
+     def work
+     end
+
+     def after_work
+     end
+
+     def start!
+       @enqueued = false
+       @running = true
+       @started_at = Time.now.to_i
+     end
+
+     def enqueue!
+       @enqueued = true
+       @running = false
+       @failed = false
+       @started_at = nil
+       @finished_at = nil
+       @failed_at = nil
+     end
+
+     def finish!
+       @running = false
+       @finished = true
+       @enqueued = false
+       @failed = false
+       @finished_at = Time.now.to_i
+     end
+
+     def fail!
+       @finished = true
+       @running = false
+       @failed = true
+       @enqueued = false
+       @finished_at = Time.now.to_i
+       @failed_at = Time.now.to_i
+     end
+
+     def enqueued?
+       !!enqueued
+     end
+
+     def finished?
+       !!finished
+     end
+
+     def failed?
+       !!failed
+     end
+
+     def succeeded?
+       finished? && !failed?
+     end
+
+     def running?
+       !!running
+     end
+
+     def can_be_started?(flow)
+       !running? &&
+         !enqueued? &&
+         !finished? &&
+         !failed? &&
+         dependencies_satisfied?(flow)
+     end
+
+     def dependencies(flow, level = 0)
+       fail DependencyLevelTooDeep if level > RECURSION_LIMIT
+       (incoming.map {|name| flow.find_job(name) } + incoming.flat_map{ |name| flow.find_job(name).dependencies(flow, level + 1) }).uniq
+     end
+
+     def logger
+       fail "You cannot log when the job is not running" unless running?
+       @logger
+     end
+
+     private
+
+     def assign_variables(options)
+       @name = options[:name]
+       @finished = options[:finished]
+       @enqueued = options[:enqueued]
+       @failed = options[:failed]
+       @incoming = options[:incoming] || []
+       @outgoing = options[:outgoing] || []
+       @failed_at = options[:failed_at]
+       @finished_at = options[:finished_at]
+       @started_at = options[:started_at]
+       @running = options[:running]
+     end
+
+     def dependencies_satisfied?(flow)
+       dependencies(flow).all? { |dep| !dep.enqueued? && dep.finished? && !dep.failed? }
+     end
+   end
+ end
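
Concrete jobs subclass Gush::Job and override the work hooks; the state flags, JSON serialization and dependency checks all come from the base class. A hypothetical subclass (not part of this gem) and the state transitions the client and worker drive:

class FetchRepositories < Gush::Job
  def work
    # the actual unit of work goes here; #logger is usable only while running? is true
  end
end

job = FetchRepositories.new(name: "FetchRepositories", outgoing: ["PersistRepositories"])
job.enqueue!    # enqueued? => true, timestamps cleared
job.start!      # running? => true, started_at set
job.finish!     # finished? => true, succeeded? => true

# Jobs round-trip through JSON the same way the client persists them under gush.jobs.*:
copy = Gush::Job.from_hash(Yajl::Parser.parse(job.to_json, symbolize_keys: true))
copy.succeeded?  # => true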
@@ -0,0 +1,15 @@
+ module Gush
+   class LoggerBuilder
+     attr_reader :workflow, :job, :jid
+
+     def initialize(workflow, job, jid)
+       @workflow = workflow
+       @job = job
+       @jid = jid
+     end
+
+     def build
+       NullLogger.new
+     end
+   end
+ end
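
The default builder returns a logger that silently drops everything (see NullLogger below). A hypothetical replacement that writes to standard output; the workflow class (not shown in this diff) selects the builder, which is persisted under the :logger_builder key and restored in Client#workflow_from_hash.

require 'logger'

class StdoutLoggerBuilder < Gush::LoggerBuilder
  def build
    logger = Logger.new($stdout)
    logger.progname = "#{workflow.id}/#{job.name} (#{jid})"  # tag entries with workflow, job and Sidekiq jid
    logger
  end
end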
@@ -0,0 +1,24 @@
+ module Gush
+   module Metadata
+
+     def self.included(base)
+       base.extend(ClassMethods)
+     end
+
+     module ClassMethods
+       def metadata(params = {})
+         @metadata = (@metadata || {}).merge(params)
+       end
+     end
+
+     def name
+       metadata[:name] || @name
+     end
+
+     private
+
+     def metadata
+       self.class.metadata
+     end
+   end
+ end
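
Metadata is a small class-level store, used primarily to override the reported name. One subtlety: Gush::Job defines its own attr_reader :name, and a method defined directly in a class takes precedence over one from an included module, so this #name override only applies to classes that do not redefine it themselves. A minimal sketch with a hypothetical class:

class ReportStep
  include Gush::Metadata
  metadata name: "report_step"   # merged into the class-level @metadata hash
end

ReportStep.new.name  # => "report_step" (falls back to @name when no metadata is set)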
@@ -0,0 +1,6 @@
+ module Gush
+   class NullLogger
+     def method_missing(*)
+     end
+   end
+ end
@@ -0,0 +1,3 @@
+ module Gush
+   VERSION = "0.0.1"
+ end
@@ -0,0 +1,100 @@
+ require 'sidekiq'
+ require 'yajl'
+
+ module Gush
+   class Worker
+     include ::Sidekiq::Worker
+     sidekiq_options retry: false
+
+     def perform(workflow_id, job_id, configuration_json)
+       configure_client(configuration_json)
+
+       workflow = client.find_workflow(workflow_id)
+       job = workflow.find_job(job_id)
+
+       start = Time.now
+       report(workflow, job, :started, start)
+
+       job.logger = workflow.build_logger_for_job(job, job_id)
+       job.jid = jid
+
+       failed = false
+       error = nil
+
+       mark_as_started(workflow, job)
+       begin
+         job.before_work
+         job.work
+         job.after_work
+       rescue Exception => e
+         failed = true
+         error = e
+       end
+
+       unless failed
+         report(workflow, job, :finished, start)
+         mark_as_finished(workflow, job)
+
+         continue_workflow(workflow)
+       else
+         log_exception(job.logger, error)
+         mark_as_failed(workflow, job)
+         report(workflow, job, :failed, start, error.message)
+       end
+     end
+
+     private
+
+     attr_reader :client
+
+     def configure_client(config_json)
+       @client = Client.new(Configuration.from_json(config_json))
+     end
+
+     def mark_as_finished(workflow, job)
+       job.finish!
+       client.persist_job(workflow.id, job)
+     end
+
+     def mark_as_failed(workflow, job)
+       job.fail!
+       client.persist_job(workflow.id, job)
+     end
+
+     def mark_as_started(workflow, job)
+       job.start!
+       client.persist_job(workflow.id, job)
+     end
+
+     def report_workflow_status(workflow, job)
+       message = {workflow_id: workflow.id, status: workflow.status, started_at: workflow.started_at, finished_at: workflow.finished_at }
+       client.workflow_report(message)
+     end
+
+     def report(workflow, job, status, start, error = nil)
+       message = {status: status, workflow_id: workflow.id, job: job.name, duration: elapsed(start)}
+       message[:error] = error if error
+       client.worker_report(message)
+     end
+
+     def elapsed(start)
+       (Time.now - start).to_f.round(3)
+     end
+
+     def continue_workflow(workflow)
+       # refetch is important to get correct workflow status
+       unless client.find_workflow(workflow.id).stopped?
+         client.start_workflow(workflow.id)
+       end
+     end
+
+     def log_exception(logger, exception)
+       first, *rest = exception.backtrace
+
+       logger << "#{first}: #{exception.message} (#{exception.class})\n"
+       rest.each do |line|
+         logger << " from #{line}\n"
+       end
+     end
+   end
+ end
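
Worker#report publishes progress as JSON on the gush.workers.status channel (Client#workflow_report uses gush.workflows.status), so any Redis subscriber can follow a run. A minimal listener sketch, assuming the same Redis instance the configuration points at:

require 'redis'
require 'yajl'

redis = Redis.new(url: "redis://localhost:6379")
redis.subscribe("gush.workers.status") do |on|
  on.message do |_channel, payload|
    msg = Yajl::Parser.parse(payload, symbolize_keys: true)
    # :status arrives as "started", "finished" or "failed" after the JSON round trip;
    # :error is present only for failures, :duration is seconds rounded to 3 places.
    puts "#{msg[:workflow_id]} #{msg[:job]} -> #{msg[:status]} (#{msg[:duration]}s)"
  end
end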