mkit 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +21 -0
- data/Gemfile.lock +137 -0
- data/LICENSE +21 -0
- data/README.md +126 -0
- data/Rakefile +54 -0
- data/bin/mkitc +31 -0
- data/bin/mkitd +55 -0
- data/config/database.yml +5 -0
- data/config/mkit_config.yml +15 -0
- data/config/mkitd_config.sh +5 -0
- data/db/migrate/001_setup.rb +105 -0
- data/db/migrate/002_mkit_jobs.rb +17 -0
- data/db/schema.rb +108 -0
- data/lib/mkit/app/controllers/mkitjobs_controller.rb +37 -0
- data/lib/mkit/app/controllers/pods_controller.rb +30 -0
- data/lib/mkit/app/controllers/services_controller.rb +87 -0
- data/lib/mkit/app/helpers/docker_helper.rb +75 -0
- data/lib/mkit/app/helpers/erb_helper.rb +18 -0
- data/lib/mkit/app/helpers/haproxy.rb +41 -0
- data/lib/mkit/app/helpers/interface_helper.rb +17 -0
- data/lib/mkit/app/helpers/services_helper.rb +54 -0
- data/lib/mkit/app/mkit_server.rb +8 -0
- data/lib/mkit/app/model/dns_host.rb +11 -0
- data/lib/mkit/app/model/lease.rb +26 -0
- data/lib/mkit/app/model/mkit_job.rb +48 -0
- data/lib/mkit/app/model/pod.rb +95 -0
- data/lib/mkit/app/model/pool.rb +60 -0
- data/lib/mkit/app/model/service.rb +266 -0
- data/lib/mkit/app/model/service_config.rb +16 -0
- data/lib/mkit/app/model/service_port.rb +30 -0
- data/lib/mkit/app/model/setting.rb +1 -0
- data/lib/mkit/app/model/volume.rb +53 -0
- data/lib/mkit/app/templates/docker/docker_run.sh.erb +1 -0
- data/lib/mkit/app/templates/haproxy/0000_defaults.cfg +23 -0
- data/lib/mkit/app/templates/haproxy/xapp_haproxy.cfg.erb +30 -0
- data/lib/mkit/cmd_runner.rb +27 -0
- data/lib/mkit/config/config.rb +18 -0
- data/lib/mkit/config/environment.rb +26 -0
- data/lib/mkit/config/initializers/001_hash.rb +11 -0
- data/lib/mkit/config/initializers/002_openstruct.rb +7 -0
- data/lib/mkit/config/load_default_configs.rb +29 -0
- data/lib/mkit/config/the_config.yml +3 -0
- data/lib/mkit/ctypes.rb +31 -0
- data/lib/mkit/docker_listener.rb +97 -0
- data/lib/mkit/exceptions.rb +30 -0
- data/lib/mkit/haproxy.rb +48 -0
- data/lib/mkit/job_manager.rb +53 -0
- data/lib/mkit/mkit_dns.rb +54 -0
- data/lib/mkit/mkit_interface.rb +31 -0
- data/lib/mkit/sagas/asaga.rb +11 -0
- data/lib/mkit/sagas/create_pod_saga.rb +28 -0
- data/lib/mkit/sagas/saga_manager.rb +10 -0
- data/lib/mkit/status.rb +47 -0
- data/lib/mkit/utils.rb +51 -0
- data/lib/mkit/version.rb +4 -0
- data/lib/mkit/workers/aworker.rb +11 -0
- data/lib/mkit/workers/haproxy_worker.rb +35 -0
- data/lib/mkit/workers/pod_worker.rb +39 -0
- data/lib/mkit/workers/service_worker.rb +27 -0
- data/lib/mkit/workers/worker_manager.rb +14 -0
- data/lib/mkit.rb +158 -0
- data/mkit.gemspec +40 -0
- data/mkitd +10 -0
- data/samples/apps/postgres.yml +22 -0
- data/samples/apps/rabbitmq.yml +19 -0
- data/samples/daemontools/log/run +44 -0
- data/samples/daemontools/run +42 -0
- data/samples/systemd/mkitd.service +12 -0
- metadata +393 -0
data/lib/mkit/docker_listener.rb
ADDED
@@ -0,0 +1,97 @@
+require 'pty'
+require 'mkit/status'
+
+#
+# https://docs.docker.com/engine/reference/commandline/events
+require 'mkit/app/helpers/docker_helper'
+module MKIt
+  class DockerListener
+    include MKIt::DockerHelper
+
+    def initialize
+      @consumers = []
+    end
+
+    def register_consumer(consumer:)
+    end
+
+    def parse_message(msg)
+      action = msg['Action'].to_sym
+      type = msg['Type'].to_sym
+      MKItLogger.info("docker <#{type}> <#{action}> received: \n\t#{msg}")
+      case type
+      when :container
+        pod_id = msg.id
+        pod_name = msg.Actor.Attributes.name
+        pod = Pod.find_by(name: pod_name)
+        unless pod.nil?
+          case action
+          when :create
+            pod.pod_id = pod_id
+            pod.status = MKIt::Status::CREATED
+            pod.save
+            pod.service.update_status!
+          when :start
+            pod.pod_id = pod_id
+            pod.save
+            pod.service.update_status!
+          when :kill
+            MKItLogger.debug(" #{type} #{action} <<NOOP / TODO>>")
+          when :die
+            MKItLogger.debug(" #{type} #{action} <<NOOP / TODO>>")
+          when :stop
+            pod.service.update_status!
+          else
+            MKItLogger.debug(" #{type} #{action} <<TODO>>")
+          end
+        else
+          MKItLogger.warn("docker <<#{type}>> <#{action}> received: #{msg}. But I don't know anything about pod #{pod_id}")
+        end
+      when :network
+        pod_id = msg.Actor.Attributes.container
+        pod = Pod.find_by(pod_id: pod_id)
+        unless pod.nil?
+          case action
+          when :connect
+            MKItLogger.info("docker network #{action} received: #{msg}")
+            pod.update_ip
+            pod.save
+          when :disconnect
+            MKItLogger.debug(" #{type} #{action} <<NOOP / TODO>>")
+          else
+            MKItLogger.debug(" #{type} #{action} <<TODO>>")
+          end
+        else
+          MKItLogger.warn("docker <<#{type}>> <#{action}> received: #{msg}. But I don't know anything about pod #{pod_id}")
+        end
+      else
+        MKItLogger.info("\t#{type} #{action} <<unknown>>")
+      end
+    end
+
+    def start
+      @thread ||= Thread.new {
+        cmd = "docker events --format '{{json .}}'"
+        begin
+          PTY.spawn( cmd ) do |stdout, stdin, pid|
+            begin
+              stdout.each { |line| parse_message JSON.parse(line).to_o }
+            rescue Errno::EIO
+              MKItLogger.warn("Errno:EIO error, but this probably just means " +
+                "that the process has finished giving output")
+            end
+          end
+        rescue PTY::ChildExited
+          MKItLogger.warn("docker event listener process exited!")
+        end
+      }
+      @thread.run
+      MKItLogger.info("docker listener started")
+    end
+    def stop
+      @thread.exit if @thread
+      MKItLogger.info("docker listener stopped")
+    end
+  end
+end
+
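The listener above tails `docker events --format '{{json .}}'` through a PTY and feeds each JSON line to `parse_message` after a `to_o` conversion. The gem's Hash/OpenStruct initializers (001_hash.rb, 002_openstruct.rb) are not shown in this diff, so the sketch below is only an assumption of how that conversion could behave; the `Hash#to_o` patch and the event payload are illustrative, not the gem's actual code.

    require 'json'
    require 'ostruct'

    # Hypothetical stand-in for the Hash#to_o the listener relies on; the real
    # implementation ships in the gem's (unshown) initializers.
    class Hash
      def to_o
        JSON.parse(to_json, object_class: OpenStruct)
      end
    end

    # Illustrative `docker events` line for a container start.
    line = '{"Type":"container","Action":"start","id":"abc123",' \
           '"Actor":{"Attributes":{"name":"mypod"}}}'

    msg = JSON.parse(line).to_o
    msg['Action']              # => "start" (symbolized to :start in parse_message)
    msg.id                     # => "abc123"
    msg.Actor.Attributes.name  # => "mypod"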
data/lib/mkit/exceptions.rb
ADDED
@@ -0,0 +1,30 @@
+module MKIt
+  class BaseException < Exception
+    attr_reader :error_code
+    def initialize(error_code, message = nil)
+      super(message)
+      @error_code = error_code
+    end
+  end
+
+  class ServiceAlreadyExists < BaseException
+    def initialize(message = nil)
+      super(409, message)
+    end
+  end
+  class ServiceNameMismatch < BaseException
+    def initialize(message = nil)
+      super(400, message)
+    end
+  end
+  class ServiceNotFoundException < StandardError; end
+  class PodNotFoundException < StandardError; end
+  class AppAlreadyDeployedException < StandardError; end
+  class InvalidPortMappingTypeException < StandardError; end
+
+  class PoolExaustedException < StandardError; end
+
+  class CmdRunnerException < StandardError; end
+
+end
+
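`BaseException` carries an HTTP-style `error_code` next to the message; the rescue-to-response wiring presumably lives in the Sinatra controllers, which this hunk does not show. A minimal illustration of raising and catching one of these (the service name is made up):

    require 'mkit/exceptions'

    begin
      raise MKIt::ServiceAlreadyExists, 'service my_app already exists'
    rescue MKIt::BaseException => e
      puts "#{e.error_code} #{e.message}"   # => 409 service my_app already exists
    end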
data/lib/mkit/haproxy.rb
ADDED
@@ -0,0 +1,48 @@
+require 'pty'
+#
+#
+#
+module MKIt
+  class HAProxy
+
+    def initialize
+      # configs
+      # run standalone | daemon
+      @running = false
+    end
+
+    def start
+      @thread ||= Thread.new {
+        while (@running) do
+          cmd = "/usr/sbin/haproxy -f /etc/haproxy/haproxy.d"
+          %x{#{cmd}}
+          sleep 1
+        end
+      }
+      @thread.run
+      puts "haproxy started"
+    end
+
+    def start
+      @running = true
+      @thread ||= Thread.new {
+        while (@running) do
+          %{/usr/sbin/haproxy -f /etc/haproxy/haproxy.d/}
+          sleep(1)
+        end
+      }
+      puts "proxy started"
+    end
+
+    def stop
+      puts "proxy stopped"
+    end
+
+    def status
+    end
+
+    def reload
+    end
+  end
+end
+
data/lib/mkit/job_manager.rb
ADDED
@@ -0,0 +1,53 @@
+require 'mkit/app/model/mkit_job'
+require 'mkit/utils'
+
+module MKIt
+  class JobManager
+    def initialize
+      @workers = {}
+    end
+
+    def register_worker(worker, topics)
+      topics.each { | topic |
+        @workers[topic] ||= []
+        MKItLogger.info("register #{worker.class} for topic #{topic}")
+        @workers[topic] << worker
+      }
+    end
+
+    def start
+      MKItLogger.info('starting job manager')
+      @thread = Thread.new do
+        loop do
+          job = MkitJob.take
+          begin
+            if job.nil?
+              sleep(10)
+            else
+              topic = job.topic
+              job.processing!
+              if @workers[topic].nil?
+                MKItLogger.warn("no workers found for topic '#{topic}'")
+              else
+                workers = @workers[topic]
+                workers.each { | worker |
+                  worker.do_the(job)
+                }
+              end
+            end
+            job.done! unless job.nil?
+          rescue Exception => e
+            job.error! unless job.nil?
+            MKItLogger.error e, e.message, e.backtrace.join("\n")
+          end
+        end
+      end
+    end
+
+    def stop
+      @thread.exit if @thread
+    end
+  end
+end
+
+
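JobManager polls `MkitJob.take` on a background thread and hands each job to every worker registered for its topic. A minimal wiring sketch, assuming the gem is loaded; it uses only calls that appear in this diff (`register_worker`, `start`, a worker's `topics`, and the keyword form of `MkitJob.publish` used by the sagas and workers below), and the service id is illustrative:

    manager = MKIt::JobManager.new
    worker  = MKIt::ServiceWorker.new
    manager.register_worker(worker, worker.topics)
    manager.start   # polling thread starts calling MkitJob.take

    # Publish a job for the worker to pick up on its next poll.
    MkitJob.publish(topic: :start_service, service_id: 42)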
data/lib/mkit/mkit_dns.rb
ADDED
@@ -0,0 +1,54 @@
+require 'async/dns'
+require 'async/dns/system'
+require 'mkit/mkit_interface'
+require 'ipaddr'
+
+# INTERFACES = [
+#   [:udp, "127.0.0.20", 53],
+#   [:tcp, "127.0.0.20", 53],
+# ]
+# @resolver = RubyDNS::Resolver.new(
+#   [:udp, "192.168.4.254", 53],
+#   [:tcp, "192.168.4.254", 53]
+# )
+#
+# # Use upstream DNS for name resolution.
+UPSTREAM = RubyDNS::Resolver.new([
+  [:udp, "8.8.8.8", 53],
+  [:tcp, "8.8.8.8", 53]
+])
+
+Name = Resolv::DNS::Name
+IN = Resolv::DNS::Resource::IN
+
+module MKIt
+  class DNS < Async::DNS::Server
+    def initialize
+      addr = MKIt::Interface.ip
+      listen_addr = [
+        [:udp, addr, 53],
+        [:tcp, addr, 53],
+      ]
+      super(listen_addr)
+      @logger.info!
+      @resolver = RubyDNS::Resolver.new(Async::DNS::System.nameservers)
+    end
+    def process(name, resource_class, transaction)
+      host = DnsHost.find_by_name(name)
+      if host.nil?
+        transaction.passthrough!(@resolver)
+      else
+        ipaddr = IPAddr.new host.ip
+        if resource_class == Resolv::DNS::Resource::IN::A
+          transaction.respond!(ipaddr.to_s)
+        elsif resource_class == Resolv::DNS::Resource::IN::AAAA
+          transaction.respond!(ipaddr.ipv4_mapped.to_s)
+        else
+          transaction.fail!(:NXDomain)
+        end
+      end
+    end
+  end
+end
+
+
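For names it owns (rows in DnsHost), the server answers A queries with the stored IPv4 address and AAAA queries with its IPv4-mapped IPv6 form; anything else is passed through to the system resolvers. The AAAA branch is plain `IPAddr` behaviour (the address below is made up):

    require 'ipaddr'

    ip = IPAddr.new('10.110.0.10')   # as it might be stored on a DnsHost record
    ip.to_s                # => "10.110.0.10"        (A answer)
    ip.ipv4_mapped.to_s    # => "::ffff:10.110.0.10" (AAAA answer)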
data/lib/mkit/mkit_interface.rb
ADDED
@@ -0,0 +1,31 @@
+require 'mkit/status'
+require 'mkit/utils'
+require 'mkit/exceptions'
+require 'mkit/app/helpers/interface_helper'
+
+module MKIt
+  class Interface
+    def self.ip
+      main_pool = Pool.find_by_name(MKIt::Utils.me)
+      main_pool.ip
+    end
+
+    def self.up
+      main_pool = Pool.find_by_name(MKIt::Utils.me)
+      interface_name = "#{main_pool.name}0"
+      interface_type = "tap"
+      ip = main_pool.ip
+      mask = main_pool.netmask
+      MKIt::InterfaceHelper.create(name: interface_name, ctype: interface_type)
+      MKIt::InterfaceHelper.up(name: interface_name, ip: ip, mask: mask)
+    end
+
+    def self.down
+      main_pool = Pool.find_by_name(MKIt::Utils.me)
+      interface_name = "#{main_pool.name}0"
+      interface_type = "tap"
+      MKIt::InterfaceHelper.down(name: interface_name)
+      MKIt::InterfaceHelper.remove(name: interface_name, ctype: interface_type)
+    end
+  end
+end
data/lib/mkit/sagas/create_pod_saga.rb
ADDED
@@ -0,0 +1,28 @@
+module MKIt
+  class CreatePodSaga < ASaga
+
+    def topics
+      %w{create_pod_saga}
+    end
+
+    #
+    # create_pod_saga:
+    #
+    # payload:
+    #   * service_id
+    #
+    # triggers
+    #   * nothing
+    #
+    def do_the(job)
+      MKItLogger.info("#{self.class} <#{job.topic}> #{job.inspect}....")
+      service = Service.find(job.service_id)
+      # create pod
+
+      pd = Pod.new( service: service, status: MKIt::Status::CREATED, name: SecureRandom.uuid.gsub('-','')[0..11])
+      service.pod << pd
+      service.save
+      MkitJob.publish(topic: :start_pod, service_id: job.service_id, pod_id: pd.id)
+    end
+  end
+end
data/lib/mkit/status.rb
ADDED
@@ -0,0 +1,47 @@
+#
+#
+#
+module MKIt
+  class MKItStatus
+    def initialize(status)
+      @status = status
+    end
+    def to_s
+      @status.to_s
+    end
+  end
+
+  module Status
+    # APP
+    CREATED = 'CREATED'
+    CREATING = 'CREATING'
+    DEPLOYING = 'DEPLOYING'
+    DEPLOYED = 'DEPLOYED'
+    PENDING = 'PENDING'
+    DEGRATED = 'DEGRATED'
+
+    # network
+    RESERVED = 'RESERVED'
+    IN_USE = 'IN_USE'
+    EXPIRED = 'EXPIRED'
+
+    # pods
+    STARTING = 'STARTING'
+    RUNNING = 'RUNNING'
+    STOPPED = 'STOPPED'
+    STOPING = 'STOPING'
+    PAUSED = 'PAUSED'
+
+    # Service
+    RESTARTING = 'RESTARTING'
+    UPDATING = 'UPDATING'
+  end
+
+  module PoolStatus
+
+    RESERVED = 'RESERVED'
+    IN_USE = 'IN_USE'
+    EXPIRED = 'EXPIRED'
+    EXAUSTED = 'EXAUSTED'
+  end
+end
data/lib/mkit/utils.rb
ADDED
@@ -0,0 +1,51 @@
+require 'erb'
+require 'mkit/config/config'
+module MKIt
+  module Utils
+    module_function
+
+    def me
+      'mkit'
+    end
+
+    def log
+      Console.logger
+    end
+
+    def root
+      File.expand_path("../../..", __FILE__)
+    end
+
+    def set_config_dir(config_dir)
+      @config_dir = config_dir
+    end
+
+    def config_dir
+      @config_dir.nil? ? "#{self.root}/config" : @config_dir
+    end
+
+    def load_db_config(db_config_dir = self.config_dir)
+      self.log.info "loading database configurations from '#{config_dir}'..."
+      YAML::load(ERB.new(IO.read("#{db_config_dir}/database.yml")).result)
+    end
+
+    def db_config_to_uri(env = MKIt::Config.mkit.database.env)
+      config = self.load_db_config[env]
+
+      if config["username"] || config["password"]
+        user_info = [ config["username"], config["password"] ].join(":")
+      else
+        user_info = nil
+      end
+      URI::Generic.new(config["adapter"],user_info,
+                       config["hostname"] || "localhost",
+                       config["port"],
+                       nil,
+                       "/#{config["database"]}",
+                       nil,
+                       nil,
+                       nil).to_s
+    end
+  end
+end
+
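`db_config_to_uri` turns a `database.yml` entry into a connection URI via `URI::Generic`. The `config/database.yml` shipped with the gem is not shown in this hunk, so the entry below is hypothetical and only illustrates the shape of the conversion:

    require 'uri'

    # Hypothetical database.yml entry:
    #   development:
    #     adapter: sqlite3
    #     database: db/mkit.sqlite3
    config = { 'adapter' => 'sqlite3', 'database' => 'db/mkit.sqlite3' }

    URI::Generic.new(config['adapter'], nil,
                     config['hostname'] || 'localhost',
                     config['port'],
                     nil,
                     "/#{config['database']}",
                     nil, nil, nil).to_s
    # => "sqlite3://localhost/db/mkit.sqlite3"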
data/lib/mkit/workers/haproxy_worker.rb
ADDED
@@ -0,0 +1,35 @@
+module MKIt
+  class HAProxyWorker < AWorker
+
+    def topics
+      %w{create_proxy_config destroy_proxy_config update_proxy_config restart_proxy reload_proxy}
+    end
+
+    def do_the(job)
+      MKItLogger.info("#{self.class} working on the job #{job.inspect}....")
+      unless job.service_id.nil?
+        srv = Service.find(job.service_id)
+        config = srv.proxy_config
+      end
+      case job.topic.to_sym
+      when :update_proxy_config
+        MKItLogger.debug config.inspect
+        MKIt::HAProxy.create_config_file(filename: config[:filename], data: config[:data])
+        MKIt::HAProxy.reload
+      when :destroy_proxy_config
+        MKIt::HAProxy.delete_config_file(filename: job.data['filename'])
+        MKIt::HAProxy.reload
+      when :create_proxy_config
+        MKIt::HAProxy.create_config_file(filename: config[:filename], data: config[:data])
+        MKIt::HAProxy.reload
+      when :reload_proxy
+        MKIt::HAProxy.reload
+      when :restart_proxy
+        MKIt::HAProxy.restart
+      else
+        MKItLogger.warn("#{self.class} <<TODO>> job #{job.inspect}....")
+      end
+    end
+  end
+end
+
data/lib/mkit/workers/pod_worker.rb
ADDED
@@ -0,0 +1,39 @@
+#
+module MKIt
+  class PodWorker < AWorker
+
+    def topics
+      %w{pod_network_connected pod_network_disconnected
+         pod_unhealthy
+         start_pod stop_pod update_pod deploy_pod destroy_pod
+         pod_ip_updated pod_destroyed
+        }
+    end
+
+    def do_the(job)
+      MKItLogger.info("#{self.class} <#{job.topic}> job #{job.inspect}....")
+      pod = Pod.find(job.pod_id) unless job.pod_id.nil?
+      case job.topic.to_sym
+      when :deploy_pod
+        MKItLogger.warn("#{self.class} @deprecated job #{job.inspect}....")
+      when :start_pod
+        pod.start
+      when :stop_pod
+        pod.stop
+      when :destroy_pod
+        pod.stop
+        pod.destroy
+      when :pod_destroyed
+        if Service.exists?(job.service_id)
+          MkitJob.publish(topic: :update_proxy_config, service_id: job.service_id)
+        end
+      when :pod_ip_updated
+        MkitJob.publish(topic: :update_proxy_config, service_id: job.service_id)
+      else
+        MKItLogger.info("#{self.class} <<TODO>> job #{job.inspect}....")
+      end
+    end
+
+  end
+end
+
data/lib/mkit/workers/service_worker.rb
ADDED
@@ -0,0 +1,27 @@
+module MKIt
+  class ServiceWorker < AWorker
+
+    def topics
+      %w{start_service stop_service update_service delete_service}
+    end
+
+    def do_the(job)
+      MKItLogger.info("#{self.class} <#{job.topic}> job #{job.inspect}....")
+      srv = Service.find(job.service_id)
+      case job.topic.to_sym
+      when :start_service
+        srv.start
+      when :stop_service
+        srv.stop
+      when :update_service
+        MKItLogger.info("#{self.class} <#{job.topic}> <<TODO>> job #{job.inspect}....")
+      when :delete_service
+        Service.destroy(job.service_id)
+      else
+        MKItLogger.info("#{self.class} <#{job.topic}> <<TODO>> job #{job.inspect}....")
+      end
+    end
+
+  end
+end
+
data/lib/mkit/workers/worker_manager.rb
ADDED
@@ -0,0 +1,14 @@
+require 'mkit/workers/aworker'
+require 'mkit/workers/service_worker'
+require 'mkit/workers/pod_worker'
+require 'mkit/workers/haproxy_worker'
+
+module MKIt
+  class WorkerManager
+    def self.register_workers
+      ServiceWorker.new
+      PodWorker.new
+      HAProxyWorker.new
+    end
+  end
+end