karafka 1.2.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. checksums.yaml +7 -0
  2. data/.coditsu.yml +3 -0
  3. data/.console_irbrc +13 -0
  4. data/.gitignore +68 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/.travis.yml +49 -0
  9. data/CHANGELOG.md +458 -0
  10. data/CODE_OF_CONDUCT.md +46 -0
  11. data/CONTRIBUTING.md +41 -0
  12. data/Gemfile +15 -0
  13. data/Gemfile.lock +126 -0
  14. data/MIT-LICENCE +18 -0
  15. data/README.md +102 -0
  16. data/bin/karafka +19 -0
  17. data/config/errors.yml +6 -0
  18. data/karafka.gemspec +42 -0
  19. data/lib/karafka.rb +79 -0
  20. data/lib/karafka/app.rb +45 -0
  21. data/lib/karafka/attributes_map.rb +69 -0
  22. data/lib/karafka/backends/inline.rb +16 -0
  23. data/lib/karafka/base_consumer.rb +68 -0
  24. data/lib/karafka/base_responder.rb +208 -0
  25. data/lib/karafka/callbacks.rb +30 -0
  26. data/lib/karafka/callbacks/config.rb +22 -0
  27. data/lib/karafka/callbacks/dsl.rb +16 -0
  28. data/lib/karafka/cli.rb +54 -0
  29. data/lib/karafka/cli/base.rb +78 -0
  30. data/lib/karafka/cli/console.rb +29 -0
  31. data/lib/karafka/cli/flow.rb +46 -0
  32. data/lib/karafka/cli/info.rb +29 -0
  33. data/lib/karafka/cli/install.rb +42 -0
  34. data/lib/karafka/cli/server.rb +66 -0
  35. data/lib/karafka/connection/api_adapter.rb +148 -0
  36. data/lib/karafka/connection/builder.rb +16 -0
  37. data/lib/karafka/connection/client.rb +107 -0
  38. data/lib/karafka/connection/delegator.rb +46 -0
  39. data/lib/karafka/connection/listener.rb +60 -0
  40. data/lib/karafka/consumers/callbacks.rb +54 -0
  41. data/lib/karafka/consumers/includer.rb +51 -0
  42. data/lib/karafka/consumers/responders.rb +24 -0
  43. data/lib/karafka/consumers/single_params.rb +15 -0
  44. data/lib/karafka/errors.rb +50 -0
  45. data/lib/karafka/fetcher.rb +44 -0
  46. data/lib/karafka/helpers/class_matcher.rb +78 -0
  47. data/lib/karafka/helpers/config_retriever.rb +46 -0
  48. data/lib/karafka/helpers/multi_delegator.rb +33 -0
  49. data/lib/karafka/instrumentation/listener.rb +112 -0
  50. data/lib/karafka/instrumentation/logger.rb +55 -0
  51. data/lib/karafka/instrumentation/monitor.rb +64 -0
  52. data/lib/karafka/loader.rb +28 -0
  53. data/lib/karafka/params/dsl.rb +158 -0
  54. data/lib/karafka/params/params_batch.rb +46 -0
  55. data/lib/karafka/parsers/json.rb +38 -0
  56. data/lib/karafka/patches/dry_configurable.rb +33 -0
  57. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  58. data/lib/karafka/persistence/client.rb +25 -0
  59. data/lib/karafka/persistence/consumer.rb +38 -0
  60. data/lib/karafka/persistence/topic.rb +29 -0
  61. data/lib/karafka/process.rb +62 -0
  62. data/lib/karafka/responders/builder.rb +36 -0
  63. data/lib/karafka/responders/topic.rb +57 -0
  64. data/lib/karafka/routing/builder.rb +61 -0
  65. data/lib/karafka/routing/consumer_group.rb +61 -0
  66. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  67. data/lib/karafka/routing/proxy.rb +37 -0
  68. data/lib/karafka/routing/router.rb +29 -0
  69. data/lib/karafka/routing/topic.rb +60 -0
  70. data/lib/karafka/routing/topic_mapper.rb +55 -0
  71. data/lib/karafka/schemas/config.rb +24 -0
  72. data/lib/karafka/schemas/consumer_group.rb +78 -0
  73. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  74. data/lib/karafka/schemas/responder_usage.rb +39 -0
  75. data/lib/karafka/schemas/server_cli_options.rb +43 -0
  76. data/lib/karafka/server.rb +85 -0
  77. data/lib/karafka/setup/config.rb +193 -0
  78. data/lib/karafka/setup/configurators/base.rb +29 -0
  79. data/lib/karafka/setup/configurators/params.rb +25 -0
  80. data/lib/karafka/setup/configurators/water_drop.rb +32 -0
  81. data/lib/karafka/setup/dsl.rb +22 -0
  82. data/lib/karafka/status.rb +25 -0
  83. data/lib/karafka/templates/application_consumer.rb.example +6 -0
  84. data/lib/karafka/templates/application_responder.rb.example +11 -0
  85. data/lib/karafka/templates/karafka.rb.example +54 -0
  86. data/lib/karafka/version.rb +7 -0
  87. data/log/.gitkeep +0 -0
  88. metadata +303 -0
data/lib/karafka/schemas/server_cli_options.rb
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Schema for validating the correctness of the server cli command options
+     # We validate some basics plus the list of consumer_groups that we want to use, to make
+     # sure that all of them are defined and that a pidfile does not already exist
+     ServerCliOptions = Dry::Validation.Schema do
+       configure do
+         option :consumer_groups
+
+         def self.messages
+           super.merge(
+             en: {
+               errors: {
+                 consumer_groups_inclusion: 'Unknown consumer group.',
+                 pid_existence: 'Pidfile already exists.'
+               }
+             }
+           )
+         end
+       end
+
+       optional(:pid).filled(:str?)
+       optional(:daemon).filled(:bool?)
+       optional(:consumer_groups).filled(:array?)
+
+       validate(consumer_groups_inclusion: :consumer_groups) do |consumer_groups|
+         # If there were no consumer_groups declared in the server cli, it means that we will
+         # run all of them and there is no need to validate them here at all
+         if consumer_groups.nil?
+           true
+         else
+           (consumer_groups - Karafka::Routing::Builder.instance.map(&:name)).empty?
+         end
+       end
+
+       validate(pid_existence: :pid) do |pid|
+         pid ? !File.exist?(pid) : true
+       end
+     end
+   end
+ end
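For context, a hypothetical invocation of this schema, assuming the dry-validation 0.x result API (`#success?`/`#errors`) that karafka 1.2 builds on; the option values below are purely illustrative:

# Illustrative only: validate server CLI options before booting the process.
result = Karafka::Schemas::ServerCliOptions.call(
  pid: 'tmp/pids/karafka.pid',
  daemon: false,
  consumer_groups: %w[events_group]
)

result.success? # => false when the pidfile exists or a group is unknown
result.errors   # => e.g. { pid: ['Pidfile already exists.'] }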
data/lib/karafka/server.rb
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Karafka consuming server class
+   class Server
+     @consumer_threads = Concurrent::Array.new
+
+     # How long should we sleep between checks on shutting down consumers
+     SUPERVISION_SLEEP = 1
+     # What system exit code should we use when we terminate forcefully
+     FORCEFUL_EXIT_CODE = 2
+
+     class << self
+       # Set of consuming threads. Each consumer thread contains a single consumer
+       attr_accessor :consumer_threads
+
+       # Writer for the list of consumer groups that we want to consume in our current process context
+       attr_writer :consumer_groups
+
+       # Method that runs the app
+       def run
+         process.on_sigint { stop_supervised }
+         process.on_sigquit { stop_supervised }
+         process.on_sigterm { stop_supervised }
+         start_supervised
+       end
+
+       # @return [Array<String>] array with names of consumer groups that should be consumed in
+       #   the current server context
+       def consumer_groups
+         # If not specified, a server will listen on all the topics
+         @consumer_groups ||= Karafka::App.consumer_groups.map(&:name).freeze
+       end
+
+       private
+
+       # @return [Karafka::Process] process wrapper instance used to catch system signal calls
+       def process
+         Karafka::Process.instance
+       end
+
+       # Starts Karafka with a supervision
+       # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to
+       #   finish the loop (and it won't happen until we explicitly want to stop)
+       def start_supervised
+         process.supervise
+         Karafka::App.run!
+         Karafka::Fetcher.call
+       end
+
+       # Stops Karafka with a supervision (as long as there is a shutdown timeout)
+       # If consumers won't stop in the given timeframe, it will force them to exit
+       def stop_supervised
+         # Because this is called in the trap context, there is a chance that instrumentation
+         # listeners contain things that aren't allowed from within a trap context.
+         # To bypass that (instead of telling users not to do things they need to)
+         # we spin up a thread to instrument server.stop and server.stop.error and wait until
+         # they're finished
+         Thread.new { Karafka.monitor.instrument('server.stop', {}) }.join
+
+         Karafka::App.stop!
+         # If there is no shutdown timeout, we don't exit but wait until all the consumers
+         # have done their work
+         return unless Karafka::App.config.shutdown_timeout
+
+         # If there is a timeout, we check every 1 second (for the timeout period) whether all
+         # the threads have finished their work. If so, we can just return and the normal
+         # shutdown process will take place
+         Karafka::App.config.shutdown_timeout.to_i.times do
+           return if consumer_threads.count(&:alive?).zero?
+           sleep SUPERVISION_SLEEP
+         end
+
+         raise Errors::ForcefulShutdown
+       rescue Errors::ForcefulShutdown => error
+         Thread.new { Karafka.monitor.instrument('server.stop.error', error: error) }.join
+         # We're done waiting, let's kill them!
+         consumer_threads.each(&:terminate)
+
+         # exit is not within the instrumentation as it would not trigger due to exit
+         Kernel.exit FORCEFUL_EXIT_CODE
+       end
+     end
+   end
+ end
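As a hypothetical boot sketch using only the class methods above (the group name is illustrative): a launcher can narrow the process to specific consumer groups before handing control to the supervised run loop, which blocks until SIGINT/SIGQUIT/SIGTERM arrives.

# Illustrative only: restrict this process to one consumer group, then block
# in the supervised fetch loop until a handled signal triggers shutdown.
Karafka::Server.consumer_groups = %w[events_group]
Karafka::Server.run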
data/lib/karafka/setup/config.rb
@@ -0,0 +1,193 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module containing all Karafka setup related elements like configuration settings,
+   # config validations and configurators for external gems integration
+   module Setup
+     # Configurator for setting up all the framework details that are required to make it work
+     # @note If you want to do some configurations after all of this is done, please add to
+     #   karafka/config a proper file (needs to inherit from Karafka::Setup::Configurators::Base
+     #   and implement a setup method) - after that everything will happen automatically
+     # @note This config object allows creating only 1 level of nesting (nodes). This should be
+     #   enough and will still keep the code simple
+     # @see Karafka::Setup::Configurators::Base for more details about the configurators api
+     class Config
+       extend Dry::Configurable
+       extend Callbacks::Config
+
+       # Available settings
+       # option client_id [String] kafka client_id - used to provide
+       #   default Kafka groups namespaces and to identify the app in kafka
+       setting :client_id
+       # What backend do we want to use to process messages
+       setting :backend, :inline
+       # option logger [Instance] logger that we want to use
+       setting :logger, -> { ::Karafka::Instrumentation::Logger.instance }
+       # option monitor [Instance] monitor that we want to use (defaults to Karafka::Monitor)
+       setting :monitor, -> { ::Karafka::Instrumentation::Monitor.instance }
+       # Mapper used to remap consumer group ids, so in case users migrate from other tools
+       # or need to maintain their own internal consumer group naming conventions, they
+       # can easily do it, replacing the default client_id + consumer name pattern concept
+       setting :consumer_mapper, -> { Routing::ConsumerMapper }
+       # Mapper used to remap names of topics, so we can have clean internal topic naming
+       # despite using any Kafka provider that uses namespacing, etc.
+       # It needs to implement two methods:
+       #   - #incoming - for remapping from the incoming message to our internal format
+       #   - #outgoing - for remapping from the internal topic name into the outgoing message
+       setting :topic_mapper, -> { Routing::TopicMapper }
+       # Default parser for parsing and unparsing incoming and outgoing data
+       setting :parser, -> { Karafka::Parsers::Json }
+       # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
+       # @note Fetching does not equal consuming, see batch_consuming description for details
+       setting :batch_fetching, true
+       # If batch_consuming is true, we will have access to #params_batch instead of #params.
+       # #params_batch will contain params received from Kafka (may be more than 1) so we can
+       # process them in batches
+       setting :batch_consuming, false
+       # Should we operate with a single consumer instance across multiple batches of messages
+       # from the same partition, or should we build a new one for each incoming batch.
+       # Disabling this can be useful when you want to create a new consumer instance for each
+       # incoming batch. Persistence is enabled by default, so as not to create more objects
+       # than needed on each batch
+       setting :persistent, true
+       # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
+       #   longer waits for the consumers to stop gracefully but instead forcefully
+       #   terminates everything.
+       # @note Keep in mind that if your business logic hasn't finished within this time,
+       #   it will be interrupted by the forceful shutdown.
+       # @note If set to nil, it won't forcefully shutdown the process at all.
+       setting :shutdown_timeout, 60
+       # option params_base_class [Class] base class for params class initialization
+       #   This can be either a Hash or a HashWithIndifferentAccess depending on your
+       #   requirements. Note that by using HashWithIndifferentAccess, you trade some
+       #   performance for convenience. This can be useful especially if you already use
+       #   it with Rails, etc.
+       setting :params_base_class, Hash
+
+       # option kafka [Hash] - optional - kafka configuration options
+       setting :kafka do
+         # Array with at least one host
+         setting :seed_brokers
+         # option session_timeout [Integer] the number of seconds after which, if a client
+         #   hasn't contacted the Kafka cluster, it will be kicked out of the group.
+         setting :session_timeout, 30
+         # Time that a given partition will be paused from fetching messages when message
+         # consumption fails. It allows us to process other partitions while the error is being
+         # resolved and also "slows" things down, so it prevents "eating" up all messages and
+         # consuming them with failing code. Use `nil` if you want to pause forever and never retry.
+         setting :pause_timeout, 10
+         # option offset_commit_interval [Integer] the interval between offset commits,
+         #   in seconds.
+         setting :offset_commit_interval, 10
+         # option offset_commit_threshold [Integer] the number of messages that can be
+         #   processed before their offsets are committed. If zero, offset commits are
+         #   not triggered by message consumption.
+         setting :offset_commit_threshold, 0
+         # option heartbeat_interval [Integer] the interval between heartbeats; must be less
+         #   than the session window.
+         setting :heartbeat_interval, 10
+         # option offset_retention_time [Integer] the length of the retention window, known as
+         #   offset retention time
+         setting :offset_retention_time, nil
+         # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
+         #   are stored for further processing. Note that each item in the queue represents a
+         #   response from a single broker
+         setting :fetcher_max_queue_size, 100
+         # option max_bytes_per_partition [Integer] the maximum amount of data fetched
+         #   from a single partition at a time.
+         setting :max_bytes_per_partition, 1_048_576
+         # Whether to consume messages starting at the beginning or to just consume new messages
+         setting :start_from_beginning, true
+         # option min_bytes [Integer] the minimum number of bytes to read before
+         #   returning messages from the server; if `max_wait_time` is reached, this
+         #   is ignored.
+         setting :min_bytes, 1
+         # option max_bytes [Integer] the maximum number of bytes to read before returning
+         #   messages from each broker.
+         setting :max_bytes, 10_485_760
+         # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
+         #   wait before returning data from a single message fetch. By setting this high you also
+         #   increase the fetching throughput - and by setting it low you set a bound on latency.
+         #   This configuration overrides `min_bytes`, so you'll _always_ get data back within the
+         #   time specified. The default value is one second. If you want to have at most five
+         #   seconds of latency, set `max_wait_time` to 5. You should make sure that
+         #   max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
+         setting :max_wait_time, 1
+         # option automatically_mark_as_consumed [Boolean] should we automatically mark received
+         #   messages as consumed (processed) after non-error consumption
+         setting :automatically_mark_as_consumed, true
+         # option reconnect_timeout [Integer] how long we should wait before trying to reconnect
+         #   to a Kafka cluster that went down (in seconds)
+         setting :reconnect_timeout, 5
+         # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
+         #   a broker for the first time. When ruby-kafka initializes, it needs to connect to at
+         #   least one host.
+         setting :connect_timeout, 10
+         # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
+         #   writing to a socket connection to a broker. After this timeout expires the connection
+         #   will be killed. Note that some Kafka operations are by definition long-running, such as
+         #   waiting for new messages to arrive in a partition, so don't set this value too low
+         setting :socket_timeout, 30
+
+         # SSL authentication related settings
+         # option ca_cert [String, nil] SSL CA certificate
+         setting :ssl_ca_cert, nil
+         # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
+         setting :ssl_ca_cert_file_path, nil
+         # option ssl_ca_certs_from_system [Boolean] use the CA certs from your system's default
+         #   certificate store
+         setting :ssl_ca_certs_from_system, false
+         # option ssl_client_cert [String, nil] SSL client certificate
+         setting :ssl_client_cert, nil
+         # option ssl_client_cert_key [String, nil] SSL client certificate key
+         setting :ssl_client_cert_key, nil
+         # option sasl_gssapi_principal [String, nil] sasl principal
+         setting :sasl_gssapi_principal, nil
+         # option sasl_gssapi_keytab [String, nil] sasl keytab
+         setting :sasl_gssapi_keytab, nil
+         # option sasl_plain_authzid [String] the authorization identity to use
+         setting :sasl_plain_authzid, ''
+         # option sasl_plain_username [String, nil] the username used to authenticate
+         setting :sasl_plain_username, nil
+         # option sasl_plain_password [String, nil] the password used to authenticate
+         setting :sasl_plain_password, nil
+         # option sasl_scram_username [String, nil] the username used to authenticate
+         setting :sasl_scram_username, nil
+         # option sasl_scram_password [String, nil] the password used to authenticate
+         setting :sasl_scram_password, nil
+         # option sasl_scram_mechanism [String, nil] SCRAM mechanism, either 'sha256' or 'sha512'
+         setting :sasl_scram_mechanism, nil
+       end
+
+       class << self
+         # Configuration method
+         # @yield Runs a block of code providing a config singleton instance to it
+         # @yieldparam [Karafka::Setup::Config] Karafka config instance
+         def setup
+           configure { |config| yield(config) }
+         end
+
+         # Everything that should be initialized after the setup
+         # Components are in the karafka/config directory and are all loaded one by one
+         # If you want to configure another component, please add a proper file to the config dir
+         def setup_components
+           [
+             Configurators::Params,
+             Configurators::WaterDrop
+           ].each { |klass| klass.setup(config) }
+         end
+
+         # Validate config based on the ConfigurationSchema
+         # @return [Boolean] true if configuration is valid
+         # @raise [Karafka::Errors::InvalidConfiguration] raised when the configuration
+         #   doesn't match the ConfigurationSchema
+         def validate!
+           validation_result = Karafka::Schemas::Config.call(config.to_h)
+
+           return true if validation_result.success?
+
+           raise Errors::InvalidConfiguration, validation_result.errors
+         end
+       end
+     end
+   end
+ end
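A minimal setup sketch built from the settings above; the client id and broker address are illustrative, and `validate!` mirrors what the framework itself does after setup:

# Illustrative only: configure the framework, then validate the result.
Karafka::Setup::Config.setup do |config|
  config.client_id = 'my_app'
  config.batch_consuming = true
  config.shutdown_timeout = 30
  config.kafka.seed_brokers = ['kafka://127.0.0.1:9092']
end

Karafka::Setup::Config.validate! # raises Karafka::Errors::InvalidConfiguration on bad input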
data/lib/karafka/setup/configurators/base.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Setup
+     # Configurators are used to enclose all the external dependencies configurations
+     # upon which Karafka depends
+     class Configurators
+       # Karafka has some components that it relies on (like Sidekiq)
+       # We need to configure all of them only when the framework has been set up.
+       # Any class that descends from this one will be automatically invoked upon setup (after it)
+       # @note This should be used only for internal Karafka dependencies configuration
+       #   End users configuration should go to the after_init block
+       # @example Configure an Example class
+       #   class ExampleConfigurator < Base
+       #     def self.setup(config)
+       #       ExampleClass.logger = Karafka.logger
+       #       ExampleClass.redis = config.redis
+       #     end
+       #   end
+       class Base
+         # @param _config [Karafka::Config] config instance
+         # This method needs to be implemented in a subclass
+         def self.setup(_config)
+           raise NotImplementedError
+         end
+       end
+     end
+   end
+ end
data/lib/karafka/setup/configurators/params.rb
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Setup
+     class Configurators
+       # Karafka::Params::Params are dynamically built based on a user defined parent class,
+       # so we cannot just require it - we need to initialize it after the user is done with
+       # the framework configuration. This is a configurator that does exactly that.
+       class Params < Base
+         # Builds up the Karafka::Params::Params class with a user defined parent class
+         # @param config [Karafka::Setup::Config] Config we can use to set up things
+         def self.setup(config)
+           return if defined? Karafka::Params::Params
+
+           Karafka::Params.const_set(
+             'Params',
+             Class
+               .new(config.params_base_class)
+               .tap { |klass| klass.include(Karafka::Params::Dsl) }
+           )
+         end
+       end
+     end
+   end
+ end
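The effect of this dynamic subclassing, as a hypothetical sketch run after setup with the default `Hash` base class:

# Illustrative only: the dynamically built params class is a subclass of the
# configured base class, with the params DSL mixed in.
Karafka::Params::Params.ancestors.include?(Hash)                  # => true
Karafka::Params::Params.ancestors.include?(Karafka::Params::Dsl)  # => true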
data/lib/karafka/setup/configurators/water_drop.rb
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Setup
+     class Configurators
+       # Class responsible for setting up the WaterDrop configuration
+       class WaterDrop < Base
+         # Sets up the WaterDrop settings
+         # @param config [Karafka::Setup::Config] Config we can use to set up things
+         # @note This will also inject the Karafka monitor as a default monitor into WaterDrop,
+         #   so we have the same monitor within the whole Karafka framework (same with logger)
+         def self.setup(config)
+           ::WaterDrop.setup do |water_config|
+             water_config.deliver = true
+
+             config.to_h.reject { |k, _v| k == :kafka }.each do |k, v|
+               key_assignment = :"#{k}="
+               next unless water_config.respond_to?(key_assignment)
+               water_config.public_send(key_assignment, v)
+             end
+
+             config.kafka.to_h.each do |k, v|
+               key_assignment = :"#{k}="
+               next unless water_config.kafka.respond_to?(key_assignment)
+               water_config.kafka.public_send(key_assignment, v)
+             end
+           end
+         end
+       end
+     end
+   end
+ end
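The mirroring pattern above (copy only the keys the target exposes a writer for) can be shown in isolation; the `Target` struct and `mirror_settings` helper below are purely illustrative, not part of the gem:

# Illustrative only: copy matching settings onto any object that exposes writers.
Target = Struct.new(:client_id, :logger)

def mirror_settings(source_hash, target)
  source_hash.each do |key, value|
    writer = :"#{key}="
    next unless target.respond_to?(writer) # skip keys the target doesn't accept
    target.public_send(writer, value)
  end
end

target = Target.new
mirror_settings({ client_id: 'my_app', unknown: 1 }, target)
target.client_id # => "my_app"; :unknown was silently skipped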
data/lib/karafka/setup/dsl.rb
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Setup
+     # Dsl for working with the configuration from the Karafka::App level
+     # @note Despite providing methods, everything is still persisted in and fetched
+     #   from the Karafka::Setup::Config
+     module Dsl
+       # Sets up the whole configuration
+       # @param block [Block] configuration block
+       def setup(&block)
+         Setup::Config.setup(&block)
+         initialize!
+       end
+
+       # @return [Karafka::Config] config instance
+       def config
+         Setup::Config.config
+       end
+     end
+   end
+   end
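Assuming Karafka::App exposes this DSL (as the bundled karafka.rb.example template suggests), app-level setup is a thin wrapper over Setup::Config; values here are illustrative:

# Illustrative only: the app-level entry point delegates to Setup::Config.
class KarafkaApp < Karafka::App
  setup do |config|
    config.client_id = 'my_app'
    config.kafka.seed_brokers = ['kafka://127.0.0.1:9092']
  end
end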
data/lib/karafka/status.rb
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # App status monitor
+   class Status
+     include Singleton
+
+     # Available states and their transitions
+     STATES = {
+       initializing: :initialize!,
+       running: :run!,
+       stopped: :stop!
+     }.freeze
+
+     STATES.each do |state, transition|
+       define_method :"#{state}?" do
+         @status == state
+       end
+
+       define_method transition do
+         @status = state
+       end
+     end
+   end
+ end
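For each pair in STATES, this metaprogramming defines one bang transition and one predicate. A short sketch of the generated API, calling the singleton directly:

# Illustrative only: methods generated from the STATES map above.
status = Karafka::Status.instance
status.run!      # transition method from the :running => :run! pair
status.running?  # => true
status.stopped?  # => false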