sidekiq-alive-next 2.2.0
- checksums.yaml +7 -0
- data/.github/dependabot.yml +16 -0
- data/.github/release.yml +23 -0
- data/.github/workflows/release.yml +40 -0
- data/.github/workflows/test.yml +45 -0
- data/.gitignore +12 -0
- data/.rspec +4 -0
- data/.ruby-version +1 -0
- data/.tool-versions +1 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +8 -0
- data/Gemfile.lock +64 -0
- data/LICENSE.txt +21 -0
- data/README.md +284 -0
- data/Rakefile +6 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/docker-compose.yml +6 -0
- data/lib/sidekiq-alive-next.rb +146 -0
- data/lib/sidekiq_alive/config.rb +42 -0
- data/lib/sidekiq_alive/server.rb +45 -0
- data/lib/sidekiq_alive/version.rb +5 -0
- data/lib/sidekiq_alive/worker.rb +45 -0
- data/sidekiq_alive.gemspec +40 -0
- metadata +200 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: 783e392caa595b779c30404aa7372fa6736acea3e05a6092980ed4b36ad42115
  data.tar.gz: e87aa55b018053382fe89501d68900f77f03997ef245e6878e3f45bf9260e8bc
SHA512:
  metadata.gz: ce3325f94879b92c2847c0ddfc8e5a0cb1ea354d63344c2f897bd80b02e565556349fdb9ab71ab4a9328c66e0b747ded5adada03fd4c4362985e75ee040f9b0e
  data.tar.gz: 73be9c7a97a513b46e14f8e76e7669aaf7757858e3155b13e4f602d678b2d91ee3a027bdc595319f0bc8faef17f8ce8463740507d808a12ab30a6fdaa1370853
data/.github/dependabot.yml
ADDED
@@ -0,0 +1,16 @@
version: 2
updates:
  - package-ecosystem: "bundler"
    directory: "/"
    schedule:
      interval: "daily"
    reviewers:
      - "andrcuns"
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: "daily"
    reviewers:
      - andrcuns
    labels:
      - "ci"
data/.github/release.yml
ADDED
@@ -0,0 +1,23 @@
changelog:
  categories:
    - title: '🚀 New Features'
      labels:
        - 'feature'
    - title: '🔬 Improvements'
      labels:
        - 'enhancement'
    - title: '🐞 Bug Fixes'
      labels:
        - 'bug'
    - title: '📦 Dependency updates'
      labels:
        - 'dependencies'
    - title: '📄 Documentation updates'
      labels:
        - 'documentation'
    - title: '🧰 Maintenance'
      labels:
        - 'maintenance'
    - title: '👷 CI'
      labels:
        - 'ci'
data/.github/workflows/release.yml
ADDED
@@ -0,0 +1,40 @@
name: Release

on: workflow_dispatch

jobs:
  release:
    name: Ruby gem
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Ruby 3.1
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.1.2
          bundler-cache: true
      -
        name: Create tag and push to rubygems
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          bundle exec rake release
        env:
          GEM_HOST_API_KEY: ${{ secrets.GEM_HOST_API_KEY }}

  gh-release:
    name: Github release
    runs-on: ubuntu-latest
    needs: release
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        uses: softprops/action-gh-release@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          generate_release_notes: true
data/.github/workflows/test.yml
ADDED
@@ -0,0 +1,45 @@
name: Test

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  test:
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        ruby-version: ["3.1", "3.0", "2.7"]
    # Service containers to run with `runner-job`
    services:
      # Label used to access the service container
      redis:
        # Docker Hub image
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps port 6379 on service container to the host
          - 6379:6379

    steps:
      - uses: actions/checkout@v3
      - name: Set up Ruby ${{ matrix.ruby-version }}
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby-version }}
          bundler-cache: true
      - name: Install dependencies
        run: bundle install
      - name: Run tests
        run: bundle exec rspec --color
data/.gitignore
ADDED
data/.rspec
ADDED
data/.ruby-version
ADDED
@@ -0,0 +1 @@
3.0.4
data/.tool-versions
ADDED
@@ -0,0 +1 @@
ruby 3.0.4
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at arturictus@gmail.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,64 @@
PATH
  remote: .
  specs:
    sidekiq-alive-next (2.2.0)
      sidekiq (>= 5, < 7)
      webrick (>= 1, < 2)

GEM
  remote: https://rubygems.org/
  specs:
    coderay (1.1.3)
    connection_pool (2.3.0)
    diff-lcs (1.5.0)
    method_source (1.0.0)
    mock_redis (0.34.0)
      ruby2_keywords
    pry (0.14.1)
      coderay (~> 1.1)
      method_source (~> 1.0)
    rack (2.2.4)
    rack-test (2.0.2)
      rack (>= 1.3)
    rake (13.0.6)
    redis (4.8.0)
    rspec (3.11.0)
      rspec-core (~> 3.11.0)
      rspec-expectations (~> 3.11.0)
      rspec-mocks (~> 3.11.0)
    rspec-core (3.11.0)
      rspec-support (~> 3.11.0)
    rspec-expectations (3.11.1)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.11.0)
    rspec-mocks (3.11.1)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.11.0)
    rspec-sidekiq (3.1.0)
      rspec-core (~> 3.0, >= 3.0.0)
      sidekiq (>= 2.4.0)
    rspec-support (3.11.1)
    ruby2_keywords (0.0.5)
    sidekiq (6.5.7)
      connection_pool (>= 2.2.5)
      rack (~> 2.0)
      redis (>= 4.5.0, < 5)
    webrick (1.7.0)

PLATFORMS
  aarch64-linux
  arm64-darwin-21
  x86_64-linux

DEPENDENCIES
  bundler (> 1.16)
  mock_redis
  pry
  rack-test
  rake (~> 13.0)
  rspec (~> 3.0)
  rspec-sidekiq (~> 3.0)
  sidekiq-alive-next!

BUNDLED WITH
   2.3.22
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2018 Artur Pañach

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,284 @@
# SidekiqAlive

**This is a fork of 'arturictus/sidekiq_alive' with a few minor fixes and tweaks; all credit goes to the author of the original repo.**

SidekiqAlive offers a solution to add a liveness probe for a Sidekiq instance deployed in Kubernetes.
This library can also be used to check Sidekiq health outside Kubernetes.

**How?**

An HTTP server is started and on each request validates that a liveness key is stored in Redis. If the key is there, Sidekiq is working.

A Sidekiq worker is responsible for storing this key. If Sidekiq stops processing jobs,
this key expires in Redis and consequently the HTTP server will return a 500 error.

This worker is responsible for requeuing itself for the next liveness probe.

Each instance in Kubernetes will be checked based on the `ENV` variable `HOSTNAME` (Kubernetes sets this for each replica/pod).

On initialization, SidekiqAlive assigns its worker a queue named after the current host and adds this queue to the queues the current instance processes.

Example:

```
hostname: foo
Worker queue: sidekiq-alive-foo
instance queues:
- sidekiq-alive-foo
*- your queues

hostname: bar
Worker queue: sidekiq-alive-bar
instance queues:
- sidekiq-alive-bar
*- your queues
```
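
For illustration, the queue name is simply the configured prefix joined to the hostname. A rough sketch of the naming scheme (illustrative only, using the configuration accessors described later in this README):

```ruby
# Sketch of the per-instance queue naming (not part of the public API)
hostname   = ENV.fetch('HOSTNAME', 'HOSTNAME_NOT_SET')
prefix     = SidekiqAlive.config.queue_prefix # :"sidekiq-alive" by default in this fork
queue_name = "#{prefix}-#{hostname}"          # e.g. "sidekiq-alive-foo" when HOSTNAME=foo
```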

## Installation

Add this line to your application's Gemfile:

```ruby
gem 'sidekiq-alive-next'
```

And then execute:

    $ bundle

Or install it yourself as:

    $ gem install sidekiq-alive-next

## Usage

SidekiqAlive will start when running the `sidekiq` command.

Run `Sidekiq`:

```
bundle exec sidekiq
```

```
curl localhost:7433
#=> Alive!
```

**How to disable?**
You can disable it by setting the `ENV` variable `DISABLE_SIDEKIQ_ALIVE`,
for example:

```
DISABLE_SIDEKIQ_ALIVE=true bundle exec sidekiq
```

### Kubernetes setup

Set `livenessProbe` in your Kubernetes deployment.

Example with the recommended setup:

#### Sidekiq < 6

```yaml
spec:
  containers:
    - name: my_app
      image: my_app:latest
      env:
        - name: RAILS_ENV
          value: production
      command:
        - bundle
        - exec
        - sidekiq
      ports:
        - containerPort: 7433
      livenessProbe:
        httpGet:
          path: /
          port: 7433
        initialDelaySeconds: 80 # app specific. Time your sidekiq takes to start processing.
        timeoutSeconds: 5 # can be much less
      readinessProbe:
        httpGet:
          path: /
          port: 7433
        initialDelaySeconds: 80 # app specific
        timeoutSeconds: 5 # can be much less
      lifecycle:
        preStop:
          exec:
            # SIGTERM triggers a quick exit; gracefully terminate instead
            command: ['bundle', 'exec', 'sidekiqctl', 'quiet']
  terminationGracePeriodSeconds: 60 # put your longest Job time here plus security time.
```

#### Sidekiq >= 6

Create the file:

_kube/sidekiq_quiet_

```bash
#!/bin/bash

# Find Pid
SIDEKIQ_PID=$(ps aux | grep sidekiq | grep busy | awk '{ print $2 }')
# Send TSTP signal
kill -SIGTSTP $SIDEKIQ_PID
```

Make it executable:

```
$ chmod +x kube/sidekiq_quiet
```

Execute it in your deployment preStop:

```yaml
spec:
  containers:
    - name: my_app
      image: my_app:latest
      env:
        - name: RAILS_ENV
          value: production
      command:
        - bundle
        - exec
        - sidekiq
      ports:
        - containerPort: 7433
      livenessProbe:
        httpGet:
          path: /
          port: 7433
        initialDelaySeconds: 80 # app specific. Time your sidekiq takes to start processing.
        timeoutSeconds: 5 # can be much less
      readinessProbe:
        httpGet:
          path: /
          port: 7433
        initialDelaySeconds: 80 # app specific
        timeoutSeconds: 5 # can be much less
      lifecycle:
        preStop:
          exec:
            # SIGTERM triggers a quick exit; gracefully terminate instead
            command: ['kube/sidekiq_quiet']
  terminationGracePeriodSeconds: 60 # put your longest Job time here plus security time.
```

### Outside Kubernetes

It's up to you how you want to use it.

A local example would be:

```
bundle exec sidekiq
# let it initialize ...
```

```
curl localhost:7433
#=> Alive!
```

## Options

```ruby
SidekiqAlive.setup do |config|
  # ==> Server host
  # Host to bind the server.
  # Can also be set with the environment variable SIDEKIQ_ALIVE_HOST.
  # default: 0.0.0.0
  #
  # config.host = 0.0.0.0

  # ==> Server port
  # Port to bind the server.
  # Can also be set with the environment variable SIDEKIQ_ALIVE_PORT.
  # default: 7433
  #
  # config.port = 7433

  # ==> Server path
  # HTTP path to respond to.
  # Can also be set with the environment variable SIDEKIQ_ALIVE_PATH.
  # default: '/'
  #
  # config.path = '/'

  # ==> Custom Liveness Probe
  # Extra check to decide whether to restart the pod or not, for example a connection to the DB.
  # `false`, `nil` or `raise` will not write the liveness probe
  # default: proc { true }
  #
  # config.custom_liveness_probe = proc { db_running? }

  # ==> Liveness key
  # Key to be stored in Redis as proof of liveness
  # default: "SIDEKIQ::LIVENESS_PROBE_TIMESTAMP"
  #
  # config.liveness_key = "SIDEKIQ::LIVENESS_PROBE_TIMESTAMP"

  # ==> Time to live
  # Time for the key to be kept by Redis.
  # This is where you set how often Sidekiq has to prove it is working.
  # Time unit: seconds
  # default: 10 * 60 # 10 minutes
  #
  # config.time_to_live = 10 * 60

  # ==> Callback
  # After the key is stored in Redis you can perform anything,
  # for example a webhook or email to notify the team.
  # default: proc {}
  #
  # require 'net/http'
  # config.callback = proc { Net::HTTP.get("https://status.com/ping") }

  # ==> Shutdown callback
  # When the Sidekiq process is shutting down, you can perform some action, like cleaning up the created queue.
  # default: proc {}
  #
  # config.shutdown_callback = proc do
  #   Sidekiq::Queue.all.find { |q| q.name == "#{queue_prefix}-#{SidekiqAlive.hostname}" }&.clear
  # end

  # ==> Queue Prefix
  # SidekiqAlive will run in an independent queue for each instance/replica.
  # The queue name is generated as "#{queue_prefix}-#{hostname}".
  # You can customize the prefix here.
  # default: :sidekiq-alive
  #
  # config.queue_prefix = :other

  # ==> Rack server
  # Web server used to serve the HTTP response.
  # Can also be set with the environment variable SIDEKIQ_ALIVE_SERVER.
  # default: 'webrick'
  #
  # config.server = 'puma'
end
```
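
As a concrete starting point, a minimal setup might look like this (a sketch only; a Rails initializer path is assumed, and the values are illustrative, not recommendations):

```ruby
# config/initializers/sidekiq_alive.rb (hypothetical location)
SidekiqAlive.setup do |config|
  config.port = 7433                           # same as the default
  config.time_to_live = 5 * 60                 # expect a probe at least every 5 minutes
  config.custom_liveness_probe = proc { true } # replace with e.g. a DB ping
end
```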

## Development

After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

To install this gem onto your local machine, run `bundle exec rake install`.

Here is an example [rails app](https://github.com/arturictus/sidekiq_alive_example)

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/arturictus/sidekiq_alive. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.

## License

The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
data/Rakefile
ADDED
data/bin/console
ADDED
@@ -0,0 +1,14 @@
#!/usr/bin/env ruby

require 'bundler/setup'
require 'sidekiq_alive'

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require 'irb'
IRB.start(__FILE__)
data/bin/setup
ADDED
data/docker-compose.yml
ADDED
data/lib/sidekiq-alive-next.rb
ADDED
@@ -0,0 +1,146 @@
require 'sidekiq'
require 'sidekiq/api'
require 'singleton'
require 'sidekiq_alive/version'
require 'sidekiq_alive/config'

module SidekiqAlive
  def self.start
    Sidekiq.configure_server do |sq_config|
      sq_config.on(:startup) do
        SidekiqAlive::Worker.sidekiq_options queue: current_queue
        (sq_config.respond_to?(:[]) ? sq_config[:queues] : sq_config.options[:queues]).unshift(current_queue)

        logger.info(startup_info)

        register_current_instance
        store_alive_key
        SidekiqAlive::Worker.perform_async(hostname)
        @server_pid = fork { SidekiqAlive::Server.run! }

        logger.info(successful_startup_text)
      end

      sq_config.on(:quiet) do
        unregister_current_instance
        config.shutdown_callback.call
      end

      sq_config.on(:shutdown) do
        Process.kill('TERM', @server_pid) unless @server_pid.nil?
        Process.wait(@server_pid) unless @server_pid.nil?

        unregister_current_instance
        config.shutdown_callback.call
      end
    end
  end

  def self.current_queue
    "#{config.queue_prefix}-#{hostname}"
  end

  def self.register_current_instance
    register_instance(current_instance_register_key)
  end

  def self.unregister_current_instance
    # Delete any pending jobs for this instance
    logger.info(shutdown_info)
    purge_pending_jobs
    redis.del(current_instance_register_key)
  end

  def self.registered_instances
    deep_scan("#{config.registered_instance_key}::*")
  end

  def self.deep_scan(keyword, keys = [], cursor = 0)
    loop do
      cursor, found_keys = SidekiqAlive.redis.scan(cursor, match: keyword, count: 1000)
      keys += found_keys
      break if cursor.to_i.zero?
    end
    keys
  end

  def self.purge_pending_jobs
    # TODO:
    # Sidekiq 6 allows better way to find scheduled jobs:
    # https://github.com/mperham/sidekiq/wiki/API#scan
    scheduled_set = Sidekiq::ScheduledSet.new
    jobs = scheduled_set.select { |job| job.klass == 'SidekiqAlive::Worker' && job.queue == current_queue }
    logger.info("[SidekiqAlive] Purging #{jobs.count} pending for #{hostname}")
    jobs.each(&:delete)
    logger.info("[SidekiqAlive] Removing queue #{current_queue}")
    Sidekiq::Queue.new(current_queue).clear
  end

  def self.current_instance_register_key
    "#{config.registered_instance_key}::#{hostname}"
  end

  def self.store_alive_key
    redis.set(current_lifeness_key,
              Time.now.to_i,
              ex: config.time_to_live.to_i)
  end

  def self.redis
    Sidekiq.redis { |r| r }
  end

  def self.alive?
    redis.ttl(current_lifeness_key) != -2
  end

  # CONFIG ---------------------------------------

  def self.setup
    yield(config)
  end

  def self.logger
    config.logger || Sidekiq.logger
  end

  def self.config
    @config ||= SidekiqAlive::Config.instance
  end

  def self.current_lifeness_key
    "#{config.liveness_key}::#{hostname}"
  end

  def self.hostname
    ENV['HOSTNAME'] || 'HOSTNAME_NOT_SET'
  end

  def self.shutdown_info
    'Shutting down sidekiq-alive!'
  end

  def self.startup_info
    info = {
      hostname: hostname,
      port: config.port,
      ttl: config.time_to_live,
      queue: current_queue
    }

    "Starting sidekiq-alive: #{info}"
  end

  def self.successful_startup_text
    "Successfully started sidekiq-alive, registered instances: #{registered_instances.join("\n\s\s- ")}"
  end

  def self.register_instance(instance_name)
    redis.set(instance_name, Time.now.to_i, ex: config.registration_ttl.to_i)
  end
end

require 'sidekiq_alive/worker'
require 'sidekiq_alive/server'

SidekiqAlive.start unless ENV.fetch('DISABLE_SIDEKIQ_ALIVE', '').casecmp('true').zero?
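
The `alive?` check above reduces to a Redis TTL lookup: the key is written with an expiry of `time_to_live`, and Redis reports a TTL of `-2` once a key has expired and been removed. A sketch of the same check (illustrative only, assuming Sidekiq's Redis connection is reachable):

```ruby
# Illustrative version of SidekiqAlive.alive? (not part of the gem)
key = "#{SidekiqAlive.config.liveness_key}::#{SidekiqAlive.hostname}"
SidekiqAlive.redis.ttl(key) != -2 # -2 means the key is gone, i.e. no recent probe
```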
data/lib/sidekiq_alive/config.rb
ADDED
@@ -0,0 +1,42 @@
# frozen_string_literal: true

module SidekiqAlive
  class Config
    include Singleton

    attr_accessor :host,
                  :port,
                  :path,
                  :liveness_key,
                  :time_to_live,
                  :callback,
                  :registered_instance_key,
                  :queue_prefix,
                  :server,
                  :custom_liveness_probe,
                  :logger,
                  :shutdown_callback

    def initialize
      set_defaults
    end

    def set_defaults
      @host = ENV.fetch('SIDEKIQ_ALIVE_HOST', '0.0.0.0')
      @port = ENV.fetch('SIDEKIQ_ALIVE_PORT', 7433)
      @path = ENV.fetch('SIDEKIQ_ALIVE_PATH', '/')
      @liveness_key = 'SIDEKIQ::LIVENESS_PROBE_TIMESTAMP'
      @time_to_live = 10 * 60
      @callback = proc {}
      @registered_instance_key = 'SIDEKIQ_REGISTERED_INSTANCE'
      @queue_prefix = :"sidekiq-alive"
      @server = ENV.fetch('SIDEKIQ_ALIVE_SERVER', 'webrick')
      @custom_liveness_probe = proc { true }
      @shutdown_callback = proc {}
    end

    def registration_ttl
      @registration_ttl || time_to_live + 60
    end
  end
end
data/lib/sidekiq_alive/server.rb
ADDED
@@ -0,0 +1,45 @@
# frozen_string_literal: true

require 'rack'

module SidekiqAlive
  class Server
    class << self
      def run!
        handler = Rack::Handler.get(server)

        Signal.trap('TERM') { handler.shutdown }

        handler.run(self, Port: port, Host: host, AccessLog: [], Logger: SidekiqAlive.logger)
      end

      def host
        SidekiqAlive.config.host
      end

      def port
        SidekiqAlive.config.port
      end

      def path
        SidekiqAlive.config.path
      end

      def server
        SidekiqAlive.config.server
      end

      def call(env)
        if Rack::Request.new(env).path != path
          [404, {}, ['Not found']]
        elsif SidekiqAlive.alive?
          [200, {}, ['Alive!']]
        else
          response = "Can't find the alive key"
          SidekiqAlive.logger.error(response)
          [404, {}, [response]]
        end
      end
    end
  end
end
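
Because the server is a plain Rack app (it responds to `call`), its three response branches can be exercised without booting a real web server, for example with `Rack::MockRequest`. A sketch, assuming `sidekiq-alive-next` and `rack` are already loaded:

```ruby
require 'rack/mock'

# Hit the configured path; expect 200 "Alive!" while the liveness key exists,
# and 404 "Can't find the alive key" once it has expired.
response = Rack::MockRequest.new(SidekiqAlive::Server).get(SidekiqAlive.config.path)
puts response.status, response.body
```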
data/lib/sidekiq_alive/worker.rb
ADDED
@@ -0,0 +1,45 @@
# frozen_string_literal: true

module SidekiqAlive
  class Worker
    include Sidekiq::Worker
    sidekiq_options retry: false

    def perform(_hostname = SidekiqAlive.hostname)
      # Skip writing the probe if the custom liveness probe fails (returns false or nil)
      return unless config.custom_liveness_probe.call

      # Writes the liveness key in Redis
      write_living_probe
      # schedules next living probe
      self.class.perform_in(config.time_to_live / 2, current_hostname)
    end

    def hostname_registered?(hostname)
      SidekiqAlive.registered_instances.any? do |ri|
        /#{hostname}/ =~ ri
      end
    end

    def write_living_probe
      # Write liveness probe
      SidekiqAlive.store_alive_key
      # Increment ttl for current registered instance
      SidekiqAlive.register_current_instance
      # after callbacks
      begin
        config.callback.call
      rescue StandardError
        nil
      end
    end

    def current_hostname
      SidekiqAlive.hostname
    end

    def config
      SidekiqAlive.config
    end
  end
end
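
The cadence follows from the two numbers above: the key is stored with an expiry of `time_to_live`, while the worker re-enqueues itself after half of that, so a single delayed run can still refresh the key before it expires. A rough sanity check with the defaults (illustrative arithmetic only):

```ruby
# Default timing relationship between key expiry and requeue interval
time_to_live = 10 * 60          # key expiry: 600 s
requeue_in   = time_to_live / 2 # next probe scheduled in 300 s
# => the key is normally refreshed about twice per expiry window
```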
data/sidekiq_alive.gemspec
ADDED
@@ -0,0 +1,40 @@
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sidekiq_alive/version'

Gem::Specification.new do |spec|
  spec.name = 'sidekiq-alive-next'
  spec.version = SidekiqAlive::VERSION
  spec.authors = ['Andrejs Cunskis']
  spec.email = ['andrejs.cunskis@gmail.com']

  spec.summary = 'Liveness probe for sidekiq on Kubernetes deployments.'
  spec.description = 'SidekiqAlive offers a solution to add liveness probe of a Sidekiq instance.

  How?

  A http server is started and on each requests validates that a liveness key is stored in Redis. If it is there means is working.

  A Sidekiq job is the responsable to storing this key. If Sidekiq stops processing jobs
  this key gets expired by Redis an consequently the http server will return a 500 error.

  This Job is responsible to requeue itself for the next liveness probe.'
  spec.homepage = 'https://github.com/andrcuns/sidekiq-alive'
  spec.license = 'MIT'

  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '> 1.16'
  spec.add_development_dependency 'mock_redis'
  spec.add_development_dependency 'rack-test'
  spec.add_development_dependency 'rake', '~> 13.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'rspec-sidekiq', '~> 3.0'
  spec.add_dependency 'sidekiq', '>= 5', '< 7'
  spec.add_dependency 'webrick', '>= 1', '< 2'
end
metadata
ADDED
@@ -0,0 +1,200 @@
--- !ruby/object:Gem::Specification
name: sidekiq-alive-next
version: !ruby/object:Gem::Version
  version: 2.2.0
platform: ruby
authors:
- Andrejs Cunskis
autorequire:
bindir: exe
cert_chain: []
date: 2022-10-15 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">"
      - !ruby/object:Gem::Version
        version: '1.16'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">"
      - !ruby/object:Gem::Version
        version: '1.16'
- !ruby/object:Gem::Dependency
  name: mock_redis
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: rack-test
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '13.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '13.0'
- !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
- !ruby/object:Gem::Dependency
  name: rspec-sidekiq
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.0'
- !ruby/object:Gem::Dependency
  name: sidekiq
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '5'
    - - "<"
      - !ruby/object:Gem::Version
        version: '7'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '5'
    - - "<"
      - !ruby/object:Gem::Version
        version: '7'
- !ruby/object:Gem::Dependency
  name: webrick
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '1'
    - - "<"
      - !ruby/object:Gem::Version
        version: '2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '1'
    - - "<"
      - !ruby/object:Gem::Version
        version: '2'
description: |-
  SidekiqAlive offers a solution to add liveness probe of a Sidekiq instance.

  How?

  A http server is started and on each requests validates that a liveness key is stored in Redis. If it is there means is working.

  A Sidekiq job is the responsable to storing this key. If Sidekiq stops processing jobs
  this key gets expired by Redis an consequently the http server will return a 500 error.

  This Job is responsible to requeue itself for the next liveness probe.
email:
- andrejs.cunskis@gmail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- ".github/dependabot.yml"
- ".github/release.yml"
- ".github/workflows/release.yml"
- ".github/workflows/test.yml"
- ".gitignore"
- ".rspec"
- ".ruby-version"
- ".tool-versions"
- CODE_OF_CONDUCT.md
- Gemfile
- Gemfile.lock
- LICENSE.txt
- README.md
- Rakefile
- bin/console
- bin/setup
- docker-compose.yml
- lib/sidekiq-alive-next.rb
- lib/sidekiq_alive/config.rb
- lib/sidekiq_alive/server.rb
- lib/sidekiq_alive/version.rb
- lib/sidekiq_alive/worker.rb
- sidekiq_alive.gemspec
homepage: https://github.com/andrcuns/sidekiq-alive
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.3.7
signing_key:
specification_version: 4
summary: Liveness probe for sidekiq on Kubernetes deployments.
test_files: []