rimless 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/documentation.yml +3 -2
- data/.github/workflows/test.yml +6 -9
- data/.rspec +2 -2
- data/.rubocop.yml +16 -2
- data/.simplecov +12 -0
- data/Appraisals +2 -22
- data/CHANGELOG.md +8 -0
- data/Dockerfile +2 -3
- data/Envfile +0 -3
- data/Guardfile +44 -0
- data/LICENSE +1 -1
- data/Makefile +18 -7
- data/Rakefile +13 -68
- data/doc/kafka-playground/.gitignore +2 -0
- data/doc/kafka-playground/Dockerfile +41 -0
- data/doc/kafka-playground/Gemfile +8 -0
- data/doc/kafka-playground/Gemfile.lock +155 -0
- data/doc/kafka-playground/Makefile +209 -0
- data/doc/kafka-playground/README.md +185 -0
- data/doc/kafka-playground/bin/consume-topic +7 -0
- data/doc/kafka-playground/bin/create-topic +42 -0
- data/doc/kafka-playground/bin/delete-topic +22 -0
- data/doc/kafka-playground/bin/list-topics +3 -0
- data/doc/kafka-playground/bin/produce-event +64 -0
- data/doc/kafka-playground/config/avro_schemas/.gitignore +1 -0
- data/doc/kafka-playground/config/avro_schemas/playground_app/item_v1.avsc.erb +36 -0
- data/doc/kafka-playground/config/avro_schemas/playground_app/payment_v1.avsc.erb +59 -0
- data/doc/kafka-playground/config/avro_schemas/playground_app/payment_v1_event.avsc.erb +18 -0
- data/doc/kafka-playground/config/docker/shell/.bash_profile +3 -0
- data/doc/kafka-playground/config/docker/shell/.bashrc +231 -0
- data/doc/kafka-playground/config/docker/shell/.config/kcat.conf +3 -0
- data/doc/kafka-playground/config/docker/shell/.gemrc +2 -0
- data/doc/kafka-playground/config/docker/shell/.inputrc +17 -0
- data/doc/kafka-playground/config/environment.rb +69 -0
- data/doc/kafka-playground/doc/assets/project.svg +68 -0
- data/doc/kafka-playground/docker-compose.yml +83 -0
- data/doc/kafka-playground/examples/rimless-produce +48 -0
- data/gemfiles/rails_5.2.gemfile +2 -2
- data/lib/rimless/configuration_handling.rb +11 -1
- data/lib/rimless/consumer.rb +4 -2
- data/lib/rimless/dependencies.rb +3 -0
- data/lib/rimless/kafka_helpers.rb +2 -0
- data/lib/rimless/karafka/avro_deserializer.rb +3 -3
- data/lib/rimless/rspec/helpers.rb +3 -0
- data/lib/rimless/rspec/matchers.rb +3 -4
- data/lib/rimless/rspec.rb +1 -1
- data/lib/rimless/tasks/consumer.rake +3 -0
- data/lib/rimless/tasks/generator.rake +3 -0
- data/lib/rimless/tasks/stats.rake +5 -2
- data/lib/rimless/version.rb +18 -1
- data/lib/rimless.rb +0 -1
- data/rimless.gemspec +43 -29
- metadata +119 -76
- data/gemfiles/rails_4.2.gemfile +0 -8
- data/gemfiles/rails_5.0.gemfile +0 -8
- data/gemfiles/rails_5.1.gemfile +0 -8
- data/gemfiles/rails_6.0.gemfile +0 -8
data/doc/kafka-playground/config/docker/shell/.bashrc
ADDED
@@ -0,0 +1,231 @@
+# ~/.bashrc: executed by bash(1) for non-login shells.
+# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
+# for examples
+
+_GEM_PATHS=$(ls -d1 ${HOME}/.gem/ruby/*/bin 2>/dev/null | paste -sd ':')
+_APP_PATHS=$(ls -d1 /app/vendor/bundle/ruby/*/bin 2>/dev/null | paste -sd ':')
+
+export PATH="${_GEM_PATHS}:${_APP_PATHS}:${PATH}"
+export PATH="/app/node_modules/.bin:${HOME}/.bin:/app/bin:${PATH}"
+export MAKE_ENV=baremetal
+
+# Disable the autostart of all supervisord units
+sudo sed -i 's/autostart=.*/autostart=false/g' /etc/supervisor/conf.d/*
+
+# Start the supervisord (empty, no units)
+sudo supervisord >/dev/null 2>&1 &
+
+# Wait for supervisord
+while ! supervisorctl status >/dev/null 2>&1; do sleep 1; done
+
+# Boot the mDNS stack
+echo '# Start the mDNS stack'
+sudo supervisorctl start dbus avahi
+echo
+
+# Start the ssh-agent
+echo '# Start the SSH agent'
+eval "$(ssh-agent -s)" >/dev/null
+
+# Run a user script for adding the relevant ssh keys
+if [ -f ~/.ssh/add-all ]; then
+. ~/.ssh/add-all
+fi
+
+# If not running interactively, don't do anything
+case $- in
+*i*) ;;
+*) return;;
+esac
+
+# Clear the color for the first time
+echo -e "\e[0m"
+
+export HISTCONTROL="ignoreboth:erasedups"
+export HISTSIZE=1000000
+
+# Enable less mouse scrolling
+export LESS=-r
+
+# Default Editor
+export EDITOR=vim
+
+# set variable identifying the chroot you work in (used in the prompt below)
+if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then
+debian_chroot=$(cat /etc/debian_chroot)
+fi
+
+# If this is an xterm set the title to user@host:dir
+case "$TERM" in
+xterm*|rxvt*)
+PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
+;;
+*)
+;;
+esac
+
+# enable color support of ls and also add handy aliases
+if [ -x /usr/bin/dircolors ]; then
+test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" \
+|| eval "$(dircolors -b)"
+fi
+
+if [ -f ~/.bash_aliases ]; then
+. ~/.bash_aliases
+fi
+
+# enable programmable completion features (you don't need to enable
+# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
+# sources /etc/bash.bashrc).
+if ! shopt -oq posix; then
+if [ -f /usr/share/bash-completion/bash_completion ]; then
+. /usr/share/bash-completion/bash_completion
+elif [ -f /etc/bash_completion ]; then
+. /etc/bash_completion
+fi
+fi
+
+export COLOR_OPTIONS='--color=auto'
+
+alias ..="cd .."
+alias ...="cd ../.."
+alias ....="cd ../../.."
+alias .....="cd ../../../.."
+alias ls='ls $COLOR_OPTIONS --group-directories-first --time-style="+%F, %T "'
+alias ll='ls $COLOR_OPTIONS -lh'
+alias l='ls $COLOR_OPTIONS -lAh'
+alias grep='grep $COLOR_OPTIONS'
+alias egrep='egrep $COLOR_OPTIONS'
+alias g='git'
+alias p='pwd'
+alias mkdir='mkdir -p -v'
+alias less='less -R'
+alias x='exit'
+alias playground='ls -1 /app/bin | sed "s/^/* /g"'
+alias pl='playground'
+
+# Bash won't get SIGWINCH if another process is in the foreground.
+# Enable checkwinsize so that bash will check the terminal size when
+# it regains control. #65623
+# http://cnswww.cns.cwru.edu/~chet/bash/FAQ (E11)
+shopt -s checkwinsize
+
+# Enable history appending instead of overwriting.
+shopt -s histappend
+
+# Enable extended globbing
+shopt -s extglob
+
+# Enable globbing for dotfiles
+shopt -s dotglob
+
+# Enable globstars for recursive globbing
+shopt -s globstar
+
+# Auto "cd" when entering just a path
+shopt -s autocd
+
+# Disable XOFF (interrupt data flow)
+stty -ixoff
+
+# Disable XON (interrupt data flow)
+stty -ixon
+
+bind "set completion-ignore-case on" # note: bind used instead of sticking these in .inputrc
+bind "set bell-style none" # no bell
+bind "set show-all-if-ambiguous On" # show list automatically, without double tab
+
+# use ctl keys to move forward and back in words
+bind '"\e[1;5C": forward-word'
+bind '"\e[1;5D": backward-word'
+bind '"\e[5C": forward-word'
+bind '"\e[5D": backward-word'
+bind '"\e\e[C": forward-word'
+bind '"\e\e[D": backward-word'
+
+# use arrow keys to fast search
+bind '"\e[A": history-search-backward'
+bind '"\e[B": history-search-forward'
+
+# Enable colors for ls, etc. Prefer ~/.dir_colors #64489
+if type -P dircolors >/dev/null ; then
+if [[ -f ~/.dir_colors ]] ; then
+eval $(dircolors -b ~/.dir_colors)
+elif [[ -f /etc/DIR_COLORS ]] ; then
+eval $(dircolors -b /etc/DIR_COLORS)
+fi
+fi
+
+function watch-make-test()
+{
+while [ 1 ]; do
+inotifywait --quiet -r `pwd` -e close_write --format '%e -> %w%f'
+make test
+done
+}
+
+function watch-make()
+{
+while [ 1 ]; do
+inotifywait --quiet -r `pwd` -e close_write --format '%e -> %w%f'
+make $@
+done
+}
+
+function watch-run()
+{
+while [ 1 ]; do
+inotifywait --quiet -r `pwd` -e close_write --format '%e -> %w%f'
+bash -c "$@"
+done
+}
+
+PROMPT_COMMAND='RET=$?;'
+RET_OUT='$(if [[ $RET = 0 ]]; then echo -ne "\[\e[0;32m\][G]"; else echo -ne "\[\e[0;31m\][Err: $RET]"; fi;)'
+RET_OUT="\n$RET_OUT"
+
+HOST="${MDNS_HOSTNAME}"
+if [ -z "${HOST}" ]; then
+HOST="\h"
+fi
+
+_TIME='\t'
+_FILES="\$(ls -a1 | grep -vE '\.$' | wc -l)"
+_SIZE="\$(ls -lah | head -n1 | cut -d ' ' -f2)"
+_META="${_TIME} | Files: ${_FILES} | Size: ${_SIZE} | \[\e[0;36m\]\w"
+META=" \[\e[0;31m\][\[\e[1;37m\]${_META}\[\e[0;31m\]]\[\e[0;32m\]\033]2;\w\007"
+
+PSL1=${RET_OUT}${META}
+PSL2="\n\[\e[0;31m\][\u\[\e[0;33m\]@\[\e[0;37m\]${HOST}\[\e[0;31m\]] \[\e[0;31m\]$\[\e[0;32m\] "
+
+export PS1=${PSL1}${PSL2}
+
+clear
+NORMAL=$(printf '\e[0m')
+COLOR=$(printf '\e[1;34m')
+R=$(printf '\e[0;31m')
+B=$(printf '\e[0;34m')
+Z=$(printf '\e[0m')
+cat <<EOF
+##########
+#
+# ${R}++${Z}
+# ${R}++++++${Z}
+# ${R}++++++++${Z} ${B}_ _ _ _${Z}
+# ${R}++++++++++${Z} ${B}| | | | | | | |${Z}
+# ${R}+++++++++++${Z} ${B}| |__| | __ _ _ _ ___ __ _ ___ | | __| |${Z}
+# ${R}++++++++++${Z} ${B}| __ |/ _\` | | | / __|/ _\` |/ _ \| |/ _\` |${Z}
+# ${R}++++++++++${Z} ${B}| | | | (_| | |_| \__ \ (_| | (_) | | (_| |${Z}
+# ${R}+++++++++${Z} ${B}|_| |_|\__,_|\__,_|___/\__, |\___/|_|\__,_|${Z}
+# ${R}+++++++${Z} ${B}+++${Z} ${B}__/ |${Z}
+# ${R}++++++${Z} ${B}+++++${Z} ${B}|___/${Z}
+# ${R}++++${Z} ${B}+++++++${Z}
+# ${R}+++${Z} ${B}++++++++++${Z} ${COLOR}Apache Kafka Playground${NORMAL}
+#
+##########
+
+EOF
+playground
+
+# Rebind enter key to insert newline before command output
+trap 'echo -e "\e[0m"' DEBUG

data/doc/kafka-playground/config/docker/shell/.inputrc
ADDED
@@ -0,0 +1,17 @@
+# mappings for Ctrl-left-arrow and Ctrl-right-arrow for word moving
+"\e[1;5C": forward-word
+"\e[1;5D": backward-word
+"\e[5C": forward-word
+"\e[5D": backward-word
+"\e\e[C": forward-word
+"\e\e[D": backward-word
+
+# handle common Home/End escape codes
+"\e[1~": beginning-of-line
+"\e[4~": end-of-line
+"\e[7~": beginning-of-line
+"\e[8~": end-of-line
+"\eOH": beginning-of-line
+"\eOF": end-of-line
+"\e[H": beginning-of-line
+"\e[F": end-of-line

data/doc/kafka-playground/config/environment.rb
ADDED
@@ -0,0 +1,69 @@
+ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
+require 'bundler/setup' # Set up gems listed in the Gemfile.
+require 'active_support'
+require 'active_support/all'
+require 'json'
+require 'pp'
+
+Bundler.require(:default)
+ActiveSupport.eager_load!
+
+AppLogger = Logger.new(STDOUT)
+AppLogger.level = Logger::FATAL
+AppLogger.level = Logger::DEBUG if ENV.fetch('DEBUG', '').match? /true|1|on/
+
+Rimless.configure do |conf|
+conf.env = 'production'
+conf.app_name = 'playground_app'
+conf.client_id = 'playground'
+conf.logger = AppLogger
+conf.kafka_brokers = ['kafka://kafka.playground.local:9092']
+conf.schema_registry_url = 'http://schema-registry.playground.local'
+end
+
+KafkaClient = Kafka.new(Rimless.configuration.kafka_brokers, logger: AppLogger)
+
+# +Resolv+ is a thread-aware DNS resolver library written in Ruby. Some newer
+# networking libraries like excon (>=0.85.0) makes use of it instead of the
+# regular glibc facility. This raises an issue for our local development as we
+# use the mDNS stack which is configured in every Docker image accordingly
+# (Avahi, libnss[-mdns]). The default resolver of +Resolv+ does not include the
+# mDNS stack so we have to reconfigure it here for local usage only.
+#
+# See: https://docs.ruby-lang.org/en/2.5.0/Resolv.html
+require 'resolv'
+Resolv::DefaultResolver.replace_resolvers(
+[
+Resolv::Hosts.new,
+Resolv::MDNS.new,
+Resolv::DNS.new
+]
+)
+
+def topic?(name)
+@topic_conf = KafkaClient.describe_topic(name)
+rescue Kafka::UnknownTopicOrPartition
+false
+end
+
+def args!
+app = Thor.descendants.map(&:to_s)
+.reject { |klass| klass.include? '::' }.first
+raise "No Thor application class was found." unless app
+app = app.constantize
+
+help = ARGV.any? { |arg| %w[help -h --help].include?(arg) }
+known_cmd = app.all_tasks.key? ARGV.first
+
+if ARGV.blank? || help || known_cmd
+ARGV.replace(['help', app.default_task])
+else
+ARGV.unshift(app.default_task)
+end
+
+ARGV
+end
+
+def debug!(opts)
+AppLogger.level = Logger::DEBUG if opts[:verbose]
+end

data/doc/kafka-playground/doc/assets/project.svg
ADDED
@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+xmlns:dc="http://purl.org/dc/elements/1.1/"
+xmlns:cc="http://creativecommons.org/ns#"
+xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+xmlns:svg="http://www.w3.org/2000/svg"
+xmlns="http://www.w3.org/2000/svg"
+version="1.1"
+id="Ebene_1"
+x="0px"
+y="0px"
+viewBox="0 0 800 200"
+xml:space="preserve"
+width="800"
+height="200"><metadata
+id="metadata33"><rdf:RDF><cc:Work
+rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
+rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
+id="defs31" />
+<style
+type="text/css"
+id="style2">
+.st0{fill-rule:evenodd;clip-rule:evenodd;fill:#E73E11;}
+.st1{fill-rule:evenodd;clip-rule:evenodd;fill:#0371B9;}
+.st2{fill:#132E48;}
+.st3{font-family:'OpenSans-Bold';}
+.st4{font-size:29.5168px;}
+.st5{fill-rule:evenodd;clip-rule:evenodd;fill:none;}
+.st6{opacity:0.5;fill:#132E48;}
+.st7{font-family:'OpenSans';}
+.st8{font-size:12px;}
+</style>
+<g
+transform="translate(0,1.53584)"
+id="g828"><g
+transform="translate(35.93985,35.66416)"
+id="g8">
+<path
+style="clip-rule:evenodd;fill:#e73e11;fill-rule:evenodd"
+id="path4"
+d="m -0.1,124.4 c 0,0 33.7,-123.2 66.7,-123.2 12.8,0 26.9,21.9 38.8,47.2 -23.6,27.9 -66.6,59.7 -94,76 -7.1,0 -11.5,0 -11.5,0 z"
+class="st0" />
+<path
+style="clip-rule:evenodd;fill:#0371b9;fill-rule:evenodd"
+id="path6"
+d="m 88.1,101.8 c 13.5,-10.4 18.4,-16.2 27.1,-25.4 10,25.7 16.7,48 16.7,48 0,0 -41.4,0 -78,0 14.6,-7.9 18.7,-10.7 34.2,-22.6 z"
+class="st1" />
+</g><text
+y="106.40316"
+x="192.43155"
+style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:29.51733398px;font-family:'Open Sans', sans-serif;-inkscape-font-specification:'OpenSans-Bold, Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#132e48"
+id="text10"
+class="st2 st3 st4">Apache Kafka Playground</text>
+<rect
+style="clip-rule:evenodd;fill:none;fill-rule:evenodd"
+id="rect12"
+height="24"
+width="314.5"
+class="st5"
+y="118.06416"
+x="194.23985" /><text
+y="127.22146"
+x="194.21715"
+style="font-size:12px;font-family:'Open Sans', sans-serif;opacity:0.5;fill:#132e48;-inkscape-font-specification:'Open Sans, sans-serif, Normal';font-weight:normal;font-style:normal;font-stretch:normal;font-variant:normal;text-anchor:start;text-align:start;writing-mode:lr;font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;"
+id="text14"
+class="st6 st7 st8">A ready to use local Apache Kafka setup based on containers/mDNS</text>
+</g>
+</svg>

data/doc/kafka-playground/docker-compose.yml
ADDED
@@ -0,0 +1,83 @@
+version: "3"
+services:
+  zookeeper:
+    image: zookeeper:3.7
+    network_mode: bridge
+    ports: ["2181"]
+    ulimits:
+      # Due to systemd/pam RLIMIT_NOFILE settings (max int inside the
+      # container), the Java process seams to allocate huge limits which result
+      # in a +unable to allocate file descriptor table - out of memory+ error.
+      # Lowering this value fixes the issue for now.
+      #
+      # See: http://bit.ly/2U62A80
+      # See: http://bit.ly/2T2Izit
+      nofile:
+        soft: 100000
+        hard: 100000
+
+  kafka:
+    image: hausgold/kafka:2.1
+    network_mode: bridge
+    environment:
+      MDNS_HOSTNAME: kafka.playground.local
+      # See: http://bit.ly/2UDzgqI for Kafka downscaling
+      KAFKA_HEAP_OPTS: -Xmx256M -Xms32M
+    links:
+      - zookeeper
+    extra_hosts:
+      # Due to missing nss-mdns support on Alpine Linux
+      # and the requirement to tell our self who we are
+      # for Apache Kafka, we register ourself as hostent.
+      - kafka.playground.local:127.0.0.1
+    ulimits:
+      # Due to systemd/pam RLIMIT_NOFILE settings (max int inside the
+      # container), the Java process seams to allocate huge limits which result
+      # in a +unable to allocate file descriptor table - out of memory+ error.
+      # Lowering this value fixes the issue for now.
+      #
+      # See: http://bit.ly/2U62A80
+      # See: http://bit.ly/2T2Izit
+      nofile:
+        soft: 100000
+        hard: 100000
+
+  schema-registry:
+    image: hausgold/schema-registry:5.1.2
+    network_mode: bridge
+    environment:
+      MDNS_HOSTNAME: schema-registry.playground.local
+      # Set the default Apache Avro schema compatibility
+      #
+      # See: http://bit.ly/2TcpoY1
+      # See: http://bit.ly/2Hfo4wj
+      SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: full
+    links:
+      - zookeeper
+    ulimits:
+      # Due to systemd/pam RLIMIT_NOFILE settings (max int inside the
+      # container), the Java process seams to allocate huge limits which result
+      # in a +unable to allocate file descriptor table - out of memory+ error.
+      # Lowering this value fixes the issue for now.
+      #
+      # See: http://bit.ly/2U62A80
+      # See: http://bit.ly/2T2Izit
+      nofile:
+        soft: 100000
+        hard: 100000
+
+  schema-registry-ui:
+    image: hausgold/schema-registry-ui:0.9.5
+    network_mode: bridge
+    environment:
+      MDNS_HOSTNAME: schema-registry-ui.playground.local
+      SCHEMAREGISTRY_URL: http://schema-registry.playground.local
+
+  app:
+    build: .
+    network_mode: bridge
+    working_dir: /app
+    volumes:
+      - .:/app:${DOCKER_MOUNT_MODE:-rw}
+    environment:
+      MDNS_HOSTNAME: app.playground.local

data/doc/kafka-playground/examples/rimless-produce
ADDED
@@ -0,0 +1,48 @@
+#!/usr/bin/env ruby
+
+require_relative '../config/environment'
+
+# Setup classes which are Apache Avro schema compatible, can be anything which
+# be converted to a hash via +#to_h+ (eg. OpenStruct, RecursiveOpenStruct,
+# Class, Struct, etc)
+PaymentEvent = Struct.new(:event, :payment,
+keyword_init: true)
+
+Payment = Struct.new(:gid, :currency, :net_amount_sum, :items, :state,
+:created_at, :updated_at,
+keyword_init: true)
+
+PaymentItem = Struct.new(:gid, :net_amount, :tax_rate, :created_at, :updated_at,
+keyword_init: true)
+
+# Setup the real data instances which we serialize with Apache Avro and push to
+# Apache Kafka
+item_1_id = SecureRandom.uuid
+item_1 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item_1_id}",
+net_amount: 499,
+tax_rate: 19,
+created_at: Time.current,
+updated_at: nil)
+
+item_2_id = SecureRandom.uuid
+item_2 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item_2_id}",
+net_amount: 1,
+tax_rate: 19,
+created_at: Time.current,
+updated_at: nil)
+
+payment_id = SecureRandom.uuid
+payment = Payment.new(gid: "gid://playground-app/Payment/#{payment_id}",
+currency: :eur,
+net_amount_sum: 500,
+state: :authorized,
+items: [item_1, item_2],
+created_at: Time.current,
+updated_at: Time.current)
+
+event = PaymentEvent.new(event: :payment_authorized, payment: payment)
+event_hash = Rimless.avro_sanitize(event)
+
+pp event_hash
+
+Rimless.message(data: event_hash, topic: :payments, schema: :payment_v1_event)

data/gemfiles/rails_5.2.gemfile
CHANGED

data/lib/rimless/configuration_handling.rb
CHANGED
@@ -4,6 +4,8 @@ module Rimless
 # The top-level configuration handling.
 #
 # rubocop:disable Style/ClassVars because we split module code
+# rubocop:disable Metrics/BlockLength because this is how
+# an +ActiveSupport::Concern+ looks like
 module ConfigurationHandling
 extend ActiveSupport::Concern
 
@@ -61,8 +63,15 @@ module Rimless
 # Check if a application is defined
 return if Rails.application.nil?
 
+# We need a little compatibility here, as in Rails 6.1+ there is no
+# more +parent_name+, instead they renamed it to +module_parent_name+
+app_class = Rails.application.class
+parent_name = app_class.module_parent_name \
+if app_class.respond_to?(:module_parent_name)
+parent_name ||= app_class.parent_name
+
 # Pass back the URI compatible application name
-
+parent_name.underscore.dasherize
 end
 
 # Retrieve the current configured logger instance.
@@ -72,4 +81,5 @@ module Rimless
 end
 end
 # rubocop:enable Style/ClassVars
+# rubocop:enable Metrics/BlockLength
 end

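For reference, the Rails compatibility branch introduced in the hunk above can be exercised outside of a Rails boot. The sketch below is only an illustration of the pattern; the PlaygroundApp::Application constant and the explicit ActiveSupport requires are assumptions for the example and are not part of the gem:

    require 'active_support'
    require 'active_support/core_ext/module/introspection'
    require 'active_support/core_ext/string/inflections'

    # Hypothetical stand-in for Rails.application.class
    module PlaygroundApp
      class Application; end
    end

    app_class = PlaygroundApp::Application

    # Prefer +module_parent_name+ (ActiveSupport 6.0+) and fall back to the
    # legacy +parent_name+ on older releases, mirroring the branch above.
    parent_name = app_class.module_parent_name \
      if app_class.respond_to?(:module_parent_name)
    parent_name ||= app_class.parent_name

    puts parent_name.underscore.dasherize # => "playground-app"
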
data/lib/rimless/consumer.rb
CHANGED
@@ -47,7 +47,7 @@ module Rimless
 return unless rails_env.exist?
 
 ENV['RAILS_ENV'] ||= 'development'
-ENV['KARAFKA_ENV'] = ENV
+ENV['KARAFKA_ENV'] = ENV.fetch('RAILS_ENV', nil)
 require rails_env
 Rails.application.eager_load!
 end
@@ -67,6 +67,7 @@ module Rimless
 # Configure the pure basics on the Karafka application.
 #
 # rubocop:disable Metrics/MethodLength because of the various settings
+# rubocop:disable Metrics/AbcSize dito
 def initialize_karafka!
 setup do |config|
 mapper = Rimless::Karafka::PassthroughMapper.new
@@ -82,13 +83,14 @@ module Rimless
 end
 end
 # rubocop:enable Metrics/MethodLength
+# rubocop:enable Metrics/AbcSize
 
 # When we run in development mode, we want to write the logs
 # to file and to stdout.
 def initialize_logger!
 return unless Rimless.env.development? && server?
 
-
+$stdout.sync = true
 Rimless.logger.extend(ActiveSupport::Logger.broadcast(
 ActiveSupport::Logger.new($stdout)
 ))

data/lib/rimless/dependencies.rb
CHANGED
@@ -17,6 +17,8 @@ module Rimless
 end
 
 # Set sensible defaults for the +WaterDrop+ gem.
+#
+# rubocop:disable Metrics/AbcSize because of the configuration mapping
 def configure_waterdrop
 # Skip WaterDrop configuration when no brokers/client id is available,
 # because it will raise. Its fine to have none available for situations
@@ -43,6 +45,7 @@ module Rimless
 config.kafka.required_acks = -1
 end
 end
+# rubocop:enable Metrics/AbcSize
 
 # Set sensible defaults for the +AvroTurf+ gem and (re)compile the Apache
 # Avro schema templates (ERB), so the gem can handle them properly.

data/lib/rimless/kafka_helpers.rb
CHANGED
@@ -21,6 +21,7 @@ module Rimless
 # Rimless.topic(name: 'test', app: :fancy_app)
 #
 # rubocop:disable Metrics/AbcSize because of the usage flexibility
+# rubocop:disable Metrics/CyclomaticComplexity dito
 def topic(*args)
 opts = args.last
 name = args.first if [String, Symbol].member?(args.first.class)
@@ -38,6 +39,7 @@ module Rimless
 "#{Rimless.topic_prefix(app)}#{name}".tr('_', '-')
 end
 # rubocop:enable Metrics/AbcSize
+# rubocop:enable Metrics/CyclomaticComplexity
 
 # Send a single message to Apache Kafka. The data is encoded according to
 # the given Apache Avro schema. The destination Kafka topic may be a

data/lib/rimless/karafka/avro_deserializer.rb
CHANGED
@@ -19,9 +19,9 @@ module Rimless
 # occurence should be rare.
 Rimless
 .decode(params.raw_payload)
-.
-.
-.
+.then { |data| Sparsify(data, sparse_array: true) }
+.then { |data| data.transform_keys { |key| key.delete('\\') } }
+.then { |data| Unsparsify(data, sparse_array: true) }
 .deep_symbolize_keys
 end
 end

data/lib/rimless/rspec/helpers.rb
CHANGED
@@ -23,6 +23,8 @@ module Rimless
 # @return [OpenStruct] the fake deserialized Kafka message
 #
 # rubocop:disable Metrics/MethodLength because of the various properties
+# rubocop:disable Style/OpenStructUse because existing specs may rely
+# on this data type
 def kafka_message(topic: nil, headers: {}, **payload)
 OpenStruct.new(
 topic: Rimless.topic(topic),
@@ -38,6 +40,7 @@ module Rimless
 )
 end
 # rubocop:enable Metrics/MethodLength
+# rubocop:enable Style/OpenStructUse
 
 # Capture all Apache Kafka messages of the given block.
 #