rimless 1.2.0 → 1.3.0

Files changed (58)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/documentation.yml +3 -2
  3. data/.github/workflows/test.yml +6 -9
  4. data/.rspec +2 -2
  5. data/.rubocop.yml +16 -2
  6. data/.simplecov +12 -0
  7. data/Appraisals +2 -22
  8. data/CHANGELOG.md +8 -0
  9. data/Dockerfile +2 -3
  10. data/Envfile +0 -3
  11. data/Guardfile +44 -0
  12. data/LICENSE +1 -1
  13. data/Makefile +18 -7
  14. data/Rakefile +13 -68
  15. data/doc/kafka-playground/.gitignore +2 -0
  16. data/doc/kafka-playground/Dockerfile +41 -0
  17. data/doc/kafka-playground/Gemfile +8 -0
  18. data/doc/kafka-playground/Gemfile.lock +155 -0
  19. data/doc/kafka-playground/Makefile +209 -0
  20. data/doc/kafka-playground/README.md +185 -0
  21. data/doc/kafka-playground/bin/consume-topic +7 -0
  22. data/doc/kafka-playground/bin/create-topic +42 -0
  23. data/doc/kafka-playground/bin/delete-topic +22 -0
  24. data/doc/kafka-playground/bin/list-topics +3 -0
  25. data/doc/kafka-playground/bin/produce-event +64 -0
  26. data/doc/kafka-playground/config/avro_schemas/.gitignore +1 -0
  27. data/doc/kafka-playground/config/avro_schemas/playground_app/item_v1.avsc.erb +36 -0
  28. data/doc/kafka-playground/config/avro_schemas/playground_app/payment_v1.avsc.erb +59 -0
  29. data/doc/kafka-playground/config/avro_schemas/playground_app/payment_v1_event.avsc.erb +18 -0
  30. data/doc/kafka-playground/config/docker/shell/.bash_profile +3 -0
  31. data/doc/kafka-playground/config/docker/shell/.bashrc +231 -0
  32. data/doc/kafka-playground/config/docker/shell/.config/kcat.conf +3 -0
  33. data/doc/kafka-playground/config/docker/shell/.gemrc +2 -0
  34. data/doc/kafka-playground/config/docker/shell/.inputrc +17 -0
  35. data/doc/kafka-playground/config/environment.rb +69 -0
  36. data/doc/kafka-playground/doc/assets/project.svg +68 -0
  37. data/doc/kafka-playground/docker-compose.yml +83 -0
  38. data/doc/kafka-playground/examples/rimless-produce +48 -0
  39. data/gemfiles/rails_5.2.gemfile +2 -2
  40. data/lib/rimless/configuration_handling.rb +11 -1
  41. data/lib/rimless/consumer.rb +4 -2
  42. data/lib/rimless/dependencies.rb +3 -0
  43. data/lib/rimless/kafka_helpers.rb +2 -0
  44. data/lib/rimless/karafka/avro_deserializer.rb +3 -3
  45. data/lib/rimless/rspec/helpers.rb +3 -0
  46. data/lib/rimless/rspec/matchers.rb +3 -4
  47. data/lib/rimless/rspec.rb +1 -1
  48. data/lib/rimless/tasks/consumer.rake +3 -0
  49. data/lib/rimless/tasks/generator.rake +3 -0
  50. data/lib/rimless/tasks/stats.rake +5 -2
  51. data/lib/rimless/version.rb +18 -1
  52. data/lib/rimless.rb +0 -1
  53. data/rimless.gemspec +43 -29
  54. metadata +119 -76
  55. data/gemfiles/rails_4.2.gemfile +0 -8
  56. data/gemfiles/rails_5.0.gemfile +0 -8
  57. data/gemfiles/rails_5.1.gemfile +0 -8
  58. data/gemfiles/rails_6.0.gemfile +0 -8
@@ -0,0 +1,209 @@
+ MAKEFLAGS += --warn-undefined-variables -j1
+ SHELL := bash
+ .SHELLFLAGS := -eu -o pipefail -c
+ .DEFAULT_GOAL := all
+ .DELETE_ON_ERROR:
+ .SUFFIXES:
+ .PHONY:
+
+ # Environment switches
+ MAKE_ENV ?= docker
+ MAKE_SUB_ENV ?= false
+ DOCKER_MOUNT_MODE ?= rw
+ IMAGE_VENDOR ?= hausgold
+ PREBUILD_IMAGE_SEARCH ?= kafka-playground
+ PROJECT_NAME ?= kafka-playground
+ START ?= foreground
+ START_CONTAINERS ?= kafka schema-registry schema-registry-ui
+ COMPOSE_RUN_SHELL_FLAGS ?= --rm
+ BASH_RUN_SHELL_FLAGS ?=
+ BUNDLE_FLAGS ?=
+
+ # Directories
+ LOG_DIR ?= log
+
+ # Host binaries
+ AWK ?= awk
+ BASH ?= bash
+ CHMOD ?= chmod
+ COMPOSE ?= docker-compose
+ CP ?= cp
+ CUT ?= cut
+ DOCKER ?= docker
+ ECHO ?= echo
+ FIND ?= find
+ GREP ?= grep
+ HEAD ?= head
+ LS ?= ls
+ MKDIR ?= mkdir
+ MV ?= mv
+ NODE ?= node
+ NPM ?= npm
+ NPROC ?= nproc
+ PRINTF ?= printf
+ RM ?= rm
+ TAIL ?= tail
+ TEE ?= tee
+ TEST ?= test
+ WC ?= wc
+ XARGS ?= xargs
+
+ # Container binaries
+ BUNDLE ?= bundle
+
+ # Check all binaries which need to be available
+ CHECK_BINS ?= AWK BASH CHMOD ECHO HEAD FIND GREP LS MKDIR \
+ MV NODE NPM NPROC PRINTF TAIL TEE TEST WC XARGS
+
+ ifeq ($(MAKE_ENV),docker)
+ # Check also the docker binaries
+ CHECK_BINS += COMPOSE DOCKER
+ else ifeq ($(MAKE_ENV),baremetal)
+ # Nothing to do here - just an env check
+ else
+ $(error MAKE_ENV got an invalid value. Use `docker` or `baremetal`)
+ endif
+
+ all:
+ # Apache Kafka Playground
+ #
+ # install Install the dependencies
+ # start Start the containers
+ # stop Stop all running containers
+ # logs Monitor the started containers
+ # update-images Pull the latest Docker images and rebuild ours
+ #
+ # shell Start an interactive shell session
+ #
+ # clean Clean all temporary application files
+ # clean-containers Clean the Docker containers (also database data)
+ # distclean Same as clean and cleans Docker images
+
+ # Check a binary
+ # $1 - The binary
+ define check-binary
+ $(shell if [ -n "`which g$($(1)) 2>/dev/null`" ]; then \
+ echo 'g$($(1))'; \
+ elif [ -n "`which $($(1)) 2>/dev/null`" ]; then \
+ echo '$($(1))'; \
+ else \
+ echo '$$(error Neither "$($(1))" nor "g$($(1))" is available ($(1)))'; \
+ fi)
+ endef
+
+ # Define a generic shell run wrapper
+ # $1 - The command to run
+ ifeq ($(MAKE_ENV),docker)
+ define run-shell
+ $(PRINTF) '# (Docker mount mode: $(DOCKER_MOUNT_MODE))\n'; \
+ $(COMPOSE) run $(COMPOSE_RUN_SHELL_FLAGS) \
+ -e LANG=en_US.UTF-8 -e LANGUAGE=en_US.UTF-8 -e LC_ALL=en_US.UTF-8 \
+ -u app app bash $(BASH_RUN_SHELL_FLAGS) -c 'sleep 0.1; echo; $(1)'
+ endef
+ else ifeq ($(MAKE_ENV),baremetal)
+ define run-shell
+ $(1)
+ endef
+ endif
+
+ # Define a retry helper
+ # $1 - The command to run
+ define retry
+ if eval "$(call run-shell,$(1))"; then exit 0; fi; \
+ for i in 1; do sleep 10s; echo "Retrying $$i..."; \
+ if eval "$(call run-shell,$(1))"; then exit 0; fi; \
+ done; \
+ exit 1
+ endef
+
+ # Check all binaries
+ _ := $(foreach BIN,$(CHECK_BINS),$(eval $(BIN) := $(call check-binary,$(BIN))))
+
+ COMPOSE := $(COMPOSE) -p $(PROJECT_NAME)
+ PREBUILT_IMAGE ?= $(PROJECT_NAME)_app:latest
+
+ .interactive:
+ @$(eval BASH_RUN_SHELL_FLAGS = --login)
+
+ .not-implemented:
+ # Not yet implemented.
+
+ install:
+ # Install the dependencies
+ ifeq ($(MAKE_ENV),docker)
+ @$(eval INSTALL_NAME = $(PROJECT_NAME)_install)
+ @$(eval COMPOSE_RUN_SHELL_FLAGS = --no-deps --name $(INSTALL_NAME))
+ @$(DOCKER) rm -f $(INSTALL_NAME) 2>/dev/null || true
+ endif
+ @$(call retry,$(BUNDLE) check || \
+ $(BUNDLE) install --jobs $(shell $(NPROC)) \
+ --retry 3 $(BUNDLE_FLAGS))
+ ifeq ($(MAKE_ENV),docker)
+ @$(DOCKER) commit $(INSTALL_NAME) $(PREBUILT_IMAGE)
+ @$(DOCKER) rm -f $(INSTALL_NAME) 2>/dev/null || true
+ endif
+
+ start: stop
+ # Start the application
+ ifeq ($(START),foreground)
+ @$(COMPOSE) up $(START_CONTAINERS)
+ else
+ $(error START got an invalid value. Use `foreground`.)
+ endif
+
+ restart:
+ # Restart the application
+ @$(MAKE) stop start
+
+ logs:
+ # Monitor the started application
+ @$(COMPOSE) logs -f --tail='all'
+
+ stop: clean-containers
+ stop-containers:
+ # Stop all running containers
+ @$(COMPOSE) stop -t 5 || true
+ @$(DOCKER) ps -a | $(GREP) $(PROJECT_NAME)_ | $(CUT) -d ' ' -f1 \
+ | $(XARGS) -rn10 $(DOCKER) stop -t 5 || true
+
+ shell:
+ # Start an interactive shell session
+ @$(call run-shell,$(BASH) -i)
+
+ update-images: clean-containers clean-images
+ # Pull latest Docker images
+ @$(GREP) -Pih 'from|image:' docker-compose.yml Dockerfile \
+ | $(GREP) -Po '$(IMAGE_VENDOR).*' \
+ | $(XARGS) -rn1 $(DOCKER) pull
+ @$(MAKE) install
+
+ clean-logs:
+ # Clean logs
+ @$(RM) -rf $(LOG_DIR)
+ @$(MKDIR) -p $(LOG_DIR)
+
+ clean-containers: stop-containers
+ # Stop and kill all containers
+ @$(COMPOSE) rm -vf || true
+ @$(DOCKER) ps -a | $(GREP) $(PROJECT_NAME)_ | $(CUT) -d ' ' -f1 \
+ | $(XARGS) -rn10 $(DOCKER) rm -vf || true
+
+ clean-images: clean-containers
+ # Remove all docker images
+ $(eval APP_NAME = $(shell $(CUT) -d: -f2 <<< $(PREBUILD_IMAGE_SEARCH)))
+ $(eval EMPTY = ) $(eval CLEAN_IMAGES = $(PROJECT_NAME)_ $(PREBUILT_IMAGE))
+ $(eval CLEAN_IMAGES += $(PREBUILD_IMAGE_SEARCH) \s+$(APP_NAME): <none>)
+ @$(DOCKER) images -a --format '{{.ID}} {{.Repository}}:{{.Tag}}' \
+ | $(GREP) -P "$(subst $(EMPTY) $(EMPTY),|,$(CLEAN_IMAGES))" \
+ | $(AWK) '{print $$0}' \
+ | $(XARGS) -rn1 $(DOCKER) rmi -f 2>&1 \
+ | $(GREP) -vP 'cannot be forced|invalid reference' || true
+
+ clean: clean-logs clean-containers
+ distclean: clean clean-images
+
+ usage: .not-implemented
+ docs: .not-implemented
+ stats: .not-implemented
+ test: .not-implemented
+ watch: .not-implemented
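
For orientation, a minimal usage sketch of the targets defined above, run from the playground directory on the host; the `MAKE_ENV=baremetal` switch (shown last) bypasses the Docker wrapper and assumes local Ruby/Bundler tooling:

```shell
# Install the bundle inside the app container and commit the prebuilt image
$ make install

# Boot the default containers (kafka, schema-registry, schema-registry-ui)
$ make start

# Open an interactive shell session inside the app container
$ make shell

# Run the install target without the Docker wrapper (assumes local tooling)
$ MAKE_ENV=baremetal make install
```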
@@ -0,0 +1,185 @@
+ ![Apache Kafka Playground](doc/assets/project.svg)
+
+ This sub-project allows a simple local bootstrap of the Apache Kafka
+ ecosystem with the help of containers/Docker. **Heads up!** This
+ configuration is not designed to be used in production.
+
+ - [Requirements](#requirements)
+ - [Getting started](#getting-started)
+ - [What's in the box](#whats-in-the-box)
+ - [Examples](#examples)
+   - [Simple Message Producing/Consuming](#simple-message-producingconsuming)
+   - [Message Producing/Consuming with Rimless (Apache Avro)](#message-producingconsuming-with-rimless-apache-avro)
+
+ ## Requirements
+
+ * [GNU Make](https://www.gnu.org/software/make/) (>=4.2.1)
+ * [Docker](https://www.docker.com/get-docker) (>=17.06.0-ce)
+ * [Docker Compose](https://docs.docker.com/compose/install/) (>=1.15.0)
+ * [Host enabled mDNS stack](#mdns-host-configuration)
+
+ ## Getting started
+
+ First you need to clone this repository from GitHub:
+
+ ```bash
+ # Clone the repository
+ $ git clone git@github.com:hausgold/rimless.git
+ # Go into the repository directory
+ $ cd rimless/doc/kafka-playground
+ ```
+
+ We assume you have prepared the requirements in advance. The only thing
+ left is to install and start the application:
+
+ ```shell
+ $ make install
+ $ make start
+ ```
+
+ ## mDNS host configuration
+
+ If you are running Ubuntu/Debian, all required packages should be in place out
+ of the box. On older versions (Ubuntu < 18.10, Debian < 10) the configuration
+ is also fine out of the box. However, if you find yourself unable to resolve
+ the domains, or if you are a lucky user of a newer Ubuntu/Debian version, read on.
+
+ **Heads up:** This is the Arch Linux way (package and service names may
+ differ, the configuration is the same). Install the `nss-mdns` and `avahi`
+ packages, enable and start the `avahi-daemon.service`. Then edit the file
+ `/etc/nsswitch.conf` and change the hosts line like this:
+
+ ```bash
+ hosts: ... mdns4 [NOTFOUND=return] resolve [!UNAVAIL=return] dns ...
+ ```
+
+ Afterwards, create the `/etc/mdns.allow` file (or overwrite it if already
+ present) with the following content:
+
+ ```bash
+ .local.
+ .local
+ ```
+
+ This is the regular way for nss-mdns package versions > 0.10 (the default
+ now). If you use a system with 0.10 or lower, use `mdns4_minimal` instead of
+ `mdns4` in the `/etc/nsswitch.conf` file and skip the creation of the
+ `/etc/mdns.allow` file.
+
+ **Further reading**
+ * Arch Linux howto: https://wiki.archlinux.org/index.php/avahi
+ * Ubuntu/Debian howto: https://wiki.ubuntuusers.de/Avahi/
+ * Further details on nss-mdns: https://github.com/lathiat/nss-mdns
+
+ ## What's in the box
+
+ After the installation and bootup processes are finished, you should have a
+ working Apache Kafka setup which includes the following:
+
+ * A single-node [Apache Kafka](https://kafka.apache.org/) broker via [Zookeeper](https://zookeeper.apache.org/)
+ * [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry/index.html), used for [Apache Avro](https://avro.apache.org/docs/current/) schemas
+ * [Lenses.io Schema Registry UI](https://github.com/lensesio/schema-registry-ui), accessible via mDNS at http://schema-registry-ui.playground.local
+ * A Ruby 2.5 enabled playground container with configured Rimless support
+
+ ## Examples
+
+ ### Simple Message Producing/Consuming
+
+ Start a playground container with `$ make start` and run the following:
+
+ ```shell
+ $ create-topic -v test
+ ```
+
+ ```shell
+ $ list-topics
+
+ Metadata for all topics (from broker 1001: kafka.playground.local:9092/1001):
+ 1 brokers:
+ broker 1001 at kafka.playground.local:9092 (controller)
+ 2 topics:
+ topic "_schemas" with 1 partitions:
+ partition 0, leader 1001, replicas: 1001, isrs: 1001
+ topic "test" with 1 partitions:
+ ```
+
+ Now start a second playground container in another terminal with `$ make shell` and run:
+
+ ```shell
+ # Terminal B
+
+ $ consume-topic test
+
+ % Waiting for group rebalance
+ % Group kcat rebalanced (memberid kcat-1ec7324b-463c-4c1e-ab47-b58aa886a98d): assigned: test [0]
+ % Reached end of topic test [0] at offset 0
+ ```
+
+ In the first container session run:
+
+ ```shell
+ # Terminal A
+
+ $ echo '{"test":true}' | produce-event test -
+
+ Processing lines of '/dev/stdin' ..
+ {"test":true}
+ ```
+
+ And see that the consumer output in the second terminal changed to:
+
+ ```shell
+ # Terminal B
+
+ $ consume-topic test
+
+ % Waiting for group rebalance
+ % Group kcat rebalanced (memberid kcat-1ec7324b-463c-4c1e-ab47-b58aa886a98d): assigned: test [0]
+ % Reached end of topic test [0] at offset 0
+ {"test":true}
+
+ % Reached end of topic test [0] at offset 1
+ ```
+
+ ### Message Producing/Consuming with Rimless (Apache Avro)
+
+ Set up two playground container sessions with `$ make shell` and run the
+ following snippets to produce an Apache Avro message and consume it with
+ [kcat](https://github.com/edenhill/kcat):
+
+ ```shell
+ # Terminal A
+
+ $ create-topic production.playground-app.payments
+ $ consume-topic -s value=avro production.playground-app.payments
+ ```
+
+ And on the other side run:
+
+ ```shell
+ # Terminal B
+
+ $ examples/rimless-produce
+
+ {"event"=>"payment_authorized",
+ "payment"=>
+ {"gid"=>"gid://playground-app/Payment/19da4f09-56c8-47d6-8a01-dc7ec2f9daff",
+ "currency"=>"eur",
+ "net_amount_sum"=>500,
+ "items"=>
+ [{"gid"=>
+ "gid://playground-app/PaymentItem/9f2d9746-52a8-4b8a-a614-4f8f8b4ef4a5",
+ "net_amount"=>499,
+ "tax_rate"=>19,
+ "created_at"=>"2021-10-27T15:09:06.990+00:00",
+ "updated_at"=>nil},
+ {"gid"=>
+ "gid://playground-app/PaymentItem/c8f9f718-03fd-442a-9677-1a4e64349c2c",
+ "net_amount"=>1,
+ "tax_rate"=>19,
+ "created_at"=>"2021-10-27T15:09:06.990+00:00",
+ "updated_at"=>nil}],
+ "state"=>"authorized",
+ "created_at"=>"2021-10-27T15:09:06.990+00:00",
+ "updated_at"=>"2021-10-27T15:09:06.990+00:00"}}
+ ```
@@ -0,0 +1,7 @@
+ #!/bin/bash
+
+ if [ -z "${GROUP}" ]; then
+   GROUP="kcat"
+ fi
+
+ kcat -G "${GROUP}" ${@}
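
The script above is a thin wrapper around kcat's consumer mode: the `GROUP` environment variable (defaulting to `kcat`) selects the consumer group and all remaining arguments are passed straight to kcat. A usage sketch with an illustrative group name:

```shell
# Consume the "test" topic with the default consumer group ("kcat")
$ consume-topic test

# Use a custom consumer group and decode Avro-encoded message values
$ GROUP=my-group consume-topic -s value=avro production.playground-app.payments
```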
@@ -0,0 +1,42 @@
+ #!/usr/bin/env ruby
+
+ require_relative '../config/environment'
+
+ class CreateTopic < Thor
+   default_command :create
+
+   desc 'NAME [CONFIGS...]', 'create a new Apache Kafka topic'
+   option :partitions, aliases: '-p', type: :numeric, default: 1,
+                       desc: 'The number of partitions'
+   option :replicas, aliases: '-r', type: :numeric, default: 1,
+                     desc: 'The number of replications'
+   option :verbose, aliases: '-v', type: :boolean,
+                    desc: 'Enable verbose outputs'
+   def create(name, *configs)
+     debug! options
+
+     opts = {
+       num_partitions: options[:partitions].to_i,
+       replication_factor: options[:replicas].to_i,
+     }
+     config = configs.map { |conf| conf.split('=').map(&:strip) }.to_h
+
+     if topic?(name)
+       STDERR.puts "The topic '#{name}' already exists."
+       puts JSON.pretty_generate(@topic_conf)
+       exit
+     end
+
+     # Create the topic
+     KafkaClient.create_topic(name, **opts, config: config)
+
+     # Fetch the topic config
+     puts JSON.pretty_generate(KafkaClient.describe_topic(name))
+   rescue Kafka::InvalidConfig
+     STDOUT.puts "Could not create the topic '#{name}'."
+     STDOUT.puts "The given configuration is invalid:\n\n"
+     puts JSON.pretty_generate(config)
+     exit 1
+   end
+ end
+ CreateTopic.start(args!)
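
A usage sketch for the Thor command above; trailing `KEY=VALUE` arguments are split on `=` and forwarded as topic-level configuration (`cleanup.policy=compact` is just an illustrative Kafka topic setting, and the topic names are illustrative too):

```shell
# Create a single-partition, single-replica topic with verbose output
$ create-topic -v test

# Three partitions plus a topic-level configuration override
$ create-topic -p 3 -r 1 payments cleanup.policy=compact
```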
@@ -0,0 +1,22 @@
+ #!/usr/bin/env ruby
+
+ require_relative '../config/environment'
+
+ class DeleteTopic < Thor
+   default_command :delete
+
+   desc 'NAME', 'delete an existing Apache Kafka topic'
+   option :verbose, aliases: '-v', type: :boolean,
+                    desc: 'Enable verbose outputs'
+   def delete(name)
+     debug! options
+
+     unless topic?(name)
+       STDERR.puts "The topic '#{name}' does not exist."
+       exit 1
+     end
+
+     KafkaClient.delete_topic name
+   end
+ end
+ DeleteTopic.start(args!)
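
The removal counterpart works the same way; deleting an unknown topic prints an error and exits with a non-zero status (topic names below are illustrative):

```shell
# Delete an existing topic (verbose output enabled)
$ delete-topic -v test

# Unknown topics are rejected with exit status 1
$ delete-topic unknown-topic
```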
@@ -0,0 +1,3 @@
+ #!/bin/bash
+
+ kcat -L ${@}
@@ -0,0 +1,64 @@
+ #!/usr/bin/env ruby
+
+ require_relative '../config/environment'
+
+ class ProduceTopic < Thor
+   default_command :produce
+
+   desc 'TOPIC FILE...', 'produce a new event at a given Apache Kafka topic'
+   option :partition, aliases: '-p', type: :numeric,
+                      desc: 'The topic partitions to write to'
+   option :partition_key, aliases: '-k', type: :string,
+                          desc: 'The partition key to use to select the partition'
+   option :verbose, aliases: '-v', type: :boolean,
+                    desc: 'Enable verbose outputs'
+   def produce(topic, *files)
+     debug! options
+
+     opts = {
+       topic: topic,
+       partition: options[:partition]&.to_i,
+       partition_key: options[:partition_key]
+     }.compact
+
+     if options.key?(:partition) && options.key?(:partition_key)
+       STDERR.puts 'Either use the fixed partition or a partition key.'
+       STDERR.puts 'But not both together.'
+       exit 1
+     end
+
+     files = files.map do |file|
+       next '/dev/stdin' if file == '-'
+
+       unless File.file? file
+         STDERR.puts "File '#{file}' does not exist."
+         next
+       end
+
+       file
+     end.compact.uniq
+
+     if files.empty?
+       STDERR.puts 'No files given or none of them exist.'
+       STDERR.puts 'You have to specify file(s) or use `-\' for stdin.'
+       exit 1
+     end
+
+     producer = KafkaClient.producer
+
+     files.each do |file|
+       puts "Processing lines of '#{file}' .."
+       File.open(file, 'r') do |f|
+         f.each_line.lazy.each do |line|
+           puts line
+           producer.produce(line, **opts)
+           puts
+         end
+       end
+       producer.deliver_messages
+     end
+   rescue Interrupt
+     producer.deliver_messages
+   end
+ end
+ ProduceTopic.start(args!)
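
A usage sketch for the producer script above: each line of the given files becomes one message, `-` stands for stdin, and `-p`/`-k` select either a fixed partition or a partition key (the file and key names below are illustrative):

```shell
# Produce a single JSON message from stdin (as in the README example)
$ echo '{"test":true}' | produce-event test -

# Produce every line of a file, routed by a partition key
$ produce-event -k customer-42 test events.jsonl
```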
@@ -0,0 +1 @@
+ compiled/
@@ -0,0 +1,36 @@
+ {
+   "name": "item_v1",
+   "doc": "Playground App - item attached to a payment",
+   "namespace": "<%= namespace %>",
+   "type": "record",
+   "fields": [
+     {
+       "name": "gid",
+       "type": "string",
+       "doc": "The global ID of the item that is being sold"
+     },
+     {
+       "name": "net_amount",
+       "type": "int",
+       "doc": "The price of the item in cents (without tax)"
+     },
+     {
+       "name": "tax_rate",
+       "type": "float",
+       "doc": "The tax rate for the item"
+     },
+     {
+       "name": "created_at",
+       "type": "string",
+       "doc": "When the item was created (ISO 8601)"
+     },
+     {
+       "name": "updated_at",
+       "type": [
+         "null",
+         "string"
+       ],
+       "doc": "When the item was last updated (ISO 8601)"
+     }
+   ]
+ }
@@ -0,0 +1,59 @@
+ {
+   "name": "payment_v1",
+   "doc": "Playground App - the payment",
+   "namespace": "<%= namespace %>",
+   "type": "record",
+   "fields": [
+     {
+       "name": "gid",
+       "type": "string",
+       "doc": "Global ID of the payment (UUID)"
+     },
+     {
+       "name": "currency",
+       "type": "string",
+       "doc": "The currency used for the payment"
+     },
+     {
+       "name": "net_amount_sum",
+       "type": "int",
+       "doc": "The price sum with tax included in cents"
+     },
+     {
+       "name": "items",
+       "type": {
+         "items": "<%= namespace %>.item_v1",
+         "type": "array"
+       },
+       "doc": "All connected payment items"
+     },
+     {
+       "name": "state",
+       "type": {
+         "name": "state",
+         "type": "enum",
+         "symbols": [
+           "pending",
+           "authorized",
+           "settled",
+           "cancelled",
+           "failed"
+         ]
+       },
+       "doc": "The current state of the payment"
+     },
+     {
+       "name": "created_at",
+       "type": "string",
+       "doc": "When the payment was created (ISO 8601)"
+     },
+     {
+       "name": "updated_at",
+       "type": [
+         "null",
+         "string"
+       ],
+       "doc": "When the payment was last updated (ISO 8601)"
+     }
+   ]
+ }
@@ -0,0 +1,18 @@
+ {
+   "name": "payment_v1_event",
+   "namespace": "<%= namespace %>",
+   "type": "record",
+   "doc": "Playground App - payment event representation",
+   "fields": [
+     {
+       "doc": "The event name/type",
+       "name": "event",
+       "type": "string"
+     },
+     {
+       "doc": "The corresponding payment of the event",
+       "name": "payment",
+       "type": "<%= namespace %>.payment_v1"
+     }
+   ]
+ }
@@ -0,0 +1,3 @@
+ if [ -f ~/.bashrc ]; then
+   . ~/.bashrc
+ fi