quake-log-parser 1.0.0 → 1.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7bdff44e4bb78e09438ca2c9a2b7112cfcc35362d2e902cf559d52e79ca097b8
- data.tar.gz: 1bbfd5525b1f8ba829388541564ffe1865a3929fb0fc4c77a7704bacee45effe
+ metadata.gz: a3432357346962f3de62da5245ba27d2ff6034ed78c7692d43999ad4fe13ecb7
+ data.tar.gz: 0d899c7d18cc5d7b45f54367bb5a301d05509ce3acc31272abf8ea886f2087dc
  SHA512:
- metadata.gz: 501640c4b5278aee65618854cdf4de8ff1c5425a18f73d37ca85c0819ab0f9879add1fa4b02c3d54dee42b7c7d4e31d2c68fc5ca1b225c814da963c3efc343f3
- data.tar.gz: 6cfc084db65c8d1c2b07157a2cc04bf7189db4cd08c15642a9f771a5a0d9423e57524535ea09fe6c355e20b024e9f0dee8b8fe0905e3aa5ad164241353ca7c8e
+ metadata.gz: 193f317ca950907029fe6b476f45cfad9a264e7390f3e298441b71f4908e799c3d4cb9e503b90b507bad5d72053e4e92335e9c43ae68cf3f3e1d1741d978bbda
+ data.tar.gz: b4b499bea6a19fb2948cf3467183fcd870825ef61ed7638464484c7a2ad54f2e9822be0b3205cfd773fc9b34e24db4548c914a2ea9436e46d1d61cd6296234c0
data/.gitignore CHANGED
@@ -2,3 +2,5 @@
  /Gemfile.lock
  *.gem
  coverage/
+ log/
+ .irb-history
data/.rubocop.yml CHANGED
@@ -7,6 +7,12 @@ AllCops:
  Style/FrozenStringLiteralComment:
    Enabled: false

+ Style/Next:
+   Enabled: false
+
+ Style/SafeNavigation:
+   Enabled: false
+
  Layout/EndOfLine:
    Enabled: false

data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
  ## [Unreleased]

+ ## [1.1.0]
+
+ - Kafka integration
+
  ## [1.0.0]

  - Final release to publish the gem
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
  source 'https://rubygems.org'
  gemspec
  gem 'codecov', require: false, group: :test
+ gem 'karafka'
+ gem 'karafka-web'
data/README.md CHANGED
@@ -3,7 +3,6 @@
  [![Maintainability](https://api.codeclimate.com/v1/badges/8bbcb90abf1f392d7e68/maintainability)](https://codeclimate.com/github/pedrofurtado/quake-log-parser/maintainability)
  ![CI](https://github.com/pedrofurtado/quake-log-parser/actions/workflows/ci.yml/badge.svg)
  [![codecov](https://codecov.io/gh/pedrofurtado/quake-log-parser/graph/badge.svg?token=DUC0CORI0N)](https://codecov.io/gh/pedrofurtado/quake-log-parser)
- [![Gem Version](https://badge.fury.io/rb/quake-log-parser.svg)](https://badge.fury.io/rb/quake-log-parser)

  Ruby gem for quake log parsing.

@@ -23,7 +22,7 @@ Or install it yourself as:

  gem install quake-log-parser

- ## Usage
+ ## Usage - Log reader

  ```ruby
  require 'quake-log-parser'
@@ -40,6 +39,87 @@ parser.read
  puts JSON.pretty_generate(parser.results)
  ```

+ ## Usage - Emulated Kafka topic reader
+
+ ```ruby
+ =begin
+ All Kafka topic messages must have this structure:
+
+ {
+   "lines": [
+     "content of line 01 with slashes escaped",
+     "content of line 02 with slashes escaped",
+     "content of line 03 with slashes escaped",
+     "..."
+   ]
+ }
+
+ Example of message (with slashes escaped):
+
+ {
+   "lines": [
+     "0:00 InitGame: \\sv_floodProtect\\1\\sv_maxPing\\0\\sv_minPing\\0\\sv_maxRate\\10000\\sv_minRate\\0\\sv_hostname\\Code Miner Server\\g_gametype\\0\\sv_privateClients\\2\\sv_maxclients\\16\\sv_allowDownload\\0\\dmflags\\0\\fraglimit\\20\\timelimit\\15\\g_maxGameClients\\0\\capturelimit\\8\\version\\ioq3 1.36 linux-x86_64 Apr 12 2009\\protocol\\68\\mapname\\q3dm17\\gamename\\baseq3\\g_needpass\\0",
+     "20:34 ClientUserinfoChanged: 2 n\\Isgalamido\\t\\0\\model\\xian/default\\hmodel\\xian/default\\g_redteam\\g_blueteam\\c1\\4\\c2\\5\\hc\\100\\w\\0\\l\\0\\tt\\0\\tl\\0",
+     "20:54 Kill: 1022 2 22: <world> killed Isgalamido by MOD_TRIGGER_HURT",
+     "21:51 ClientUserinfoChanged: 3 n\\Dono da Bola\\t\\0\\model\\sarge/krusade\\hmodel\\sarge/krusade\\g_redteam\\g_blueteam\\c1\\5\\c2\\5\\hc\\95\\w\\0\\l\\0\\tt\\0\\tl\\0",
+     "22:06 Kill: 2 3 7: Isgalamido killed Dono da Bola by MOD_ROCKET_SPLASH"
+   ]
+ }
+ =end
+
+ require 'quake-log-parser'
+ require 'ostruct'
+
+ if defined?(Rails)
+   QuakeLogParser::Logger.logger = Rails.logger
+ else
+   QuakeLogParser::Logger.logger = Logger.new($stdout)
+ end
+
+ quake_log_lines = [
+   'content of line 01 with slashes escaped',
+   'content of line 02 with slashes escaped',
+   'content of line 03 with slashes escaped',
+   '...'
+ ]
+
+ emulated_messages_from_kafka = [
+   OpenStruct.new(payload: { 'lines' => quake_log_lines })
+ ]
+
+ parser = QuakeLogParser::KafkaTopicReader.new(emulated_messages_from_kafka)
+ parser.read
+
+ puts JSON.pretty_generate(parser.results)
+ ```
+
+ ## Usage - Kafka producer inside Karafka console
+
+ ```ruby
+ # Enter the Karafka console with the commands below:
+ # docker-compose up --build -d (wait for the Kafka components and the Karafka consumer to be ready)
+ # docker container exec -it quake-log-parser_kafka_consumer_1 bundle exec karafka console
+
+ # Then, open a second terminal to follow the logs of the Karafka consumer with the command below:
+ # docker container logs -f quake-log-parser_kafka_consumer_1 | grep -i 'quake'
+
+ # Then, run the commands below to produce a message to the Kafka topic:
+
+ topic_name = 'quake_log_parser_topic'
+ key = SecureRandom.uuid
+ payload = {
+   "lines" => [
+     "0:00 InitGame: \\sv_floodProtect\\1\\sv_maxPing\\0\\sv_minPing\\0\\sv_maxRate\\10000\\sv_minRate\\0\\sv_hostname\\Code Miner Server\\g_gametype\\0\\sv_privateClients\\2\\sv_maxclients\\16\\sv_allowDownload\\0\\dmflags\\0\\fraglimit\\20\\timelimit\\15\\g_maxGameClients\\0\\capturelimit\\8\\version\\ioq3 1.36 linux-x86_64 Apr 12 2009\\protocol\\68\\mapname\\q3dm17\\gamename\\baseq3\\g_needpass\\0",
+     "20:34 ClientUserinfoChanged: 2 n\\Isgalamido\\t\\0\\model\\xian/default\\hmodel\\xian/default\\g_redteam\\g_blueteam\\c1\\4\\c2\\5\\hc\\100\\w\\0\\l\\0\\tt\\0\\tl\\0",
+     "20:54 Kill: 1022 2 22: <world> killed Isgalamido by MOD_TRIGGER_HURT",
+     "21:51 ClientUserinfoChanged: 3 n\\Dono da Bola\\t\\0\\model\\sarge/krusade\\hmodel\\sarge/krusade\\g_redteam\\g_blueteam\\c1\\5\\c2\\5\\hc\\95\\w\\0\\l\\0\\tt\\0\\tl\\0",
+     "22:06 Kill: 2 3 7: Isgalamido killed Dono da Bola by MOD_ROCKET_SPLASH"
+   ]
+ }
+ delivery_report = Karafka.producer.produce_sync(topic: topic_name, payload: payload.to_json, key: key)
+ Karafka.logger.info("[QuakeLogParser::KafkaTopicProducer] send_sync topic=#{topic_name} key=#{key} offset=#{delivery_report.offset} partition=#{delivery_report.partition} payload=#{payload}")
+ ```
+
  ## Execute tests/specs

  To execute gem tests locally, use Docker with the commands below:
@@ -56,7 +136,7 @@ docker run --rm -v $(pwd):/app/ -it quake-log-parser_specs

  # Or, if you want to run an example of usage of the gem,
  # you can run the command below.
- docker run --rm -v $(pwd):/app/ -it quake-log-parser_specs ruby real_example_to_run.rb
+ docker run --rm -v $(pwd):/app/ -it quake-log-parser_specs bundle exec ruby real_example_to_run.rb
  ```

  ## Contributing
data/docker-compose.yaml ADDED
@@ -0,0 +1,49 @@
+ version: '3'
+
+ services:
+   zookeeper:
+     restart: always
+     image: confluentinc/cp-zookeeper:7.6.0
+     environment:
+       ZOOKEEPER_CLIENT_PORT: 2181
+
+   kafka:
+     restart: always
+     image: confluentinc/cp-kafka:7.6.0
+     depends_on:
+       - zookeeper
+     ports:
+       - "4000:9092"
+       - "3004:9094"
+     environment:
+       KAFKA_BROKER_ID: 1
+       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+       KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
+       KAFKA_LISTENERS: INTERNAL://:9092,OUTSIDE://:9094
+       KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,OUTSIDE://host.docker.internal:9094
+       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,OUTSIDE:PLAINTEXT
+
+   control-center:
+     restart: always
+     image: confluentinc/cp-enterprise-control-center:7.6.0
+     hostname: control-center
+     depends_on:
+       - kafka
+     ports:
+       - "9998:9021"
+     environment:
+       CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092'
+       CONTROL_CENTER_REPLICATION_FACTOR: 1
+       CONTROL_CENTER_CONNECT_CLUSTER: http://kafka-connect:8083
+       PORT: 9021
+
+   kafka_consumer:
+     depends_on:
+       - kafka
+     restart: always
+     image: ruby:3.1.6
+     working_dir: /app
+     volumes:
+       - ./:/app
+     command: /bin/bash -c "bundle install && bundle exec karafka-web migrate && bundle exec karafka topics migrate && bundle exec karafka server"
data/karafka.rb ADDED
@@ -0,0 +1,46 @@
+ $stdout.sync = true
+ $stderr.sync = true
+
+ ENV['KARAFKA_ENV'] ||= 'development'
+ Bundler.require(:default, ENV['KARAFKA_ENV'])
+
+ require_relative 'lib/quake-log-parser'
+
+ class KarafkaApp < Karafka::App
+   setup do |config|
+     config.kafka = {
+       'bootstrap.servers': 'kafka:9092',
+       'allow.auto.create.topics': false
+     }
+     config.client_id = 'quake_log_parser_client'
+     config.initial_offset = 'earliest'
+     config.consumer_persistence = ENV['KARAFKA_ENV'] != 'development'
+   end
+
+   Karafka.monitor.subscribe(Karafka::Instrumentation::LoggerListener.new)
+   Karafka.producer.monitor.subscribe(
+     WaterDrop::Instrumentation::LoggerListener.new(
+       Karafka.logger,
+       log_messages: false
+     )
+   )
+
+   routes.draw do
+     topic :quake_log_parser_topic do
+       config(partitions: 3)
+       consumer QuakeLogParser::KafkaTopicConsumer
+       dead_letter_queue(
+         topic: 'quake_log_parser_dead_letter_topic',
+         max_retries: 0,
+         independent: false
+       )
+     end
+   end
+ end
+
+ Karafka::Web.setup do |config|
+   config.ui.sessions.secret = 'dda09abf2cbd7d2e171dc44ed11877560a8bbc1ddbf54d2a58b0171634863513'
+   config.tracking.interval = 5_000
+ end
+
+ Karafka::Web.enable!
data/lib/quake-log-parser/kafka_topic_consumer.rb ADDED
@@ -0,0 +1,12 @@
+ require 'karafka'
+
+ module QuakeLogParser
+   class KafkaTopicConsumer < ::Karafka::BaseConsumer
+     def consume
+       ::Karafka.logger.info "[QuakeLogParser::KafkaTopicConsumer] Consuming #{messages.size} messages from TOPIC #{topic.name}"
+       kafka_topic_reader = ::QuakeLogParser::KafkaTopicReader.new(messages)
+       kafka_topic_reader.read
+       ::Karafka.logger.info "[QuakeLogParser::KafkaTopicConsumer] Results: #{kafka_topic_reader.results}"
+     end
+   end
+ end
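
Note: the gemspec hunk further below adds karafka-testing as a development dependency, but no spec for the new consumer appears in this diff. A minimal sketch of how QuakeLogParser::KafkaTopicConsumer could be exercised with the karafka-testing RSpec helpers; the spec file path and the use of `karafka.consumer_for` / `karafka.produce` are assumptions based on that gem, not part of this diff:

```ruby
# spec/quake-log-parser/kafka_topic_consumer_spec.rb (hypothetical path)
# Assumes the karafka.rb boot file from this diff has been loaded, so the
# :quake_log_parser_topic route and its JSON deserializer are defined.
require 'karafka/testing/rspec/helpers'

RSpec.configure do |config|
  config.include Karafka::Testing::RSpec::Helpers
end

RSpec.describe QuakeLogParser::KafkaTopicConsumer do
  # Builds an in-memory consumer wired to the routed topic
  subject(:consumer) { karafka.consumer_for(:quake_log_parser_topic) }

  before do
    # Buffers a message shaped like the payloads the README producer sends
    karafka.produce(
      { 'lines' => ['20:54 Kill: 1022 2 22: <world> killed Isgalamido by MOD_TRIGGER_HURT'] }.to_json
    )
  end

  it 'consumes the buffered batch without raising' do
    expect { consumer.consume }.not_to raise_error
  end
end
```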
data/lib/quake-log-parser/kafka_topic_reader.rb ADDED
@@ -0,0 +1,29 @@
+ module QuakeLogParser
+   class KafkaTopicReader
+     def initialize(messages)
+       @messages = messages
+       @line_handler = QuakeLogParser::LineHandler.new
+     end
+
+     def read
+       @messages.each do |message|
+         if message.payload['lines']
+           message.payload['lines'].each do |line|
+             case line
+             when QuakeLogParser::Patterns.new_game
+               @line_handler.handle_new_game(line)
+             when QuakeLogParser::Patterns.new_player
+               @line_handler.handle_new_player(line)
+             when QuakeLogParser::Patterns.new_kill
+               @line_handler.handle_new_kill(line)
+             end
+           end
+         end
+       end
+     end
+
+     def results
+       @line_handler.results
+     end
+   end
+ end
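
KafkaTopicReader only calls #payload on each message and expects a Hash with a 'lines' array, which is what the README's OpenStruct example relies on. A minimal sketch of driving it outside a Karafka consumer, with a hypothetical Struct-based wrapper standing in for a real Kafka message:

```ruby
require 'json'
require 'logger'
require 'quake-log-parser'

# Hypothetical stand-in for a Kafka message: only #payload is needed.
EmulatedMessage = Struct.new(:payload)

QuakeLogParser::Logger.logger = Logger.new($stdout)

messages = [
  EmulatedMessage.new(
    'lines' => ['20:54 Kill: 1022 2 22: <world> killed Isgalamido by MOD_TRIGGER_HURT']
  )
]

reader = QuakeLogParser::KafkaTopicReader.new(messages)
reader.read
puts JSON.pretty_generate(reader.results)
```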
data/lib/quake-log-parser/version.rb CHANGED
@@ -1,3 +1,3 @@
  module QuakeLogParser
-   VERSION = '1.0.0'.freeze
+   VERSION = '1.1.0'.freeze
  end
data/lib/quake-log-parser.rb CHANGED
@@ -1,4 +1,6 @@
  require_relative 'quake-log-parser/version'
+ require_relative 'quake-log-parser/kafka_topic_reader'
+ require_relative 'quake-log-parser/kafka_topic_consumer'
  require_relative 'quake-log-parser/logger'
  require_relative 'quake-log-parser/patterns'
  require_relative 'quake-log-parser/kill'
data/quake-log-parser.gemspec CHANGED
@@ -21,6 +21,9 @@ Gem::Specification.new do |spec|
  spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

+ spec.add_dependency 'karafka'
+ spec.add_dependency 'karafka-web'
+ spec.add_development_dependency 'karafka-testing'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'rubocop'
metadata CHANGED
@@ -1,15 +1,57 @@
  --- !ruby/object:Gem::Specification
  name: quake-log-parser
  version: !ruby/object:Gem::Version
-   version: 1.0.0
+   version: 1.1.0
  platform: ruby
  authors:
  - Pedro Furtado
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-09-20 00:00:00.000000000 Z
+ date: 2024-09-27 00:00:00.000000000 Z
  dependencies:
+ - !ruby/object:Gem::Dependency
+   name: karafka
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: karafka-web
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: karafka-testing
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: rake
    requirement: !ruby/object:Gem::Requirement
@@ -75,8 +117,12 @@ files:
  - Rakefile
  - bin/console
  - bin/setup
+ - docker-compose.yaml
+ - karafka.rb
  - lib/quake-log-parser.rb
  - lib/quake-log-parser/game.rb
+ - lib/quake-log-parser/kafka_topic_consumer.rb
+ - lib/quake-log-parser/kafka_topic_reader.rb
  - lib/quake-log-parser/kill.rb
  - lib/quake-log-parser/line_handler.rb
  - lib/quake-log-parser/log_reader.rb