fluent-plugin-querycombiner 0.0.0.pre → 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 432eccbf19e7b303cf6457b3560a43779b7051b3
- data.tar.gz: 8a3c7c461b1db05ff3968d49f46c1dcd694b7e7a
+ metadata.gz: 65fedb30907434824303a1a024616f1bc3431d3e
+ data.tar.gz: 12a5292be841fc2c060892f0fc588a0172ce01df
  SHA512:
- metadata.gz: 00f8c3f21610a21e5ad6c6887377fef64c70186cd73c1e84813240581ef9f7b2d68c140fa15543052eea7134b7086ac8f6903bb7472fc077aaa2d18fac8de5c3
- data.tar.gz: a8bdcf68fe8adfb36cda540c8f691829e73476a38713f5105ddd3bf18084e5a980b162f863c696ee055d0a37cc2fc362301946e94d29f606e2717eadf660c769
+ metadata.gz: fee92e2a0ffbe5c606663d55c7de4773ebd5d44b976f3523cd467dde981347a87ed752f803c0e4206070ca025a6db478703a7e0b4662771289467ed831132c57
+ data.tar.gz: 546fa4062b3ab8e6b2820c29062f3ad615b3ab43e1acc5209570e614c68ae4479241a73fe3b090064e1b6b424acd5734d17b52d3b72caf200193468bc315980f
data/LICENSE.txt ADDED
@@ -0,0 +1,13 @@
+ Copyright (c) 2014- Takahiro Kamatani
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/NOTICE ADDED
@@ -0,0 +1,6 @@
+ fluent-plugin-querycombiner
+
+ Copyright 2014- Takahiro Kamatani
+
+ The portion of this product was originally developed by Yuyang Lan, for fluent-plugin-onlineuser.
+ fluent-plugin-onlineuser is licensed under Apache License v2.0 and available at https://github.com/y-lan/fluent-plugin-onlineuser
data/README.md CHANGED
@@ -8,60 +8,216 @@ This plugin is based on [fluent-plugin-onlineuser](https://github.com/y-lan/flue
  ## Requirement
  * a running Redis

+ ## Installation

- ## Get started
+ ```
+ $ fluent-gem install fluent-plugin-querycombiner
+ ```
+
+
+ ## Tutorial
+ ### Simple combination
+
+ Suppose you have a sequence of event messages like:
+
+ ```
+ {
+ 'event_id': '01234567',
+ 'status': 'event-start',
+ 'started_at': '2001-02-03T04:05:06Z',
+ }
+ ```
+
+ and:
+
+ ```
+ {
+ 'event_id': '01234567',
+ 'status': 'event-finish',
+ 'finished_at': '2001-02-03T04:15:11Z',
+ }
+ ```
+
+ Now you can combine these messages with this configuration:

  ```
- <match combiner.**>
+ <match event.**>
  type query_combiner
  tag combined.test

- flush_interval 0.5
-
+ # redis settings
  host localhost
  port 6379
  db_index 0
- redis_retry 3

- query_identify session-id, task-id
- query_ttl 3 # sec
- buffer_size 10 # queries
+ query_identify event_id # field to combine together
+ query_ttl 3600 # messages time-to-live[sec]
+ buffer_size 1000 # max queries to store in redis

  <catch>
- condition status == 'recog-init'
- replace time => time_init, status => status_init
+ condition status == 'event-start'
  </catch>

- <prolong>
- condition status == 'recog-break'
- </prolong>
+ <dump>
+ condition status == 'event-finish'
+ </dump>
+
+ </match>
+ ```
+
+ The combined result will be:
+
+ ```
+ {
+ "event_id": "01234567",
+ "status": "event-finish",
+ "started_at": "2001-02-03T04:05:06Z",
+ "finished_at": "2001-02-03T04:15:11Z"
+ }
+ ```
+
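The combined record is emitted with the configured `tag` (here `combined.test`). As a minimal usage sketch (not part of this package's README), you could route that tag to Fluentd's built-in stdout output to inspect the results:

```
<match combined.test>
  type stdout
</match>
```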
+ ### Replace some field names
+
+ If messages have the same fields, they are overwritten during the combination process. You can use a `replace` directive in the `<catch>` and `<dump>` blocks to avoid overwriting such fields.
+
+ For example, suppose you have event messages like:
+
+ ```
+ {
+ 'event_id': '01234567',
+ 'status': 'event-start',
+ 'time': '2001-02-03T04:05:06Z',
+ }
+ ```
+
+ and:
+
+ ```
+ {
+ 'event_id': '01234567',
+ 'status': 'event-finish',
+ 'time': '2001-02-03T04:15:11Z',
+ }
+ ```
+
+ You can keep the `time` fields defined in both the `event-start` and `event-finish` messages by using a `replace` directive.
+
+ ```
+ <match event.**>
+ (...type, tag and redis configuration...)
+
+ query_identify event_id # field to combine together
+ query_ttl 3600 # messages time-to-live[sec]
+ buffer_size 1000 # max queries to store in redis
+
+ <catch>
+ condition status == 'event-start'
+ replace time => time_start
+
+ </catch>

  <dump>
- condition status == 'recog-finish'
- replace time => time_finish, result => result_finish, status => status_finish
+ condition status == 'event-finish'
+ replace time => time_finish
  </dump>

+ </match>
+ ```
+
+ The combined result will be:
+
+ ```
+ {
+ "event_id": "01234567",
+ "status": "event-finish",
+ "time_start": "2001-02-03T04:05:06Z",
+ "time_finish": "2001-02-03T04:15:11Z"
+ }
+ ```
+
+ ### \<release\> block
+
+ In the previous examples, messages with `"status": "event-start"` are watched by the plugin immediately.
+
+ Suppose some error events occur and you don't want to watch or combine those messages.
+
+ In this case the `<release>` block is useful.
+
+ For example, suppose your error messages look like:
+
+ ```
+ {
+ "event_id": "01234567",
+ "status": "event-error",
+ "time": "2001-02-03T04:05:06Z"
+ }
+ ```
+
+ Append this `<release>` block to the configuration, and error events will not be watched or combined:
+
+ ```
  <release>
- condition status == 'recog-error'
+ condition status == 'event-error'
  </release>
+ ```

- </match>
+ You cannot use a `replace` directive in the `<release>` block.
+
+
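To illustrate, here is a sketch based on the bundled `test_readme_sample_release` test: an `event-start` message followed by an `event-error` message for the same `event_id` releases the stored query, and nothing is emitted for that `event_id`:

```
{"event_id": "01234567", "status": "event-start", "time": "2001-02-03T04:05:06Z"}
{"event_id": "01234567", "status": "event-error", "time": "2001-02-03T04:05:06Z"}

# => no combined record is emitted for event_id 01234567
```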
+ ### \<prolong\> block
+
+ Suppose your `query_ttl` is **600** (10 minutes) and most events finish within **10 minutes**, but occasionally very long events occur that take about **1 hour** to finish. These very long events send `status: 'event-continue'` messages every 5 minutes as a keep-alive.
+
+ In this case you can use the `<prolong>` block to reset the expiration time.
+
+ ```
+ <prolong>
+ condition status == 'event-continue'
+ </prolong>
  ```

+ You cannot use a `replace` directive in the `<prolong>` block.
+
+ Also, messages matched by a `<prolong>` block are not combined into the result.
+
+
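For example, here is a sketch mirroring the bundled `test_readme_sample_prolong` test: an `event-start`, several `event-continue` keep-alives, and a final `event-finish` for the same `event_id` produce a single combined record built only from the `event-start` and `event-finish` messages (here without `replace`, so `time` comes from the finish message):

```
{"event_id": "01234567", "status": "event-start",    "time": "2001-02-03T04:05:06Z"}
{"event_id": "01234567", "status": "event-continue", "time": "2001-02-03T04:05:07Z"}
{"event_id": "01234567", "status": "event-continue", "time": "2001-02-03T04:05:08Z"}
{"event_id": "01234567", "status": "event-finish",   "time": "2001-02-03T04:05:11Z"}

# combined result (the event-continue messages are not merged)
{"event_id": "01234567", "status": "event-finish", "time": "2001-02-03T04:05:11Z"}
```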
  ## Configuration
- #### host, port, db_index
+
+ ### tag
+ The tag prefix for emitted event messages. By default it's `query_combiner`.
+
+ ### host, port, db_index
  The basic information for connecting to Redis. By default it's **redis://127.0.0.1:6379/0**

- #### redis_retry
+ ### redis_retry
  How many times the plugin should retry a Redis operation before raising an error.
  By default it's 3.

- ### session_timeout
- The inactive expire time in seconds. By default it's 1800 (30 minutes).
+ ### query_ttl
+ The inactive expiration time in seconds. By default it's **1800** (30 minutes).

+ ### buffer_size
+ The maximum number of queries to store in Redis. By default it's **1000**.
+
+ ### remove_interval
+ The interval at which queries that have expired or overflowed (as configured by `query_ttl` and `buffer_size`) are deleted. By default it's **10** [sec].
+
+
+ ### redis_key_prefix
+
+ The key prefix for data stored in Redis. By default it's `query_combiner:`.
+
+ ### query_identify
+
+ Indicates how to extract the query identity from the event record.
+ It can be a single field name or multiple field names joined by commas (`,`).
+
+
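Putting the options above together, here is a minimal sketch of a complete configuration (values other than the documented defaults are illustrative, and `session_id` / `task_id` are example field names, not part of this package):

```
<match event.**>
  type query_combiner
  tag combined.events

  # Redis connection (defaults shown)
  host 127.0.0.1
  port 6379
  db_index 0
  redis_retry 3
  redis_key_prefix query_combiner:

  # a single field, or several field names joined by commas
  query_identify session_id, task_id
  # expire inactive queries after 30 minutes (default 1800)
  query_ttl 1800
  # keep at most 1000 queries in Redis (default 1000)
  buffer_size 1000
  # sweep expired/overflowed queries every 10 seconds (default 10)
  remove_interval 10

  <catch>
    condition status == 'start'
  </catch>
  <dump>
    condition status == 'finish'
  </dump>
</match>
```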
+ ## TODO
+
+ - Multi-query combination
+ - Support field names containing hyphens (`-`) and dollar signs (`$`)

- ### tag
- The tag prefix for emitted event messages. By default it's `query_combiner`.

  ## Copyright

@@ -3,7 +3,7 @@ $:.push File.expand_path("../lib", __FILE__)

  Gem::Specification.new do |spec|
  spec.name = "fluent-plugin-querycombiner"
- spec.version = "0.0.0.pre"
+ spec.version = "0.0.1"
  spec.authors = ["Takahiro Kamatani"]
  spec.email = ["buhii314@gmail.com"]
  spec.description = %q{Fluent plugin to combine multiple queries.}
@@ -12,7 +12,7 @@ module Fluent
  config_param :redis_key_prefix, :string, :default => 'query_combiner:'
  config_param :query_identify, :string, :default => 'session-id'
  config_param :query_ttl, :integer, :default => 1800
- config_param :buffer_size, :integer, :default => 100
+ config_param :buffer_size, :integer, :default => 1000

  config_param :flush_interval, :integer, :default => 60
  config_param :remove_interval, :integer, :default => 10
@@ -36,7 +36,10 @@ module Fluent

  # Create functions for each conditions
  @_cond_funcs = {}
- @_replace_keys = {}
+ @_replace_keys = {
+ 'catch' => {},
+ 'dump' => {},
+ }

  def get_arguments(eval_str)
  eval_str.scan(/[\"\']?[a-zA-Z][\w\d\.\-\_]*[\"\']?/).uniq.select{|x|
@@ -88,6 +91,10 @@ module Fluent
  end
  }
  }
+
+ if not (@_cond_funcs.has_key?('catch') and @_cond_funcs.has_key?('dump'))
+ raise Fluent::ConfigError, "Must have <catch> and <dump> blocks"
+ end
  end

  def has_all_keys?(record, argv)
@@ -1,5 +1,6 @@
  # -*- coding: utf-8 -*-
  require 'helper'
+ require 'redis'

  class QueryCombinerOutputTest < Test::Unit::TestCase
  def setup
@@ -7,19 +8,321 @@ class QueryCombinerOutputTest < Test::Unit::TestCase
  end

  CONFIG = %[
+ query_identify event_id
+ <catch>
+ condition status == 'start'
+ replace time => time_start
+ </catch>
+
+ <dump>
+ condition status == 'finish'
+ replace time => time_finish
+ </dump>
  ]

- def create_driver(conf = CONFIG, tag='test.input')
- Fluent::Test::OutputTestDriver.new(Fluent::QueryCombinerOutput, tag).configure(conf)
+ @redis
+
+ def setup
+ Fluent::Test.setup
+ @redis = Redis.new(:host => 'localhost', :port => 6379, :thread_safe => true, :db => 0)
+ end
+
+ def teardown
+ @redis.quit
+ end
+
+ def create_driver(conf, tag='test')
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::QueryCombinerOutput, tag).configure(conf)
  end

  def test_configure
  assert_raise(Fluent::ConfigError) {
  d = create_driver('')
  }
+ # Must have <catch> and <dump> conditions
+ assert_raise(Fluent::ConfigError) {
+ d = create_driver %[
+ query_identify event_id
+ ]
+ }
+ assert_raise(Fluent::ConfigError) {
+ d = create_driver %[
+ query_identify event_id
+ <catch>
+ condition status == 'start'
+ </catch>
+ ]
+ }
+ assert_raise(Fluent::ConfigError) {
+ d = create_driver %[
+ query_identify event_id
+ <dump>
+ condition status == 'finish'
+ </dump>
+ ]
+ }
+
+ # `replace` configuration only allowed in <catch> and <dump>
+ assert_raise(Fluent::ConfigError) {
+ d = create_driver %[
+ query_identify event_id
+ <catch>
+ condition status == 'start'
+ replace hoge => hoge_start
+ </catch>
+ <release>
+ condition status == 'error'
+ replace hoge => hoge_error
+ </release>
+ <dump>
+ condition status == 'finish'
+ replace hoge => hoge_finish
+ </dump>
+ ]
+ }
+
+ end
+
+ def test_readme_sample_basic_example
+ d = create_driver %[
+ query_identify event_id
+ query_ttl 3600 # time to live[sec]
+ buffer_size 1000 # queries
+
+ <catch>
+ condition status == 'event-start'
+ </catch>
+
+ <dump>
+ condition status == 'event-finish'
+ </dump>
+ ]
+ time = Time.now.to_i
+ d.emit({"event_id"=>"01234567", "status"=>"event-start", "started_at"=>"2001-02-03T04:05:06Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-finish", "finished_at"=>"2001-02-03T04:05:06Z"}, time)
+ d.run
+ assert_equal d.emits.length, 1
+ assert_equal d.emits[0][2], {
+ "event_id"=>"01234567",
+ "status"=>"event-finish",
+ "started_at"=>"2001-02-03T04:05:06Z",
+ "finished_at"=>"2001-02-03T04:05:06Z"}
+ end
+
+ def test_readme_sample_replace_sentence
+ d = create_driver %[
+ query_identify event_id
+ query_ttl 3600 # time to live[sec]
+ buffer_size 1000 # queries
+
+ <catch>
+ condition status == 'event-start'
+ replace time => time_start
+ </catch>
+
+ <dump>
+ condition status == 'event-finish'
+ replace time => time_finish
+ </dump>
+ ]
+ time = Time.now.to_i
+ d.emit({"event_id"=>"01234567", "status"=>"event-start", "time"=>"2001-02-03T04:05:06Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-finish", "time"=>"2001-02-03T04:15:11Z"}, time)
+ d.run
+ assert_equal d.emits.length, 1
+ assert_equal d.emits[0][2], {
+ "event_id"=>"01234567",
+ "status"=>"event-finish",
+ "time_start"=>"2001-02-03T04:05:06Z",
+ "time_finish"=>"2001-02-03T04:15:11Z"}
+ end
+
+ def test_readme_sample_release
+ d = create_driver %[
+ query_identify event_id
+ query_ttl 3600 # time to live[sec]
+ buffer_size 1000 # queries
+
+ <catch>
+ condition status == 'event-start'
+ </catch>
+
+ <dump>
+ condition status == 'event-finish'
+ </dump>
+
+ <release>
+ condition status == 'event-error'
+ </release>
+ ]
+ time = Time.now.to_i
+ d.emit({"event_id"=>"01234567", "status"=>"event-start", "time"=>"2001-02-03T04:05:06Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-error", "time"=>"2001-02-03T04:05:06Z"}, time)
+ d.run
+ assert_equal d.emits.length, 0
+ end
+
+ def test_readme_sample_prolong
+ d = create_driver %[
+ query_identify event_id
+ query_ttl 3600 # time to live[sec]
+ buffer_size 1000 # queries
+
+ <catch>
+ condition status == 'event-start'
+ </catch>
+
+ <dump>
+ condition status == 'event-finish'
+ </dump>
+
+ <prolong>
+ condition status == 'event-continue'
+ </prolong>
+
+ <release>
+ condition status == 'event-error'
+ </release>
+ ]
+ time = Time.now.to_i
+ d.emit({"event_id"=>"01234567", "status"=>"event-start", "time"=>"2001-02-03T04:05:06Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-continue", "time"=>"2001-02-03T04:05:07Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-continue", "time"=>"2001-02-03T04:05:08Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-continue", "time"=>"2001-02-03T04:05:09Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-continue", "time"=>"2001-02-03T04:05:10Z"}, time)
+ d.emit({"event_id"=>"01234567", "status"=>"event-finish", "time"=>"2001-02-03T04:05:11Z"}, time)
+ d.run
+ assert_equal d.emits.length, 1
+ assert_equal d.emits[0][2], {
+ "event_id"=>"01234567",
+ "status"=>"event-finish",
+ "time"=>"2001-02-03T04:05:11Z"}
+ end
+
+ def test_simple_events
+ d = create_driver CONFIG
+ time = Time.now.to_i
+ d.emit({"event_id"=>"001", "status"=>"start", "time"=>"21:00"}, time)
+ d.emit({"event_id"=>"002", "status"=>"start", "time"=>"22:00"}, time)
+ d.emit({"event_id"=>"001", "status"=>"finish", "time"=>"23:00"}, time)
+ d.emit({"event_id"=>"002", "status"=>"finish", "time"=>"24:00"}, time)
+ d.run
+ assert_equal d.emits[0][2], {
+ "event_id"=>"001",
+ "status"=>"finish",
+ "time_start"=>"21:00",
+ "time_finish"=>"23:00"}
+ assert_equal d.emits[1][2], {
+ "event_id"=>"002",
+ "status"=>"finish",
+ "time_start"=>"22:00",
+ "time_finish"=>"24:00"}
+ end
+
+ def test_catch_dump_release
+ d = create_driver %[
+ buffer_size 1001
+ query_identify event_id
+
+ <catch>
+ condition status == 'start'
+ replace time => time_start
+ </catch>
+
+ <dump>
+ condition status == 'finish'
+ replace time => time_finish
+ </dump>
+
+ <release>
+ condition status == 'error'
+ </release>
+ ]
+ def emit(d, event_id, status, t)
+ d.emit({"event_id"=>event_id, "status"=>status, "time"=>t}, Time.now.to_i)
+ end
+
+ (0..1000).each { |num|
+ emit(d, num, "start", "21:00")
+ }
+ finish_list = []
+ (0..1000).each { |num|
+ status = if rand >= 0.5 then
+ finish_list.push(num)
+ "finish"
+ else
+ "error"
+ end
+ emit(d, num, status, "22:00")
+ }
+
+ d.run
+ finish_list.each_with_index { |num, index|
+ assert_equal d.emits[index][2], {
+ "event_id" => num,
+ "status" => "finish",
+ "time_start" => "21:00",
+ "time_finish" => "22:00",
+ }
+ }
+ assert_equal d.emits.size, finish_list.size
+ end
+
+ def test_multi_query_identifier
+ d = create_driver %[
+ buffer_size 1001
+ query_identify aid, bid, cid
+
+ <catch>
+ condition status == 'start'
+ </catch>
+
+ <dump>
+ condition status == 'finish'
+ </dump>
+ ]
+ def emit(d, aid, bid, cid, status, t)
+ d.emit(
+ {"aid"=>aid, "bid"=>bid, "cid"=>cid, "status"=>status, "time"=>t},
+ Time.now.to_i
+ )
+ end
+
+ finish_list = []
+ (0..1000).each { |num|
+ aid = (rand * 1000).to_i
+ bid = (rand * 1000).to_i
+ cid = (rand * 1000).to_i
+ emit(d, aid, bid, cid, "start", "22:00")
+ finish_list.push([aid, bid, cid])
+ }
+
+ t_list = []
+ finish_list.each { |ids|
+ t = (rand * 100000).to_i
+ emit(d, ids[0], ids[1], ids[2], "finish", t)
+ t_list.push(t)
+ }
+ d.run
+
+ finish_list.each_with_index { |ids, index|
+ assert_equal d.emits[index][2], {
+ "aid" => ids[0],
+ "bid" => ids[1],
+ "cid" => ids[2],
+ "status" => "finish",
+ "time" => t_list[index]
+ }
+ }
+ assert_equal d.emits.size, finish_list.size
+ end
+
+ def test_buffer_size
+
  end

- def test_write
- d = create_driver
+ def test_query_ttl
+
  end
+
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-querycombiner
  version: !ruby/object:Gem::Version
- version: 0.0.0.pre
+ version: 0.0.1
  platform: ruby
  authors:
  - Takahiro Kamatani
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-07-11 00:00:00.000000000 Z
+ date: 2014-07-15 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake
@@ -61,6 +61,8 @@ extra_rdoc_files: []
  files:
  - ".gitignore"
  - Gemfile
+ - LICENSE.txt
+ - NOTICE
  - README.md
  - Rakefile
  - fluent-plugin-querycombiner.gemspec
@@ -82,9 +84,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
  rubyforge_project:
  rubygems_version: 2.2.2