nchan_tools 0.1.5 → 0.1.10

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: fc307ac259708a9a1179d17d2faeb4bf4ae9942b9f98560aac6272d849500b6f
-  data.tar.gz: e93c9a63ec00ea307fb80ed6b0ccfd8e174099a6fd588a53cffd3bb2147dbce8
+  metadata.gz: e17f44cb591b9f74d1791e1f9ec8574613fbc840c6aac91953c8a526a8a63006
+  data.tar.gz: 405aabdbc70b14571c6cc545ca13f0ea7e5ad65b130ab37194fee1ed7ccf7d5c
 SHA512:
-  metadata.gz: 4d2dc2bdf4e8fc50f506453433a780b8428d66cbe0c3e9cb066198fed262abb3920d37d76d58b1d95c185f7eb0d5cb54161a1a7a9a20a8043c4a22a82bb37d6f
-  data.tar.gz: 5240f7aaf08af1cad360f07c43eba880bed7840aa547535d9562ce55f9f262d2e552e3e19a28aae3bb57a17e816c69f49f46ee94a5ec33efdfc4344ae8101b4f
+  metadata.gz: 31e391ad81b9a43519d014a538f77b87b25706a009180b83239eea014c174497d8d7e3c9ca26d173b075cd42cba4e82ab92dc696ce459058080a325c29f808e3
+  data.tar.gz: 66654049eaf6f7f26e16fd98df88b1570a5d80f7168cc2f774e5d7168ae4d42a668b1384c3280fd82b8473f78e63ec4774b005f9971be4db7e9a2d4d65a01264
@@ -9,7 +9,7 @@ require "HDRHistogram"
 
 verbose = false
 save_csv = false
-
+csv_columns = NchanTools::Benchmark::CSV_COLUMNS_DEFAULT
 init_args = {}
 
 opt_parser=OptionParser.new do |opts|
@@ -19,6 +19,9 @@ opt_parser=OptionParser.new do |opts|
   opts.on("--csv FILENAME", "Append results to file in CSV format") do |f|
     save_csv = f
   end
+  opts.on("--csv-columns col1,col2,...", "csv columns list") do |f|
+    csv_columns = f.split(/\W+/).map(&:to_sym)
+  end
   opts.on("-t", "--time TIME", "Time to run benchmark") do |v|
     init_args[:time] = v
   end
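
For reference, a minimal sketch (outside the diff) of how the new --csv-columns handler above turns its argument into column symbols; the input string is illustrative:

  "channels, subscribers,runtime".split(/\W+/).map(&:to_sym)
  # => [:channels, :subscribers, :runtime]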
@@ -43,6 +46,7 @@ urls += ARGV
 begin
   urls += STDIN.read_nonblock(100000).split /\s*\n+\s*/
 rescue IO::WaitReadable
+rescue EOFError
 end
 
 urls.uniq!
@@ -50,5 +54,5 @@ urls.uniq!
 benchan = NchanTools::Benchmark.new urls, init_args
 benchan.run
 benchan.results
-benchan.append_csv_file(save_csv) if save_csv
+benchan.append_csv_file(save_csv, csv_columns) if save_csv
 
@@ -54,7 +54,7 @@ puts "Publishing to #{url}."
 
 loopmsg=("\r"*20) + "sending message #"
 
-pub = Publisher.new url, nostore: true, timeout: timeout, verbose: verbose, websocket: websocket
+pub = NchanTools::Publisher.new url, nostore: true, timeout: timeout, verbose: verbose, websocket: websocket
 pub.accept=accept
 pub.nofail=true
 repeat=true
@@ -0,0 +1,57 @@
+#!/usr/bin/env ruby
+
+require "redis"
+require "optparse"
+require 'nchan_tools/rdsck'
+
+opt = {
+  url: "redis://127.0.0.1:6379/",
+  verbose: false,
+  command: nil
+}
+
+opt_parser=OptionParser.new do |opts|
+  opts.on("--url", "--url REDIS_URL (#{opt[:url]})", "Redis server and port..") do |v|
+    opt[:url]=v
+  end
+  opts.on("-q", "--quiet", "output only results without any other information") do
+    opt[:quiet]=false
+    opts[:verbose] = !opt[:quiet]
+  end
+  opts.on("--list-channels", "list all Nchan channels on Redis server or cluster") do |v|
+    opt[:command]=:filter_channels
+  end
+  opts.on("--filter-channels-min-subscribers=[NUMBER]") do |v|
+    opt[:command]=:filter_channels
+    opt[:min_subscribers]=v.to_i
+  end
+end
+opt_parser.banner= <<~EOB
+  Debugging tools for the Redis server or cluster backing Nchan.
+  Usage: nchan-redis-debug [options]
+
+  WARNING: --list-channels and --filter-channels-* options for this tool
+  use the Redis SCAN command. This may increase CPU load on the Redis
+  server and may affect the latency of Nchan requests.
+  USE THESE OPTIONS WITH GREAT CARE
+
+  Example:
+    nchan-redis-debug --url redis:// --filter-channels-min-subscribers=10
+EOB
+opt_parser.parse!
+
+rdsck = Rdsck.new opt
+if not rdsck.connect
+  STDERR.puts "failed to connect to #{opt[:url]}"
+  exit 1
+end
+
+case opt[:command]
+when :filter_channels
+  puts "# scanning for channels #{opt[:min_subscribers] && "with subscribers >= #{opt[:min_subscribers]}"}"
+  chans = rdsck.filter_channels(min_subscribers: opt[:min_subscribers])
+  puts "# found #{chans.count} channel#{chans.count != 1 && "s"}#{chans.count == 0 ? "." : ":"}"
+  puts chans.join("\n")
+else
+  puts "Nothing to do"
+end
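
The script above is a thin wrapper around the Rdsck class added further down; a hedged sketch of the equivalent direct usage (URL and subscriber threshold are illustrative):

  require 'nchan_tools/rdsck'
  rdsck = Rdsck.new url: "redis://127.0.0.1:6379/", verbose: true
  abort "connection failed" unless rdsck.connect
  puts rdsck.filter_channels(min_subscribers: 10)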
@@ -33,7 +33,7 @@ opt_parser=OptionParser.new do |opts|
   opts.on("-p", "--parallel NUM (#{par})", "number of parallel clients"){|v| par = v.to_i}
   opts.on("-t", "--timeout SEC (#{opt[:timeout]})", "Long-poll timeout"){|v| opt[:timeout] = v}
   opts.on("-q", "--quit STRING (#{opt[:quit_message]})", "Quit message"){|v| opt[:quit_message] = v}
-  opts.on("-c", "--client STRING (#{opt[:client]})", "sub client (one of #{Subscriber::Client.unique_aliases.join ', '})") do |v|
+  opts.on("-c", "--client STRING (#{opt[:client]})", "sub client (one of #{NchanTools::Subscriber::Client.unique_aliases.join ', '})") do |v|
     opt[:client] = v.to_sym
   end
   opts.on("--content-type", "show received content-type"){|v| print_content_type = true}
@@ -68,7 +68,7 @@ if origin
   opt[:extra_headers]['Origin'] = origin
 end
 
-sub = Subscriber.new url, par, opt
+sub = NchanTools::Subscriber.new url, par, opt
 
 
 NOMSGF="\r"*30 + "Received message %i, len:%i"
@@ -111,7 +111,7 @@ sub.on_message do |msg|
 end
 
 sub.on_failure do |err_msg|
-  if Subscriber::IntervalPollClient === sub.client
+  if NchanTools::Subscriber::IntervalPollClient === sub.client
     unless err_msg.match(/\(code 304\)/)
       false
     end
@@ -5,6 +5,8 @@ require 'json'
 
 module NchanTools
 class Benchmark
+  CSV_COLUMNS_ALL=%i[servers runtime channels channels_K channels_M subscribers message_length messages_sent messages_send_confirmed messages_send_unconfirmed messages_send_failed messages_received messages_unreceived messages_send_rate messages_receive_rate messages_send_rate_per_channel messages_receive_rate_per_subscriber message_publishing_avg message_publishing_99th message_publishing_max message_publishing_stddev message_publishing_count message_delivery_avg message_delivery_99th message_delivery_max message_delivery_stddev message_delivery_count]
+  CSV_COLUMNS_DEFAULT=%i[servers runtime channels subscribers message_length messages_sent messages_send_confirmed messages_send_unconfirmed messages_send_failed messages_received messages_unreceived messages_send_rate messages_receive_rate messages_send_rate_per_channel messages_receive_rate_per_subscriber message_publishing_avg message_publishing_99th message_publishing_max message_publishing_stddev message_publishing_count message_delivery_avg message_delivery_99th message_delivery_max message_delivery_stddev message_delivery_count]
   class BenchmarkError < StandardError
   end
   def initialize(urls, init_args=nil)
@@ -19,9 +21,7 @@ class Benchmark
     @failed = {}
 
     @init_args = init_args
-
-    @hdrh_publish = nil
-    @hdrh_receive = nil
+    @histograms = {}
 
     subs = []
   end
@@ -46,17 +46,27 @@ class Benchmark
       puts "  #{sub.url} ok"
       @ready +=1
       if @ready == @n
-        control :run
         puts "start benchmark..."
+        control :run
       end
     when /^RUNNING/
       puts "  #{sub.url} running"
     when /^RESULTS\n/
       msg = msg[8..-1]
       parsed = JSON.parse msg
+
+      #backwards-compatible histogram fields
+      parsed["histograms"]||={}
+      if parsed[:message_publishing_histogram] then
+        parsed[:histograms]["message publishing"]=parsed[:message_publishing_histogram]
+      end
+      if parsed[:message_delivery_histogram] then
+        parsed[:histograms]["message delivery"]=parsed[:message_delivery_histogram]
+      end
+
       @results[sub.url] = parsed
       @results[sub.url]["raw"] = msg if @results[sub.url]
-      1+1
+      sub.client.send_close
     when /^INITIALIZING/
       #do nothing
     else
@@ -124,8 +134,7 @@ class Benchmark
     @messages_send_failed = 0
     @messages_received = 0
     @messages_unreceived = 0
-    @hdrh_publish = nil
-    @hdrh_receive = nil
+    @histograms = {}
     @results.each do |url, data|
       @channels += data["channels"]
       @runtime << data["run_time_sec"]
@@ -137,27 +146,21 @@ class Benchmark
       @messages_send_failed += data["messages"]["send_failed"]
       @messages_received += data["messages"]["received"]
       @messages_unreceived += data["messages"]["unreceived"]
-
-      if data["message_publishing_histogram"]
-        hdrh = HDRHistogram.unserialize(data["message_publishing_histogram"], unit: :ms, multiplier: 0.001)
-        if @hdrh_publish
-          @hdrh_publish.merge! hdrh
-        else
-          @hdrh_publish = hdrh
-        end
-      end
-      if data["message_delivery_histogram"]
-        hdrh = HDRHistogram.unserialize(data["message_delivery_histogram"], unit: :ms, multiplier: 0.001)
-        if @hdrh_receive
-          @hdrh_receive.merge! hdrh
-        else
-          @hdrh_receive = hdrh
+      if data["histograms"]
+        data["histograms"].each do |name, str|
+          name = name.to_sym
+          hdrh = HDRHistogram.unserialize(str, unit: :ms, multiplier: 0.001)
+          if @histograms[name]
+            @histograms[name].merge! hdrh
+          else
+            @histograms[name] = hdrh
+          end
         end
       end
     end
 
-    @message_length = @message_length.sum.to_f / @message_length.size
-    @runtime = @runtime.sum.to_f / @runtime.size
+    @message_length = @message_length.inject(0, :+).to_f / @message_length.size
+    @runtime = @runtime.inject(0, :+).to_f / @runtime.size
 
     fmt = <<-END.gsub(/^ {6}/, '')
       Nchan servers: %d
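
Side note on the sum-to-inject change above: inject(0, :+) yields the same average and also runs on Rubies older than 2.4, which introduced Array#sum. A tiny illustrative example:

  runtimes = [10.2, 9.8, 10.0]                 # illustrative values
  runtimes.inject(0, :+).to_f / runtimes.size  # => 10.0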
@@ -187,26 +190,50 @@ class Benchmark
       (@messages_sent.to_f* 60)/(@runtime * @channels),
       (@messages_received.to_f * 60)/(@runtime * @subscribers)
     ]
-
-    out << hdrhistogram_stats("message publishing latency", @hdrh_publish) if @hdrh_publish
-    out << hdrhistogram_stats("message delivery latency", @hdrh_receive) if @hdrh_receive
+    @histograms.each do |name, histogram|
+      out << hdrhistogram_stats("#{name} latency:", histogram)
+    end
 
     puts out
   end
 
-  def append_csv_file(file)
+  def append_csv_file(file, columns=Benchmark::CSV_COLUMNS_DEFAULT)
     require "csv"
-    write_headers = File.zero?(file)
-    headers = %i[servers runtime channels subscribers message_length messages_sent messages_send_confirmed messages_send_unconfirmed messages_send_failed messages_send_received messages_send_unreceived messages_send_rate messages_receive_rate messages_send_rate_per_channel messages_receive_rate_per_subscriber message_publishing_response_avg message_publishing_response_99percentile message_publishing_response_max message_publishing_response_stddev message_publishing_response_count message_delivery_avg message_delivery_99percentile message_delivery_max message_delivery_stddev message_delivery_count]
+    write_headers = File.zero?(file) || !File.exists?(file)
+    headers = columns
+    vals = {
+      servers: @n,
+      runtime: @runtime,
+      channels: @channels,
+      channels_K: @channels/1000.0,
+      channels_M: @channels/1000000.0,
+      subscribers: @subscribers * @channels,
+      message_length: @message_length,
+      messages_sent: @messages_sent,
+      messages_send_confirmed: @messages_send_confirmed,
+      messages_send_unconfirmed: @messages_send_unconfirmed,
+      messages_send_failed: @messages_send_failed,
+      messages_received: @messages_received,
+      messages_unreceived: @messages_unreceived,
+      messages_send_rate: @messages_sent.to_f/@runtime,
+      messages_receive_rate: @messages_received.to_f/@runtime,
+      messages_send_rate_per_channel: (@messages_sent.to_f* 60)/(@runtime * @channels),
+      messages_receive_rate_per_subscriber: (@messages_received.to_f * 60)/(@runtime * @subscribers * @channels)
+    }
+    @histograms.each do |name, histogram|
+      vals["#{name}_avg".to_sym]=histogram.mean
+      vals["#{name}_95th".to_sym]=histogram.percentile(95.0)
+      vals["#{name}_99th".to_sym]=histogram.percentile(99.0)
+      vals["#{name}_max".to_sym]=histogram.max
+      vals["#{name}_stddev".to_sym]=histogram.stddev
+      vals["#{name}_count".to_sym]=histogram.count
+    end
+
+    row = []
+    headers.each { |header| row << (vals[header.to_sym] || "-")}
+
     csv = CSV.open(file, "a", {headers: headers, write_headers: write_headers})
-    csv << [@n, @runtime, @channels, @subscribers,
-      @message_length, @messages_sent, @messages_send_confirmed, @messages_send_unconfirmed, @messages_send_failed,
-      @messages_received, @messages_unreceived,
-      @messages_sent.to_f/@runtime, @messages_received.to_f/@runtime,
-      (@messages_sent.to_f* 60)/(@runtime * @channels), (@messages_received.to_f * 60)/(@runtime * @subscribers),
-      @hdrh_publish.mean, @hdrh_publish.percentile(99.0), @hdrh_publish.max, @hdrh_publish.stddev, @hdrh_publish.count,
-      @hdrh_receive.mean, @hdrh_receive.percentile(99.0), @hdrh_receive.max, @hdrh_receive.stddev, @hdrh_receive.count
-    ]
+    csv << row
     csv.flush
     csv.close
   end
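
A hedged usage sketch of the updated append_csv_file signature with an explicit column list; the file name and column subset are illustrative, and the column names come from CSV_COLUMNS_ALL above:

  benchan = NchanTools::Benchmark.new urls, init_args
  benchan.run
  benchan.results
  benchan.append_csv_file("results.csv", [:servers, :runtime, :channels_K, :messages_send_rate])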
@@ -487,6 +487,14 @@ class Subscriber
     @ws.binary data
   end
 
+  def send_ping(msg=nil)
+    @ws.ping(msg)
+  end
+
+  def send_close(reason=nil, code=1000)
+    @ws.close(reason, code)
+  end
+
   def write(data)
     @sock.write data
   end
@@ -579,11 +587,11 @@ class Subscriber
   end
 
   bundle.ws.on :ping do |ev|
-    @on_ping.call if @on_ping
+    @subscriber.on(:ping).call ev, bundle
   end
 
   bundle.ws.on :pong do |ev|
-    @on_pong.call if @on_pong
+    @subscriber.on(:pong).call ev, bundle
   end
 
   bundle.ws.on :error do |ev|
@@ -645,13 +653,6 @@ class Subscriber
     end
   end
 
-  def on_ping
-    @on_ping = Proc.new if block_given?
-  end
-  def on_pong
-    @on_pong = Proc.new if block_given?
-  end
-
   def listen(bundle)
     while @ws[bundle]
       begin
@@ -681,10 +682,10 @@ class Subscriber
   private :ws_client
 
   def send_ping(data=nil)
-    ws_client.ping data
+    ws_client.send_ping data
   end
-  def send_close(code=1000, reason=nil)
-    ws_client.send_close code, reason
+  def send_close(reason=nil, code=1000)
+    ws_client.send_close reason, code
   end
   def send_data(data)
     ws_client.send_data data
@@ -694,17 +695,18 @@ class Subscriber
   end
 
   def close(bundle)
-    if bundle
+    if bundle then
       @ws.delete bundle
       bundle.sock.close unless bundle.sock.closed?
     end
     @connected -= 1
-    if @connected <= 0
-      sleep 0.1 until @ws.count == 0
+    if @connected <= 0 then
+      until @ws.count == 0 do
+        sleep 0.1
+      end
       @cooked.signal true
     end
   end
-
 end
 
 class LongPollClient < Client
@@ -1518,6 +1520,8 @@ class Subscriber
 
   attr_accessor :url, :client, :messages, :max_round_trips, :quit_message, :errors, :concurrency, :waiting, :finished, :client_class, :log
   def initialize(url, concurrency=1, opt={})
+    @empty_block = Proc.new {}
+    @on={}
     @care_about_message_ids=opt[:use_message_id].nil? ? true : opt[:use_message_id]
     @url=url
     @quit_message = opt[:quit_message]
@@ -1612,6 +1616,14 @@ class Subscriber
     @client.poke until_what, timeout
   end
 
+  def on(evt_name = nil, &block)
+    if block_given?
+      @on[evt_name.to_sym] = block
+    else
+      @on[evt_name.to_sym] or @empty_block
+    end
+  end
+
   def on_message(msg=nil, bundle=nil, &block)
     #puts "received message #{msg && msg.to_s[0..15]}"
     if block_given?
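
A hedged usage sketch of the new generic event registration; the URL, concurrency, and timeout are illustrative, and the handler arity matches the ev, bundle call sites in the websocket client above:

  sub = NchanTools::Subscriber.new "http://127.0.0.1/sub/demo", 1, timeout: 30
  sub.on(:pong) do |ev, bundle|
    puts "pong received"
  end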
@@ -1657,6 +1669,7 @@ class Publisher
     @accept = opt[:accept]
     @verbose = opt[:verbose]
     @on_response = opt[:on_response]
+    @http2 = opt[:http2]
 
     @ws_wait_until_response = true
 
@@ -1797,6 +1810,7 @@ class Publisher
     headers = {:'Content-Type' => content_type, :'Accept' => accept}
     headers[:'X-Eventsource-Event'] = eventsource_event if eventsource_event
     headers.merge! @extra_headers if @extra_headers
+
     post = Typhoeus::Request.new(
       @url,
       headers: headers,
@@ -1804,7 +1818,8 @@ class Publisher
       body: body,
       timeout: @timeout || PUBLISH_TIMEOUT,
       connecttimeout: @timeout || PUBLISH_TIMEOUT,
-      verbose: @verbose
+      verbose: @verbose,
+      http_version: @http2 ? :httpv2_0 : :none
     )
     if body && @messages
       msg=Message.new body
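
A hedged sketch of enabling the new HTTP/2 option from calling code; the URL is illustrative, and only the :http2 key is read by the Publisher constructor above:

  pub = NchanTools::Publisher.new "http://127.0.0.1/pub/demo", http2: true, timeout: 10
  pub.nofail = true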
@@ -0,0 +1,120 @@
+class Rdsck
+  attr_accessor :url, :verbose, :namespace
+  attr_accessor :redis, :masters
+
+  def dbg(*args)
+    if @verbose
+      print("# ")
+      puts(*args)
+    end
+  end
+
+  def initialize(opt)
+    @url=opt[:url]
+    @verbose=opt[:verbose]
+    @namespace=opt[:namespace]
+    @channel_id=opt[:channel_id]
+  end
+
+  def cluster?
+    @cluster_mode
+  end
+
+  def connect
+    begin
+      @redis=Redis.new url: @url
+      mode = redis.info["redis_mode"]
+    rescue StandardError => e
+      STDERR.puts e.message
+      return false
+    end
+
+    if mode == "cluster"
+      @redis.close
+      begin
+        @redis=Redis.new cluster: [@url]
+        @redis.ping
+      rescue StandardError => e
+        STDERR.puts e.message
+        return false
+      end
+
+      @cluster_mode = true
+      @masters = []
+
+      redis.connection.each do |c|
+        node = Redis.new url: c[:id]
+        @masters << node
+      end
+    else
+      @masters = [@redis]
+    end
+
+    dbg "Connected to Redis #{mode == "cluster" ? "cluster" : "server"}"
+    (Array === @redis.connection ? @redis.connection : [@redis.connection]) .each do |v|
+      dbg "  #{v[:id]}"
+    end
+    self
+  end
+
+  def key(subkey=nil)
+    k = "{channel:#{@namespace}/#{@channel_id}}"
+    return subkey ? "#{k}:#{subkey}" : k
+  end
+
+  def info
+    channel_hash=@redis.hgetall key
+    hash_ttl=@redis.ttl key
+    channel_subs=@redis.hgetall key("subscribers")
+    #...
+  end
+
+  def filter_channels(filters={})
+    script = <<~EOF
+      local prev_cursor = ARGV[1]
+      local pattern = ARGV[2]
+      local scan_batch_size = ARGV[3]
+
+      local min_subscribers = ARGV[4] and #ARGV[4] > 0 and tonumber(ARGV[4])
+
+      local cursor, iteration
+      if pattern and #pattern > 0 then
+        cursor, iteration = unpack(redis.call("SCAN", prev_cursor, "MATCH", pattern, "COUNT", scan_batch_size))
+      else
+        cursor, iteration = unpack(redis.call("SCAN", prev_cursor, "COUNT", scan_batch_size))
+      end
+
+      local matched = {}
+      for _, chankey in pairs(iteration) do
+        local match = true
+        if min_subscribers then
+          match = match and (tonumber(redis.call('HGET', chankey, 'fake_subscribers') or 0) >= min_subscribers)
+        end
+        if match then
+          table.insert(matched, chankey)
+        end
+      end
+
+      return {cursor, matched}
+    EOF
+
+    results = []
+    batch_size=500
+    masters.each do |m|
+      hash = m.script "load", script
+      cursor, pattern = "0", "{channel:*}"
+      loop do
+        cursor, batch_results = m.evalsha hash, keys: [], argv: [cursor, pattern, batch_size, filters[:min_subscribers]]
+        results += batch_results
+        pattern = ""
+        break if cursor.to_i == 0
+      end
+    end
+    results
+
+    results.map! do |key|
+      m = key.match(/^\{channel\:(.*)\}$/)
+      m[1] || key
+    end
+  end
+end
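
For reference, a hedged sketch of the channel-key layout produced by Rdsck#key; the namespace and channel id are illustrative:

  rdsck = Rdsck.new url: "redis://127.0.0.1:6379/", namespace: "", channel_id: "foo"
  rdsck.key                 # => "{channel:/foo}"
  rdsck.key("subscribers")  # => "{channel:/foo}:subscribers"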
@@ -1,3 +1,3 @@
 module NchanTools
-  VERSION = "0.1.5"
+  VERSION = "0.1.10"
 end
@@ -29,15 +29,16 @@ Gem::Specification.new do |spec|
   spec.add_dependency "celluloid"
   spec.add_dependency "celluloid-io"
   spec.add_dependency "HDRHistogram"
+  spec.add_dependency "redis"
 
   spec.add_dependency "websocket-driver"
   spec.add_dependency 'websocket-extensions'
   spec.add_dependency "permessage_deflate"
   spec.add_dependency 'http_parser.rb'
-  if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.2.2')
-    spec.add_dependency 'http-2'
-  end
+  if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.2.2')
+    spec.add_dependency 'http-2'
+  end
   spec.add_development_dependency "pry"
-  spec.add_development_dependency "bundler", "~> 1.16"
-  spec.add_development_dependency "rake", "~> 10.0"
+  spec.add_development_dependency "bundler"
+  spec.add_development_dependency "rake"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: nchan_tools
 version: !ruby/object:Gem::Version
-  version: 0.1.5
+  version: 0.1.10
 platform: ruby
 authors:
 - Leo Ponomarev
-autorequire:
+autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-10-24 00:00:00.000000000 Z
+date: 2021-01-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: typhoeus
@@ -94,6 +94,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: redis
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: websocket-driver
   requirement: !ruby/object:Gem::Requirement
@@ -182,36 +196,37 @@ dependencies:
   name: bundler
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '1.16'
+        version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '1.16'
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '10.0'
+        version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '10.0'
+        version: '0'
 description: publishing, subscribing, testing, and benchmarking utilities for Nchan.
 email:
 - leo@nchan.io
 executables:
 - nchan-benchmark
 - nchan-pub
+- nchan-redis-debug
 - nchan-sub
 extensions: []
 extra_rdoc_files: []
@@ -226,17 +241,19 @@ files:
 - bin/setup
 - exe/nchan-benchmark
 - exe/nchan-pub
+- exe/nchan-redis-debug
 - exe/nchan-sub
 - lib/nchan_tools.rb
 - lib/nchan_tools/benchmark.rb
 - lib/nchan_tools/pubsub.rb
+- lib/nchan_tools/rdsck.rb
 - lib/nchan_tools/version.rb
 - nchan_tools.gemspec
 homepage: https://nchan.io
 licenses:
 - WTFPL
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -251,9 +268,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.7.7
-signing_key:
+rubygems_version: 3.1.4
+signing_key:
 specification_version: 4
 summary: Development and testing utilities for Nchan
 test_files: []