datasift 3.0.0.beta2 → 3.0.0.beta4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,13 +9,13 @@ class StreamingApi < DataSiftExample
 
   def run
     begin
-      rubyReceived = 0
-      pythonReceived = 0
+      ruby_received = 0
+      python_received = 0
       ruby = 'interaction.content contains "ruby"'
-      rubyStream = @datasift.compile ruby
+      ruby_stream = @datasift.compile ruby
 
       python = 'interaction.content contains "python"'
-      pythonStream = @datasift.compile python
+      python_stream = @datasift.compile python
 
       on_delete = lambda { |stream, m| puts 'We must delete this to be compliant ==> ' + m }
 
@@ -25,20 +25,20 @@ class StreamingApi < DataSiftExample
       end
 
       on_message_ruby = lambda do |message, stream, hash|
-        rubyReceived += 1
-        puts "Ruby #{rubyReceived}, #{message}"
+        ruby_received += 1
+        puts "Ruby #{ruby_received}, #{message}"
 
-        if rubyReceived >= 10
+        if ruby_received >= 10
           puts 'un-subscribing from ruby stream '+ hash
           stream.unsubscribe hash
         end
       end
 
       on_message_python = lambda do |message, stream, hash|
-        pythonReceived += 1
-        puts "python #{pythonReceived}, #{message}"
+        python_received += 1
+        puts "python #{python_received}, #{message}"
 
-        if pythonReceived >= 10
+        if python_received >= 10
           puts 'un-subscribing from python stream '+ hash
           stream.unsubscribe hash
         end
@@ -46,18 +46,18 @@ class StreamingApi < DataSiftExample
 
       on_connect = lambda do |stream|
         #
-        puts 'subscribing to python stream '+ pythonStream[:data][:hash]
-        stream.subscribe(pythonStream[:data][:hash], on_message_python)
-        puts 'Subscribed to '+ pythonStream[:data][:hash]
+        puts 'subscribing to python stream '+ python_stream[:data][:hash]
+        stream.subscribe(python_stream[:data][:hash], on_message_python)
+        puts 'Subscribed to '+ python_stream[:data][:hash]
         sleep 1
         #
-        puts 'subscribing to ruby stream '+ rubyStream[:data][:hash]
-        stream.subscribe(rubyStream[:data][:hash], on_message_ruby)
-        puts 'Subscribed to '+ rubyStream[:data][:hash]
+        puts 'subscribing to ruby stream '+ ruby_stream[:data][:hash]
+        stream.subscribe(ruby_stream[:data][:hash], on_message_ruby)
+        puts 'Subscribed to '+ ruby_stream[:data][:hash]
       end
 
-      on_close = lambda do |stream|
-        puts 'closed'
+      on_close = lambda do |stream,msg|
+        puts msg
       end
 
       on_datasift_message = lambda do |stream, message, hash|
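
Rename aside, the behavioural change in this hunk is that on_close now receives the close message as a second argument, and (as the library hunks later in this diff show) callback arities are validated before a connection is made: on_delete and on_error must take two parameters, on_open one, and on_close two. A minimal sketch of callbacks that satisfy the new signatures, assuming DataSift::new_stream is the entry point used by the example (it is the call retry_connect makes further down) and @config comes from the example's base class:

    on_delete = lambda { |connection, m| puts "deleted: #{m}" }   # 2 parameters required
    on_error  = lambda { |connection, e| puts "error: #{e}" }     # 2 parameters required
    on_open   = lambda { |connection| puts 'connected' }          # 1 parameter if supplied
    on_close  = lambda { |connection, msg| puts msg }             # 2 parameters if supplied
    conn = DataSift::new_stream(@config, on_delete, on_error, on_open, on_close)
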
@@ -13,30 +13,30 @@ class PushApi < DataSiftExample
       subscription = create_push(stream[:data][:hash])
 
       subscription_id = subscription[:data][:id]
-      # Pull interactions from the push queue - this will only work if we have set
-      # the Push Subscription output_type above to 'pull'
+      #pull a bunch of interactions from the push queue - only work if we had set the output_type above to pull
+      #pull @datasift.pull subscription_id
+
+      puts 'pullinga'
+      @datasift.push.pull(subscription_id).each { |e| puts e }
 
-      puts 'Waiting...'
       sleep 10
 
-      # Passing a lambda is more efficient because it is executed once for each interaction received
-      # this saves having to iterate over the array returned so the same iteration isn't done twice
-      puts 'Pulling'
-      @datasift.push.pull(subscription_id, 20971520, '', lambda{ |e| puts "on_message => #{e}" })
+      puts 'pullingb'
+      @datasift.push.pull(subscription_id).each { |e| puts e }
 
-      puts 'Waiting...'
       sleep 10
-      puts 'Pulling'
-      @datasift.push.pull(subscription_id, 20971520, '', lambda{ |e| puts "on_message => #{e}" })
 
-      puts 'Stop Subscription'
-      @datasift.push.stop subscription_id
+      puts 'pullingc'
+      #passing a lambda is more efficient because it is executed once for each interaction received
+      #this saves having to iterate over the array returned so the same iteration isn't done twice
+      @datasift.push.pull(subscription_id,20971520,'', lambda{ |e| puts "on_message => #{e}" })
 
-      puts 'Delete Subscription'
+      puts 'pullingdelete'
       @datasift.push.delete subscription_id
     end
+    #rescue DataSiftError
   rescue DataSiftError => dse
-    puts dse
+    puts dse.inspect
   end
   end
 
@@ -4,53 +4,30 @@ class PushApi < DataSiftExample
     super
   end
 
-  def run
+  def run(count)
     begin
-      puts 'Validating'
-      if @datasift.push.valid? @params
-        stream = @datasift.compile 'interaction.content contains "datasift"'
-        subscription = create_push(stream[:data][:hash])
+      subscription = create_push('5cdb0c8b4f3f6ca26f6ba1b086f22edd', count)
 
-        subscription_id = subscription[:data][:id]
-        #pull a bunch of interactions from the push queue - only work if we had set the output_type above to pull
-        #pull @datasift.pull subscription_id
+      subscription_id = subscription[:data][:id]
+      #pull a bunch of interactions from the push queue - only work if we had set the output_type above to pull
+      #pull @datasift.pull subscription_id
 
-        puts 'updating subscription'
-        # update the info we just used to create
-        # id, name and output_params.* are valid
-        puts @datasift.push.update @params.merge({:id => subscription_id, :name => 'My updated awesome name'})
-
-        puts 'getting subscription info'
-        # get details for a subscription also available are
-        # push.[get, get_by_hash,get_by_historics_id]
-        puts @datasift.push.get_by_subscription subscription_id
-
-        puts 'getting logs for subscription'
-        # get log messages for a subscription id
-        #also available push.logs to fetch logs for all subscriptions
-        puts @datasift.push.logs_for subscription_id
-
-        puts 'pausing subscription'
-        #pause the subscription that was created
-        puts @datasift.push.pause subscription_id
-
-        puts 'resuming subscription'
-        # resume the subscription that was just paused
-        puts @datasift.push.resume subscription_id
-
-        puts 'stopping subscription'
-        # stop the subscription
-        puts @datasift.push.stop subscription_id
-
-        puts 'deleting subscription'
-        #and delete it
-        puts @datasift.push.delete subscription_id
-      end
-      #rescue DataSiftError
+      puts 'getting subscription info'
+      # get details for a subscription also available are
+      # push.[get, get_by_hash,get_by_historics_id]
+      puts @datasift.push.get_by_subscription subscription_id
    rescue DataSiftError => dse
      puts dse.message
    end
  end
 
+  def get_all
+    puts MultiJson.dump(@datasift.push.get(1, 500))
+  end
 end
-PushApi.new().run
+
+p = PushApi.new()
+#for i in 1..1000
+#  p.run(i)
+#end
+p.get_all()
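
Two usage patterns recur across the PushApi example hunks above. The plain form of push.pull returns an array of interactions that can be iterated, while the longer form takes a maximum size in bytes, a cursor and a lambda that is invoked once per interaction, which avoids a second pass over the returned array. Condensed from the hunks above, with subscription_id standing in for a real pull-type subscription:

    # array form - returns the interactions queued so far
    @datasift.push.pull(subscription_id).each { |e| puts e }

    # lambda form - size (bytes), cursor, then a per-interaction callback
    @datasift.push.pull(subscription_id, 20971520, '', lambda { |e| puts "on_message => #{e}" })

The lifecycle calls dropped from the second example (push.update, pause, resume, stop, delete, logs_for) remain part of the client and are all wired up in the new cli.rb later in this diff.
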
@@ -7,7 +7,7 @@ module DataSift
     config[:api_host] = 'api.datasift.com' unless config.has_key?(:api_host)
     config[:stream_host] = 'websocket.datasift.com' unless config.has_key?(:stream_host)
     config[:api_version] = 'v1' unless config.has_key?(:api_version)
-    config[:enable_ssl] = true unless config.has_key?(:enable_ssl)
+    config[:enable_ssl] = false unless config.has_key?(:enable_ssl)
     # max 320 seconds retry - http://dev.datasift.com/docs/streaming-api/reconnecting
     config[:max_retry_time] = 320 unless config.has_key?(:max_retry_time)
   end
@@ -20,4 +20,4 @@ module DataSift
     }
   end
   end
- end
+ end
@@ -0,0 +1,233 @@
+require 'optparse'
+require 'optparse/time'
+require 'ostruct'
+require 'pp'
+require 'multi_json'
+require_relative '../lib/datasift'
+
+def to_output(r)
+  MultiJson.dump({
+    :status => r[:http][:status],
+    :headers => r[:http][:headers],
+    :body => r[:data]
+  },
+  :pretty => true)
+end
+
+def opt(val, default)
+  val ? val : default
+end
+
+def err(m)
+  puts MultiJson.dump({:error => m})
+end
+
+def parse(args)
+  options = OpenStruct.new
+  options.auth = nil
+  options.endpoint = 'core'
+  options.command = nil
+  options.params = {}
+  options.api = 'api.datasift.com'
+
+  opt_parser = OptionParser.new do |opts|
+    opts.banner = 'Usage: cli.rb [-c] [--api] -a -e [-p*]'
+    opts.separator 'Specific options:'
+
+    opts.on('-a', '--auth AUTH', 'DataSift username:api_key') do |username|
+      api_key = ARGV.length>0 && ARGV[0].index('-') == 0 ? '' : ARGV[0]
+      if username == nil || api_key == nil || username.empty? || api_key.empty?
+        err 'Unable to parse username and API key, they must be in the format username api_key'
+        err parse(%w(-h))
+        exit
+      end
+      options.auth = {:username => username, :api_key => api_key}
+    end
+
+    opts.on('-e', '--endpoint ENDPOINT', 'Defaults to core, must be one of core,push,historics,preview,sources') do |e|
+      options.endpoint = e
+    end
+
+    opts.on('-c', '--command COMMAND', 'DataSift endpoint, depends on the endpoint') do |e|
+      options.command = e|| 'core'
+    end
+
+    opts.on('-p', '--param PARAM', 'Command specific parameters e.g. -p name value') do |k|
+      # value is ARGV[0] unless ARGV[0] starts with a hyphen
+      options.params[k] = ARGV.length>0 && ARGV[0].index('-') == 0 ? '' : ARGV[0]
+    end
+
+    opts.on('-u', '--url API_HOSTNAME', 'Override the API URL') do |e|
+      options.api = e
+    end
+
+    opts.on_tail('-h', '--help', 'Show this message') do
+      err opts
+      exit
+    end
+
+    opts.on_tail('--version', 'Show version') do
+      err :: DataSift::VERSION
+      exit
+    end
+  end
+
+  opt_parser.parse!(args)
+  options #.marshal_dump
+end
+
+def run_core_command (c, command, p)
+  case command
+    when 'validate'
+      c.valid?(p['csdl'], false)
+    when 'compile'
+      c.compile(p['csdl'])
+    when 'usage'
+      c.usage(usage = p['period'] ? p['period'].to_sym : :hour)
+    when 'balance'
+      c.balance
+    when 'dpu'
+      c.dpu(p['hash'])
+    else
+      err 'Unknown command for the core endpoint'
+      exit
+  end
+end
+
+def run_historics_command (c, command, p)
+  case command
+    when 'prepare'
+      c.historics.prepare(p['hash'], p['start'], p['end'], p['name'], opt(p['sources'], 'twitter'), opt(p['sample'], 10))
+    when 'start'
+      c.historics.start(p['id'])
+    when 'stop'
+      c.historics.stop(p['id'], opt(p['reason'], ''))
+    when 'status'
+      c.historics.status(p['start'], p['end'], opt(p['sources'], 'twitter'))
+    when 'update'
+      c.historics.update(p['id'], p['name'])
+    when 'delete'
+      c.historics.delete(p['id'])
+    when 'get'
+      c.historics.get(opt(p['max'], 20), opt(p['page'], 1), opt(p['with_estimate'], 1))
+    else
+      err 'Unknown command for the historics endpoint'
+      exit
+  end
+end
+
+def run_preview_command (c, command, p)
+  case command
+    when 'create'
+      c.historics_preview.create(p['hash'], p['parameters'], p['start'], opt(p['end'], nil))
+    when 'get'
+      c.historics_preview.get(p['id'])
+    else
+      err 'Unknown command for the historics preview endpoint'
+      exit
+  end
+end
+
+def run_sources_command (c, command, p)
+  case command
+    when 'create'
+      c.managed_source.create(p['source_type'],p['name'], opt(p['parameters'], {}),
+                              opt(p['resources'], []), opt(p['auth'], []))
+    when 'update'
+      c.managed_source.update(p['id'], p['source_type'], p['name'], opt(p['parameters'], {}),
+                              opt(p['resources'], []),
+                              opt(p['auth'], []))
+    when 'delete'
+      c.managed_source.delete(p['id'])
+    when 'stop'
+      c.managed_source.stop(p['id'])
+    when 'start'
+      c.managed_source.start(p['id'])
+    when 'log'
+      c.managed_source.log(p['id'], opt(p['page'], 1), opt(p['per_page'], 20))
+    when 'get'
+      c.managed_source.get(opt(p['id'], nil), opt(p['source_type'], nil), opt(p['page'], 1), opt(p['per_page'], 20))
+    else
+      err 'Unknown command for the historics preview endpoint'
+      exit
+  end
+end
+
+def run_push_command (c, command, p)
+  case command
+    when 'validate'
+      c.push.valid? p, false
+    when 'create'
+      c.push.create p
+    when 'pause'
+      c.push.pause p['id']
+    when 'resume'
+      c.push.resume p['id']
+    when 'update'
+      c.push.update p
+    when 'stop'
+      c.push.stop p['id']
+    when 'delete'
+      c.push.delete p['id']
+    when 'log'
+      p['id'] ?
+        c.push.logs_for(p['id'], opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time), opt(p['order_dir'], :desc)) :
+        c.push.logs(opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time), opt(p['order_dir'], :desc))
+    when 'get'
+      if p['id']
+        c.push.get_by_subscription(p['id'], opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time))
+      elsif p['hash']
+        c.push.get_by_hash(p['hash'], opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time), opt(p['order_dir'], :desc))
+      elsif p['playback_id']
+        c.push.get_by_historics_id(p['playback_id'], opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time), opt(p['order_dir'], :desc))
+      else
+        c.push.get(opt(p['page'], 0), opt(p['per_page'], 20), opt(p['order_by'], :request_time), opt(p['order_dir'], :desc))
+      end
+    when 'pull'
+      c.push.pull(p['id'], opt(p['size'], 20971520), opt(p['cursor'], ''))
+    else
+      err 'Unknown command for the core endpoint'
+      exit
+  end
+end
+
+begin
+  options = parse(ARGV)
+  req = [:auth, :command]
+  missing = req.select { |param| options.send(param).nil? }
+  unless missing.empty?
+    err "The following options are required : #{missing.join(', ')}"
+    err parse(%w(-h))
+    exit
+  end
+  config =
+    {
+      :username => options.auth[:username],
+      :api_key => options.auth[:api_key],
+      :api_host => options.api
+    }
+  datasift = DataSift::Client.new(config)
+
+  res = case options.endpoint
+          when 'core'
+            run_core_command(datasift, options.command, options.params)
+          when 'historics'
+            run_historics_command(datasift, options.command, options.params)
+          when 'push'
+            run_push_command(datasift, options.command, options.params)
+          when 'preview'
+            run_preview_command(datasift, options.command, options.params)
+          when 'managed_sources'
+            run_sources_command(datasift, options.command, options.params)
+          else
+            err 'Unsupported/Unknown endpoint'
+            exit
+        end
+  puts to_output(res)
+rescue DataSiftError => e
+  err e.message
+rescue OptionParser::InvalidOption, OptionParser::MissingArgument
+  err $!.to_s
+  err parse(%w(-h))
+  exit
+end
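
The new file above is a small JSON-emitting command-line client. Going by its option definitions (-a takes the username with the API key as the following argument, -p takes a parameter name followed by its value), a typical invocation might look like the following; the credentials and CSDL are placeholders:

    ruby cli.rb -a your_username your_api_key -e core -c validate -p csdl 'interaction.content contains "datasift"'

The output is whatever to_output produces: the HTTP status, headers and response body as pretty-printed JSON.
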
@@ -1,30 +1,45 @@
 dir = File.dirname(__FILE__)
-
+#
 require 'uri'
 require 'rest_client'
 require 'multi_json'
 require 'websocket_td'
-
+#
 require dir + '/api/api_resource'
-
+#
 require dir + '/errors'
 require dir + '/push'
 require dir + '/historics'
 require dir + '/historics_preview'
 require dir + '/managed_source'
 require dir + '/live_stream'
-
+#
 require 'rbconfig'
 
 module DataSift
   #
-  IS_WINDOWS = (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)
-  VERSION = File.open(File.dirname(__FILE__) + '/../VERSION').first
+  IS_WINDOWS = (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)
+  VERSION = File.open(File.join(File.dirname(__FILE__), '../') + '/VERSION').first
+  KNOWN_SOCKETS = {}
+  DETECT_DEAD_SOCKETS = true
+  SOCKET_DETECTOR_TIMEOUT = 6.5
+
+  Thread.new do
+    while DETECT_DEAD_SOCKETS
+      now = Time.now.to_i
+      KNOWN_SOCKETS.clone.map { |connection, last_time|
+        if now - last_time > SOCKET_DETECTOR_TIMEOUT
+          connection.stream.reconnect
+        end
+      }
+      sleep SOCKET_DETECTOR_TIMEOUT * 10
+    end
+  end
 
   class Client < ApiResource
 
     #+config+:: A hash containing configuration options for the client for e.g.
-    # {:username => 'some_user', :api_key => 'ds_api_key', :enable_ssl => true, :open_timeout => 30, :timeout => 30}
+    # {username => 'some_user', api_key => 'ds_api_key', open_timeout => 30, timeout => 30}
     def initialize (config)
       if config == nil
         raise InvalidConfigError.new ('Config cannot be nil')
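
The notable addition in this hunk is the module-level watchdog thread: every SOCKET_DETECTOR_TIMEOUT * 10 seconds it walks KNOWN_SOCKETS and calls stream.reconnect on any connection whose entry is older than SOCKET_DETECTOR_TIMEOUT seconds. The entries themselves are refreshed by the on_ping and on_message handlers added in the streaming hunk near the end of this diff, so a connection that stops receiving pings or messages is eventually reconnected automatically.
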
@@ -44,10 +59,12 @@ module DataSift
 
     ##
     # Checks if the syntax of the given CSDL is valid
-    def valid?(csdl)
+    #+boolResponse+ If true then a boolean is returned indicating whether the CSDL is valid, otherwise
+    # the response object itself is returned
+    def valid?(csdl, boolResponse = true)
       requires({:csdl => csdl})
       res= DataSift.request(:POST, 'validate', @config, {:csdl => csdl})
-      res[:http][:status] == 200
+      boolResponse ? res[:http][:status] == 200 : res
     end
 
     ##
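
The extra boolResponse parameter makes valid? dual-purpose: by default it still answers with a boolean, while passing false returns the full response object (which is how the new cli.rb prints status, headers and body for the validate command). A small sketch, assuming a configured client in datasift:

    csdl = 'interaction.content contains "datasift"'
    datasift.valid?(csdl)               # => true or false, as before
    res = datasift.valid?(csdl, false)
    puts res[:http][:status]            # full response hash when boolResponse is false
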
@@ -120,7 +137,8 @@ module DataSift
         :open_timeout => open_timeout,
         :timeout => timeout,
         :payload => payload,
-        :url => url
+        :url => url,
+        :ssl_version => 'TLSv1'
     )
 
     begin
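
Together with the enable_ssl default change earlier in this diff, this hunk pins requests to TLSv1 via the :ssl_version option, presumably in the option hash handed to rest_client's request builder (the surrounding :url, :payload and timeout keys suggest as much).
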
@@ -128,7 +146,7 @@ module DataSift
       if response != nil && response.length > 0
         if new_line_separated
           res_arr = response.split("\n")
-          data = []
+          data = []
           res_arr.each { |e|
             interaction = MultiJson.load(e, :symbolize_keys => true)
             data.push(interaction)
@@ -164,7 +182,7 @@ module DataSift
         body = e.http_body
         if code && body
           error = MultiJson.load(body)
-          handle_api_error(e.http_code, error['error'] + " for URL #{url}")
+          handle_api_error(e.http_code, (error['error'] ? error['error'] : '') + " for URL #{url}")
         else
           process_client_error(e)
         end
@@ -215,7 +233,7 @@ module DataSift
       when RestClient::SSLCertificateNotVerified
         message = 'Failed to complete SSL verification'
       when SocketError
-        message = 'Communication with DataSift failed. Are you able to resolve api.datasift.com?'
+        message = 'Communication with DataSift failed. Are you able to resolve the API hostname?'
       else
         message = 'Unexpected error.'
     end
@@ -231,58 +249,76 @@ module DataSift
     if on_delete == nil || on_error == nil
       raise NotConfiguredError.new 'on_delete and on_error are required before you can connect'
     end
-
+    raise BadParametersError.new('on_delete - 2 parameter required') unless on_delete.arity == 2
+    raise BadParametersError.new('on_error - 2 parameter required') unless on_error.arity == 2
+    if on_open != nil
+      raise BadParametersError.new('on_open - 1 parameter required') unless on_open.arity == 1
+    end
+    if on_close != nil
+      raise BadParametersError.new('on_close - 2 parameter required') unless on_close.arity == 2
+    end
     begin
-      stream = WebsocketTD::Websocket.new('websocket.datasift.com', '/multi', "username=#{config[:username]}&api_key=#{config[:api_key]}")
-      connection = LiveStream.new(config, stream)
-
-      stream.on_open = lambda {
+      stream = WebsocketTD::Websocket.new('websocket.datasift.com', '/multi', "username=#{config[:username]}&api_key=#{config[:api_key]}")
+      connection = LiveStream.new(config, stream)
+      KNOWN_SOCKETS[connection] = Time.new.to_i
+      stream.on_ping = lambda { |data|
+        KNOWN_SOCKETS[connection] = Time.new.to_i
+      }
+      stream.on_open =lambda {
         connection.connected = true
         connection.retry_timeout = 0
         on_open.call(connection) if on_open != nil
       }
 
-      stream.on_close = lambda {
+      stream.on_close =lambda { |message|
         connection.connected = false
-        retry_connect(config, connection, on_delete, on_error, on_open, on_close, '', true)
+        retry_connect(config, connection, on_delete, on_error, on_open, on_close, message, true)
       }
-      stream.on_error = lambda {
+      stream.on_error =lambda { |message|
         connection.connected = false
-        on_error.call(connection) if on_close != nil
-        retry_connect(config, connection, on_delete, on_error, on_open, on_close)
+        retry_connect(config, connection, on_delete, on_error, on_open, on_close, message)
       }
-      stream.on_message =lambda { |msg|
-        data = MultiJson.load(msg.data, :symbolize_keys => true)
+      stream.on_message=lambda { |msg|
+        data = MultiJson.load(msg.data, :symbolize_keys => true)
+        KNOWN_SOCKETS[connection] = Time.new.to_i
         if data.has_key?(:deleted)
          on_delete.call(connection, data)
        elsif data.has_key?(:status)
          connection.fire_ds_message(data)
+        elsif data.has_key?(:reconnect)
+          connection.stream.reconnect
        else
          connection.fire_on_message(data[:hash], data[:data])
        end
      }
    rescue Exception => e
-      retry_connect(config, connection, on_delete, on_error, on_open, on_close, e.message)
+      case e
+        when DataSiftError, ArgumentError
+          raise e
+        else
+          retry_connect(config, connection, on_delete, on_error, on_open, on_close, e.message)
+      end
    end
    connection
  end
 
  def self.retry_connect(config, connection, on_delete, on_error, on_open, on_close, message = '', use_closed = false)
-    connection.retry_timeout = connection.retry_timeout == 0 ? 10 : connection.retry_timeout * 2
-    if connection.retry_timeout > config[:max_retry_time]
+    config[:retry_timeout] = config[:retry_timeout] == 0 || config[:retry_timeout] == nil ? 10 : config[:retry_timeout] * 2
+    connection.retry_timeout = config[:retry_timeout]
+    if config[:retry_timeout] > config[:max_retry_time]
      if use_closed && on_close != nil
-        on_close.call(connection)
+        on_close.call(connection, message)
      else
-        on_error.call ReconnectTimeoutError.new "Connecting to DataSift has failed, re-connection was attempted but
+        on_error.call(connection, ReconnectTimeoutError.new("Connecting to DataSift has failed, re-connection was attempted but
          multiple consecutive failures where encountered. As a result no further
          re-connection will be automatically attempted. Manually invoke connect() after
          investigating the cause of the failure, be sure to observe DataSift's
          re-connect policies available at http://dev.datasift.com/docs/streaming-api/reconnecting
-          - Error { #{message}}"
+          - Error { #{message}}"))
      end
    else
-      sleep connection.retry_timeout
+      sleep config[:retry_timeout]
      new_stream(config, on_delete, on_error, on_open, on_close)
    end
  end
- end
+ end
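
One behavioural consequence of the retry_connect changes above: the back-off interval now lives in config[:retry_timeout], starting at 10 seconds and doubling after each failed attempt, and reconnection is abandoned once it exceeds config[:max_retry_time] (320 seconds by default, per the earlier config hunk), at which point on_close or on_error is invoked with the connection and a message. A caller who wants to give up sooner can lower that ceiling in the client config, for example:

    config = {
      :username       => 'your_username',
      :api_key        => 'your_api_key',
      :max_retry_time => 60  # abandon reconnection attempts after roughly a minute of back-off
    }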