datasift 2.1.1 → 3.0.0.beta
This diff shows the changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/CHANGELOG.md +100 -0
- data/Gemfile.lock +32 -0
- data/README.md +38 -79
- data/VERSION +1 -1
- data/datasift.gemspec +21 -24
- data/examples/auth.rb +44 -0
- data/examples/core_api_eg.rb +46 -0
- data/examples/historics_eg.rb +50 -0
- data/examples/historics_preview_eg.rb +30 -0
- data/examples/live_stream_eg.rb +89 -0
- data/examples/managed_source_eg.rb +56 -0
- data/examples/pull.rb +44 -0
- data/examples/push_eg.rb +56 -0
- data/lib/api/api_resource.rb +23 -0
- data/lib/datasift.rb +287 -14
- data/lib/errors.rb +59 -0
- data/lib/historics.rb +76 -0
- data/lib/historics_preview.rb +20 -0
- data/lib/live_stream.rb +53 -0
- data/lib/managed_source.rb +57 -0
- data/lib/push.rb +156 -0
- data/tests/core_api_test.rb +42 -0
- metadata +51 -73
- data/Rakefile +0 -34
- data/config.yml +0 -2
- data/examples/consume-stream.rb +0 -63
- data/examples/deletes.rb +0 -52
- data/examples/dpu.rb +0 -115
- data/examples/football-buffered.rb +0 -51
- data/examples/football.rb +0 -53
- data/examples/historics.sh +0 -2
- data/examples/historics/create-from-csdl.rb +0 -71
- data/examples/historics/create-from-hash.rb +0 -65
- data/examples/historics/delete.rb +0 -30
- data/examples/historics/env.rb +0 -37
- data/examples/historics/list.rb +0 -30
- data/examples/historics/start.rb +0 -30
- data/examples/historics/stop.rb +0 -30
- data/examples/historics/view.rb +0 -28
- data/examples/push.sh +0 -2
- data/examples/push/delete.rb +0 -33
- data/examples/push/env.rb +0 -53
- data/examples/push/list.rb +0 -30
- data/examples/push/pause.rb +0 -33
- data/examples/push/push-from-hash.rb +0 -72
- data/examples/push/push-historic-from-csdl.rb +0 -98
- data/examples/push/push-stream-from-csdl.rb +0 -70
- data/examples/push/resume.rb +0 -33
- data/examples/push/stop.rb +0 -33
- data/examples/push/view-log.rb +0 -45
- data/examples/push/view.rb +0 -31
- data/examples/twitter-track.rb +0 -61
- data/lib/DataSift/apiclient.rb +0 -73
- data/lib/DataSift/definition.rb +0 -202
- data/lib/DataSift/exceptions.rb +0 -33
- data/lib/DataSift/historic.rb +0 -316
- data/lib/DataSift/managed_source.rb +0 -263
- data/lib/DataSift/mockapiclient.rb +0 -44
- data/lib/DataSift/push_definition.rb +0 -115
- data/lib/DataSift/push_subscription.rb +0 -330
- data/lib/DataSift/stream_consumer.rb +0 -166
- data/lib/DataSift/stream_consumer_http.rb +0 -188
- data/lib/DataSift/user.rb +0 -311
- data/test/helper.rb +0 -95
- data/test/test_definition.rb +0 -273
- data/test/test_historics.rb +0 -233
- data/test/test_pushdefinition.rb +0 -92
- data/test/test_pushsubscription.rb +0 -17
- data/test/test_user.rb +0 -130
- data/test/testdata.yml +0 -30
data/lib/DataSift/stream_consumer.rb
DELETED
@@ -1,166 +0,0 @@
-module DataSift
-  #This is the base class for all StreamConsumer implementation.
-  class StreamConsumer
-    #Constant for the HTTP StreamConsumer implementation option.
-    TYPE_HTTP = 'HTTP'
-
-    #Constant for the "stopped" status.
-    STATE_STOPPED = 0
-    #Constant for the "starting" status.
-    STATE_STARTING = 1
-    #Constant for the "running" status.
-    STATE_RUNNING = 2
-    #Constant for the "stopping" status.
-    STATE_STOPPING = 3
-
-    #Factory function. Creates a StreamConsumer-derived object for the given
-    #type.
-    #=== Parameters
-    #* +type+ - Use the TYPE_ constants
-    #* +definition+ - CSDL string or a Definition object.
-    #=== Returns
-    #A StreamConsumer-derived object.
-    def self.factory(user, type, definition)
-      type ||= TYPE_HTTP
-      @klass = Module.const_get('DataSift').const_get('StreamConsumer_' + type)
-      @klass.new(user, definition)
-    end
-
-    #Whether the consumer should automatically try to reconnect if the
-    #connection is dropped.
-    attr_accessor :auto_reconnect
-    #The current state of the consumer.
-    attr_reader :state
-    #The reason the consumer was stopped.
-    attr_reader :stop_reason
-
-    #Constructor. Do not use this directly, use the factory method instead.
-    #=== Parameters
-    #* +user+ - The user this consumer will run as.
-    #* +definition+ - CSDL string or a Definition object.
-    def initialize(user, definition)
-      raise InvalidDataError, 'Please supply a valid User object when creating a Definition object.' unless user.is_a? DataSift::User
-
-      if definition.is_a? String
-        @definition = user.createDefinition(definition)
-      elsif definition.is_a? Definition
-        @definition = definition
-      else
-        raise InvalidDataError, 'The definition must be a CSDL string or a DataSift_Definition object'
-      end
-
-      @user = user
-      @auto_reconnect = true
-      @stop_reason = 'Unknown reason'
-      @state = STATE_STOPPED
-      @stream_timeout = 65
-
-      # Get the hash which will compile the CSDL if necessary
-      @definition.hash
-    end
-
-    #Called when a deletion notification is received.
-    #=== Parameters
-    #* +interaction+ - Minimal details about the interaction that was deleted.
-    def onDeleted(&block)
-      if block_given?
-        @on_deleted = block
-        self
-      else
-        @on_deleted
-      end
-    end
-
-    #This is called when an error message is received.
-    #=== Parameters
-    #* +message+ - The error message.
-    def onError(&block)
-      if block_given?
-        @on_error = block
-        self
-      else
-        @on_error
-      end
-    end
-
-    #This is called when an error message is received.
-    #=== Parameters
-    #* +message+ - The error message.
-    def onWarning(&block)
-      if block_given?
-        @on_warning = block
-        self
-      else
-        @on_warning
-      end
-    end
-
-    #This is called when the consumer is stopped.
-    #=== Parameters
-    #* +reason+ - The reason why the consumer stopped.
-    def onStopped(&block)
-      if block_given?
-        @on_stopped = block
-        self
-      else
-        @on_stopped
-      end
-    end
-
-    #Once an instance of a StreamConsumer is ready for use, call this to
-    #start consuming. Extending classes should implement onStart to handle
-    #actually starting.
-    #=== Parameters
-    #* +auto_reconnect+ - Whether the consumer should automatically reconnect.
-    #* +block+ - An optional block to receive incoming interactions.
-    def consume(auto_reconnect = true, &block)
-      @auto_reconnect = auto_reconnect;
-
-      # Start consuming
-      @state = STATE_STARTING
-      onStart do |interaction|
-        if interaction.has_key?('status')
-          if interaction['status'] == 'error' || interaction['status'] == 'failure'
-            onError.call(interaction['message'])
-          elsif interaction['status'] == 'warning'
-            onWarning.call(interaction['message'])
-          else
-            # Tick
-          end
-        else
-          if interaction.has_key?('deleted') and interaction['deleted']
-            onDeleted.call(interaction) unless onDeleted.nil?
-          else
-            block.call(interaction) unless block.nil?
-          end
-        end
-      end
-    end
-
-    #Called when the consumer should start consuming the stream.
-    def onStart()
-      abort('onStart method has not been overridden!')
-    end
-
-    #This method can be called at any time to *request* that the consumer
-    #stop consuming. This method sets the state to STATE_STOPPING and it's
-    #up to the consumer implementation to notice that this has changed, stop
-    #consuming and call the onStopped method.
-    def stop()
-      raise InvalidDataError, 'Consumer state must be RUNNING before it can be stopped' unless @state = StreamConsumer::STATE_RUNNING
-      @state = StreamConsumer::STATE_STOPPING
-    end
-
-    #Default implementation of onStop. It's unlikely that this method will
-    #ever be used in isolation, but rather it should be called as the final
-    #step in the extending class's implementation.
-    #=== Parameters
-    #* +reason+ - The reason why the consumer stopped.
-    def onStop(reason = '')
-      reason = 'Unexpected' unless @state != StreamConsumer::STATE_STOPPING and reason.length == 0
-      @state = StreamConsumer::STATE_STOPPED
-      @stop_reason = reason
-      onStopped.call(reason) unless onStopped.nil?
-    end
-  end
-end
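For orientation, here is a minimal sketch of how the removed 2.x consumer API was typically driven, pieced together from the deleted source above; the credentials and CSDL are placeholders, not values taken from this diff.

```ruby
require 'datasift'

# Placeholders: substitute real DataSift credentials.
user       = DataSift::User.new('your_username', 'your_api_key')
definition = user.createDefinition('interaction.content contains "datasift"')

# factory() resolves TYPE_HTTP to the StreamConsumer_HTTP subclass shown below.
consumer = DataSift::StreamConsumer.factory(user, DataSift::StreamConsumer::TYPE_HTTP, definition)

consumer.onError   { |message| puts "Error: #{message}" }
consumer.onWarning { |message| puts "Warning: #{message}" }
consumer.onStopped { |reason|  puts "Stopped: #{reason}" }

# consume() blocks and yields each interaction until stop() is requested
# or the connection drops with auto_reconnect disabled.
consumer.consume(true) do |interaction|
  puts interaction.inspect
end
```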
data/lib/DataSift/stream_consumer_http.rb
DELETED
@@ -1,188 +0,0 @@
-$LOAD_PATH.unshift(File.dirname(__FILE__) + '/../')
-
-require 'uri'
-require 'socket'
-require 'yajl'
-require 'cgi'
-
-module DataSift
-  #The HTTP implementation of the StreamConsumer.
-  class StreamConsumer_HTTP < StreamConsumer
-    #Constructor. Requires valid user and definition objects.
-    #=== Parameters
-    #* +user+ - The user consuming the data.
-    #* +definition+ - The Definition to consume.
-    def initialize(user, definition)
-      super
-    end
-
-    #Called when the consumer is started.
-    #=== Parameters
-    #* +block+ - A block to receive incoming data.
-    def onStart(&block)
-      begin
-        reconnect() unless !@socket.nil? and !@socket.closed?
-        parser = Yajl::Parser.new
-        parser.on_parse_complete = block if block_given?
-        if @response_head[:headers]["Transfer-Encoding"] == 'chunked'
-          if block_given?
-            chunkLeft = 0
-            while !@socket.eof? && (line = @socket.gets) && @state == StreamConsumer::STATE_RUNNING
-              break if line.match /^0.*?\r\n/
-
-              begin
-                ready = IO.select([@socket], nil, [@socket], @stream_timeout)
-                unless ready
-                  reconnect()
-                  next
-                end
-              end
-
-              next if line == "\r\n"
-              size = line.hex
-              json = @socket.read(size)
-              next if json.nil?
-              chunkLeft = size-json.size
-              if chunkLeft == 0
-                if json.length > 100
-                  parser << json
-                end
-              else
-                # received only part of the chunk, grab the rest
-                received_data = @socket.read(chunkLeft)
-                if not received_data.nil?
-                  parser << received_data
-                end
-              end
-            end
-          else
-            raise StreamError, 'Chunked responses detected, but no block given to handle the chunks.'
-          end
-        else
-          content_type = @response_head[:headers]['Content-Type'].split(';')
-          content_type = content_type.first
-          if ALLOWED_MIME_TYPES.include?(content_type)
-            case @response_head[:headers]['Content-Encoding']
-            when 'gzip'
-              return Yajl::Gzip::StreamReader.parse(@socket, opts, &block)
-            when 'deflate'
-              return Yajl::Deflate::StreamReader.parse(@socket, opts.merge({:deflate_options => -Zlib::MAX_WBITS}), &block)
-            when 'bzip2'
-              return Yajl::Bzip2::StreamReader.parse(@socket, opts, &block)
-            else
-              return parser.parse(@socket)
-            end
-          else
-            raise StreamError, 'Unhandled response MIME type ' + content_type
-          end
-        end
-      end while @auto_reconnect and @state == StreamConsumer::STATE_RUNNING
-
-      disconnect()
-
-      if @state == StreamConsumer::STATE_STOPPING
-        @stop_reason = 'Stop requested'
-      else
-        @stop_reason = 'Connection dropped'
-      end
-
-      onStop(@stop_reason)
-    end
-
-    private
-    #Reconnect the stream socket.
-    def reconnect()
-      uri = URI.parse('http' + (@user.use_ssl ? 's' : '') + '://' + User::STREAM_BASE_URL + @definition.hash)
-
-      user_agent = @user.getUserAgent()
-
-      request = "GET #{uri.path}#{uri.query ? "?"+uri.query : nil} HTTP/1.1\r\n"
-      request << "Host: #{uri.host}\r\n"
-      request << "User-Agent: #{user_agent}\r\n"
-      request << "Accept: */*\r\n"
-      request << "Auth: #{@user.username}:#{@user.api_key}\r\n"
-      request << "\r\n"
-
-      connection_delay = 0
-
-      begin
-        # Close the socket if it's open
-        disconnect()
-
-        # Back off a bit if required
-        sleep(connection_delay) if connection_delay > 0
-
-        begin
-          @raw_socket = TCPSocket.new(uri.host, uri.port)
-          if @user.use_ssl
-            @socket = OpenSSL::SSL::SSLSocket.new(@raw_socket)
-            @socket.connect
-          else
-            @socket = @raw_socket
-          end
-
-          @socket.write(request)
-          @response_head = {}
-          @response_head[:headers] = {}
-
-          # Read the headers
-          @socket.each_line do |line|
-            if line == "\r\n" # end of the headers
-              break
-            else
-              header = line.split(": ")
-              if header.size == 1
-                header = header[0].split(" ")
-                @response_head[:version] = header[0]
-                @response_head[:code] = header[1].to_i
-                @response_head[:msg] = header[2]
-              else
-                @response_head[:headers][header[0]] = header[1].strip
-              end
-            end
-          end
-
-          if @response_head[:code].nil?
-            raise StreamError, 'Socket connection refused'
-          elsif @response_head[:code] == 200
-            # Success!
-            @state = StreamConsumer::STATE_RUNNING
-          elsif @response_head[:code] >= 399 && @response_head[:code] < 500 && @response_head[:code] != 420
-            line = ''
-            while !@socket.eof? && line.length < 10
-              line = @socket.gets
-            end
-            data = Yajl::Parser.parse(line)
-            if data.has_key?('message')
-              raise StreamError, data['message']
-            else
-              raise StreamError, 'Connection refused: ' + @response_head[:code] + ' ' + @response_head[:msg]
-            end
-          else
-            if connection_delay == 0
-              connection_delay = 10;
-            elsif connection_delay < 240
-              connection_delay *= 2;
-            else
-              raise StreamError, 'Connection refused: ' + @response_head[:code] + ' ' + @response_head[:msg]
-            end
-          end
-        rescue
-          if connection_delay == 0
-            connection_delay = 1
-          elsif connection_delay <= 16
-            connection_delay += 1
-          else
-            raise StreamError, 'Connection failed due to a network error'
-          end
-        end
-      end while @state != StreamConsumer::STATE_RUNNING
-    end
-
-    #Disconnect the stream socket.
-    def disconnect()
-      @socket.close if !@socket.nil? and !@socket.closed?
-      @raw_socket.close if !@raw_socket.nil? and !@raw_socket.closed?
-    end
-  end
-end
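The reconnect() method above encodes two separate back-off policies: HTTP-level failures start at a 10-second delay that doubles while it is still under 240 seconds, after which a StreamError is raised; network errors caught by the rescue branch back off linearly, one second at a time, until the delay exceeds 16 seconds. A small illustrative sketch follows; these helpers are not part of the library and only mirror the deleted logic.

```ruby
# Mirrors the HTTP-error branch of the deleted reconnect(): 10s, then doubling
# while the delay is under 240s; beyond that the original code raises StreamError.
def next_http_error_delay(delay)
  delay.zero? ? 10 : delay * 2
end

# Mirrors the rescue branch: 1s, then +1s per retry; the original code raises
# StreamError once the delay has passed 16s.
def next_network_error_delay(delay)
  delay.zero? ? 1 : delay + 1
end

delays = []
d = 0
5.times { d = next_http_error_delay(d); delays << d }
puts delays.inspect  # => [10, 20, 40, 80, 160]
```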
data/lib/DataSift/user.rb
DELETED
@@ -1,311 +0,0 @@
-#This is the official DataSift client library for Ruby.
-module DataSift
-  #The User class represents a user of the API. Applications should start their
-  #API interactions by creating an instance of this class. Once initialised it
-  #provides factory methods for all of the functionality in the API.
-  class User
-    #The user agent to pass through with all HTTP requests.
-    USER_AGENT = 'DataSiftRuby/' + File.open(File.dirname(File.dirname(File.dirname(__FILE__))) + '/VERSION').first.strip!;
-    #The base URL for API requests.
-    API_BASE_URL = 'api.datasift.com/';
-    #The base URL for streams.
-    STREAM_BASE_URL = 'stream.datasift.com/';
-
-    #The User's DataSift username.
-    attr_reader :username
-    #The User's DataSift API key.
-    attr_reader :api_key
-    #The User's total number of available hourly API credits. This is not
-    #populated until an API request is made.
-    attr_reader :rate_limit
-    #The User's API credits remaining. This is not populated until an API
-    #request is made.
-    attr_reader :rate_limit_remaining
-    #The APIClient class to use when making API requests.
-    attr_reader :api_client
-    #True if streaming connections should use SSL.
-    attr_reader :use_ssl
-
-    #Constructor. A username and API key are required when constructing an
-    #instance of this class.
-    #=== Parameters
-    #* +username+ - The User's DataSift username
-    #* +api_key+ - The User's DataSift API key
-    def initialize(username, api_key, use_ssl = true)
-      username = username.strip
-      api_key = api_key.strip
-
-      raise EInvalidData, 'Please supply valid credentials when creating a User object.' unless username.size > 0 and api_key.size > 0
-
-      @username = username
-      @api_key = api_key
-      @rate_limit = -1;
-      @rate_limit_remaining = -1
-      @use_ssl = use_ssl
-    end
-
-    #Creates and returns a definition object.
-    #=== Parameters
-    #* +csdl+ - Optional CSDL string with which to prime the object.
-    #=== Returns
-    #A Definition object.
-    def createDefinition(csdl = '')
-      DataSift::Definition.new(self, csdl, false)
-    end
-
-    #Create a Historics query based on this Definition.
-    #=== Parameters
-    #* +hash+ - The stream hash for a new Historics query.
-    #* +start_date+ - The start date for a new Historics query.
-    #* +end_date+ - The end date for a new Historics query.
-    #* +sources+ - An array of sources for a new Historics query.
-    #* +name+ - The name for a new Historics query.
-    #* +sample+ - The sample rate for the new Historics query.
-    #=== Returns
-    #A Historic object.
-    def createHistoric(hash, start_date, end_date, sources, sample, name)
-      return Historic.new(self, hash, start_date, end_date, sources, sample, name)
-    end
-
-    #Get a Historics query from the API.
-    #=== Parameters
-    #* +playback_id+ - The playback ID of the Historics query to retrieve.
-    #=== Returns
-    #A Historic object.
-    def getHistoric(playback_id)
-      return Historic.new(self, playback_id)
-    end
-
-    # Get a list of Historics queries in your account.
-    #=== Parameters
-    #* +page+ - The page number to get.
-    #* +per_page+ - The number of items per page.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of Historics queries in your account.
-    #* +historics+ - An array of Hashes where each Hash is a Historics query.
-    def listHistorics(page = 1, per_page = 20)
-      return Historic::list(self, page, per_page)
-    end
-
-
-    #Create a Managed Source query based on this Definition.
-    #=== Options
-    #* +hash+ - Contains parameters to set up your Managed Source
-    #=== Returns
-    #A Managed Source object.
-    def createManagedSource(hash = {})
-      managed_source = ManagedSource.new(self, hash)
-      managed_source.create
-      return managed_source
-    end
-
-    #Get a Manged Source query from the API.
-    #=== Parameters
-    #* +managed_source_id+ - The ID of the Managed Source to retrieve.
-    #=== Returns
-    #A Managed Source object.
-    def getManagedSource(managed_source_id)
-      return ManagedSource.get(self, managed_source_id)
-    end
-
-    # Get a list of ManagedSources in your account.
-    #=== Parameters
-    #* +page+ - The page number to get.
-    #* +per_page+ - The number of items per page.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of Managed Sources in your account.
-    #* +managed_sources+ - An array of Hashes where each Hash is a managed source.
-    def listManagedSources(page = 1, per_page = 20, source_type = '')
-      return ManagedSource::list(self, page, per_page, source_type)
-    end
-
-    #Get the log entries for all Managed Sources or the given Managed Source.
-    #=== Parameters
-    #* +managed_source_id+ - Optional Managed Source ID.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of matching log entries.
-    #* +log_entries+ - An array of Hashes where each Hash is a log entry.
-    def getManagedSourcesLog(managed_source_id, page = 1, per_page = 20)
-      return getManagedSource(managed_source_id).getLogs(page = 1, per_page = 20)
-    end
-
-    #Create a new PushDefinition object for this user.
-    #=== Returns
-    #A PushDefinition object.
-    def createPushDefinition()
-      return PushDefinition.new(self)
-    end
-
-    #Get an existing PushSubscription from the API.
-    #=== Parameters
-    #* +subscription_id+ - The ID of the subscription to fetch.
-    #=== Returns
-    #A PushSubscription object.
-    def getPushSubscription(subscription_id)
-      return PushSubscription.get(self, subscription_id)
-    end
-
-    #Get the log entries for all push subscription or the given subscription.
-    #=== Parameters
-    #* +subscription_id+ - Optional subscription ID.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of matching log entries.
-    #* +log_entries+ - An array of Hashes where each Hash is a log entry.
-    def getPushSubscriptionLog(subscription_id = false)
-      if subscription_id
-        return getPushSubscription(subscription_id).getLog()
-      else
-        return PushSubscription.getLogs(self)
-      end
-    end
-
-    #Get a page of Push subscriptions in the given user's account, where each
-    #page contains up to per_page items. Results will be ordered according to
-    #the supplied ordering parameters.
-    #=== Parameters
-    #* +page+ - The page number to get.
-    #* +per_page+ - The number of items per page.
-    #* +order_by+ - The field by which to order the results.
-    #* +order_dir+ - Ascending or descending.
-    #* +include_finished+ - True to include subscriptions against finished Historics queries.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of matching Push subscriptions in your account.
-    #* +subscriptions+ - An array of Hashes where each Hash is a Push subscription.
-    def listPushSubscriptions(page = 1, per_page = 20, order_by = PushSubscription::ORDERBY_CREATED_AT, order_dir = PushSubscription::ORDERDIR_ASC, include_finished = false)
-      return PushSubscription.list(self, page, per_page, order_by, order_dir, include_finished)
-    end
-
-    #Get a page of Push subscriptions in the given user's account, where each
-    #page contains up to per_page items. Results will be ordered according to
-    #the supplied ordering parameters.
-    #=== Parameters
-    #* +hash+ - The stream hash.
-    #* +page+ - The page number to get.
-    #* +per_page+ - The number of items per page.
-    #* +order_by+ - The field by which to order the results.
-    #* +order_dir+ - Ascending or descending.
-    #* +include_finished+ - True to include subscriptions against finished Historics queries.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of matching Push subscriptions in your account.
-    #* +subscriptions+ - An array of Hashes where each Hash is a Push subscription.
-    def listPushSubscriptionsToStreamHash(hash, page = 1, per_page = 20, order_by = PushSubscription::ORDERBY_CREATED_AT, order_dir = PushSubscription::ORDERDIR_ASC, include_finished = false)
-      return PushSubscription.listByStreamHash(self, hash, page, per_page, order_by, order_dir)
-    end
-
-    #Get a page of Push subscriptions in the given user's account, where each
-    #page contains up to per_page items. Results will be ordered according to
-    #the supplied ordering parameters.
-    #=== Parameters
-    #* +hash+ - The stream hash.
-    #* +page+ - The page number to get.
-    #* +per_page+ - The number of items per page.
-    #* +order_by+ - The field by which to order the results.
-    #* +order_dir+ - Ascending or descending.
-    #* +include_finished+ - True to include subscriptions against finished Historics queries.
-    #=== Returns
-    #A Hash containing...
-    #* +count+ - The total number of matching Push subscriptions in your account.
-    #* +subscriptions+ - An array of Hashes where each Hash is a Push subscription.
-    def listPushSubscriptionsToPlaybackId(playback_id, page = 1, per_page = 20, order_by = PushSubscription::ORDERBY_CREATED_AT, order_dir = PushSubscription::ORDERDIR_ASC, include_finished = false)
-      return PushSubscription.listByPlaybackId(self, playback_id, page, per_page, order_by, order_dir)
-    end
-
-    #Returns a StreamConsumer-derived object for the given hash, for the
-    #given type.
-    #=== Parameters
-    #* +type+ - The consumer type for which to construct a consumer.
-    #* +hash+ - The hash to be consumed.
-    #=== Returns
-    #A StreamConsumer-derived object.
-    def getConsumer(type = nil, hash = nil, on_interaction = nil, on_stopped = nil)
-      StreamConsumer.factory(self, type, Definition.new(self, nil, hash))
-    end
-
-    #Returns the account balance information for this user.
-    #=== Returns
-    #A Hash containing the balance information.
-    def getBalance
-      return callAPI('balance')['balance']
-    end
-
-    #Returns the usage data for this user. If a hash is provided then a more
-    #detailed breakdown using interaction types is retrieved and returned.
-    #=== Parameters
-    #* +period+ - An optional period for which to fetch data ('hour' or 'day')
-    #=== Returns
-    #A Hash containing the usage information.
-    def getUsage(period = 'hour')
-      params = { 'period' => period }
-
-      return callAPI('usage', params)
-    end
-
-    #Returns the user agent this library should use for all API calls.
-    #=== Returns
-    #The user agent string.
-    def getUserAgent()
-      return USER_AGENT
-    end
-
-    #Sets the ApiClient object to use to access the API
-    #=== Parameters
-    #* +client+ - The API client object to be used.
-    def setApiClient(client)
-      @api_client = client
-    end
-
-    #Sets whether to use SSL for API and stream communication.
-    #=== Parameters
-    #* +use_ssl+ - Pass true to use SSL.
-    def enableSSL(use_ssl = true)
-      @use_ssl = use_ssl
-    end
-
-    #Make a call to a DataSift API endpoint.
-    #=== Parameters
-    #* +endpoint+ - The endpoint of the API call.
-    #* +params+ - A Hash of parameters to be passed along with the request.
-    #=== Returns
-    #A Hash containing the response data.
-    def callAPI(endpoint, params = {})
-      if !@api_client
-        @api_client = ApiClient.new()
-      end
-
-      res = @api_client.call(self, endpoint, params, getUserAgent())
-
-      # Set up the return value
-      retval = res['data']
-
-      # Update the rate limits from the headers
-      @rate_limit = res['rate_limit']
-      @rate_limit_remaining = res['rate_limit_remaining']
-
-      case res['response_code']
-      when 200
-      when 201
-      when 202
-      when 204
-        # Do nothing
-      when 401
-        # Authentication failure
-        raise AccessDeniedError, retval.has_key?('error') ? retval['error'] : 'Authentication failed'
-      when 403
-        # Check the rate limit
-        raise RateLimitExceededError, retval['comment'] if @rate_limit_remaining == 0
-        # Rate limit is ok, raise a generic exception
-        raise APIError.new(res['response_code']), retval.has_key?('error') ? retval['error'] : 'Unknown error'
-      else
-        raise APIError.new(res['response_code']), retval.has_key?('error') ? retval['error'] : 'Unknown error'
-      end
-
-      return retval
-    end
-  end
-end
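Finally, a brief sketch of typical calls against the removed User class, again based only on the deleted source above; the credentials and stream hash are placeholders.

```ruby
require 'datasift'

user = DataSift::User.new('your_username', 'your_api_key')

# callAPI-backed convenience methods:
puts user.getBalance.inspect       # balance data from the 'balance' endpoint
puts user.getUsage('day').inspect  # usage summary for the given period ('hour' or 'day')

# Rate-limit headers are captured on every call:
puts "#{user.rate_limit_remaining}/#{user.rate_limit} API credits left"

# Consuming a previously compiled stream by hash went through the consumer factory:
consumer = user.getConsumer(DataSift::StreamConsumer::TYPE_HTTP, 'your_stream_hash')
consumer.consume(true) { |interaction| puts interaction.inspect }
```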