datasift 3.7.2 → 3.8.0
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/datasift.gemspec +6 -3
- data/lib/api/api_resource.rb +1 -1
- data/lib/datasift.rb +53 -16
- data/lib/errors.rb +8 -0
- data/lib/live_stream.rb +7 -7
- data/lib/pylon.rb +59 -26
- data/lib/tasks.rb +51 -0
- data/lib/version.rb +3 -0
- metadata +5 -83
- data/.gitignore +0 -12
- data/.rubocop.yml +0 -12
- data/.rubocop_todo.yml +0 -497
- data/.travis.yml +0 -17
- data/Gemfile +0 -15
- data/LICENSE +0 -19
- data/MIGRATING_TO_V.3.0.0.md +0 -262
- data/Rakefile +0 -10
- data/VERSION +0 -1
- data/examples/account_eg.rb +0 -33
- data/examples/account_identity_eg.rb +0 -50
- data/examples/account_identity_limit_eg.rb +0 -72
- data/examples/account_identity_token_eg.rb +0 -73
- data/examples/auth.rb +0 -55
- data/examples/cli.sh +0 -155
- data/examples/core_api_eg.rb +0 -50
- data/examples/historics_eg.rb +0 -66
- data/examples/historics_preview_eg.rb +0 -31
- data/examples/ingestion/twitter_gnip_batch_eg.rb +0 -61
- data/examples/live_stream_eg.rb +0 -92
- data/examples/managed_source_fb_eg.rb +0 -129
- data/examples/managed_source_ig_eg.rb +0 -126
- data/examples/pull.rb +0 -47
- data/examples/push_eg.rb +0 -51
- data/examples/pylon/pylon_api_v1.2_eg.rb +0 -166
- data/examples/pylon/pylon_api_v1.3_eg.rb +0 -201
- data/test/datasift/account/account_api_test.rb +0 -35
- data/test/datasift/core_api_test.rb +0 -179
- data/test/datasift/historics_preview_api_test.rb +0 -56
- data/test/datasift/odp/batch_upload_test.rb +0 -52
- data/test/datasift/push_api_test.rb +0 -238
- data/test/fixtures/cassettes/account/usage/default.json +0 -1
- data/test/fixtures/cassettes/account/usage/invalid.json +0 -1
- data/test/fixtures/cassettes/account/usage/valid_params.json +0 -1
- data/test/fixtures/cassettes/core/after_historic_dpu.json +0 -1
- data/test/fixtures/cassettes/core/balance_get.json +0 -1
- data/test/fixtures/cassettes/core/before_dpu.json +0 -1
- data/test/fixtures/cassettes/core/before_historic_dpu.json +0 -1
- data/test/fixtures/cassettes/core/compile_success.json +0 -1
- data/test/fixtures/cassettes/core/dpu_get_cost.json +0 -1
- data/test/fixtures/cassettes/core/dpu_throw_badrequest.json +0 -1
- data/test/fixtures/cassettes/core/historic_dpu.json +0 -1
- data/test/fixtures/cassettes/core/usage_success.json +0 -1
- data/test/fixtures/cassettes/core/validate_invalid_hash.json +0 -1
- data/test/fixtures/cassettes/core/validate_success_bool.json +0 -1
- data/test/fixtures/cassettes/core/validate_success_hash.json +0 -1
- data/test/fixtures/cassettes/odp/batch/after_upload.json +0 -1
- data/test/fixtures/cassettes/odp/batch/before_upload.json +0 -1
- data/test/fixtures/cassettes/odp/batch/upload_failure_no_source.json +0 -1
- data/test/fixtures/cassettes/odp/batch/upload_success.json +0 -1
- data/test/fixtures/cassettes/preview/before_preview_create.json +0 -1
- data/test/fixtures/cassettes/preview/before_preview_get.json +0 -1
- data/test/fixtures/cassettes/preview/preview_create_success.json +0 -1
- data/test/fixtures/cassettes/preview/preview_get_success.json +0 -1
- data/test/fixtures/cassettes/push/after_push_create.json +0 -1
- data/test/fixtures/cassettes/push/after_push_get.json +0 -1
- data/test/fixtures/cassettes/push/after_push_log.json +0 -1
- data/test/fixtures/cassettes/push/after_push_pause.json +0 -1
- data/test/fixtures/cassettes/push/after_push_resume.json +0 -1
- data/test/fixtures/cassettes/push/after_push_stop.json +0 -1
- data/test/fixtures/cassettes/push/after_push_update.json +0 -1
- data/test/fixtures/cassettes/push/before_push_create.json +0 -1
- data/test/fixtures/cassettes/push/before_push_delete.json +0 -1
- data/test/fixtures/cassettes/push/before_push_get.json +0 -1
- data/test/fixtures/cassettes/push/before_push_log.json +0 -1
- data/test/fixtures/cassettes/push/before_push_pause.json +0 -1
- data/test/fixtures/cassettes/push/before_push_resume.json +0 -1
- data/test/fixtures/cassettes/push/before_push_stop.json +0 -1
- data/test/fixtures/cassettes/push/before_push_update.json +0 -1
- data/test/fixtures/cassettes/push/push_create.json +0 -1
- data/test/fixtures/cassettes/push/push_delete.json +0 -1
- data/test/fixtures/cassettes/push/push_get_by_id.json +0 -1
- data/test/fixtures/cassettes/push/push_log_with_id.json +0 -1
- data/test/fixtures/cassettes/push/push_pause.json +0 -1
- data/test/fixtures/cassettes/push/push_resume.json +0 -1
- data/test/fixtures/cassettes/push/push_stop.json +0 -1
- data/test/fixtures/cassettes/push/push_update.json +0 -1
- data/test/fixtures/cassettes/push/push_validate.json +0 -1
- data/test/fixtures/data/fake_gnip_tweets.json +0 -10
- data/test/test_helper.rb +0 -49
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 28fe865c20b2b0580a481a1c40388e9f288ee650
+  data.tar.gz: 81355aa4c5a023e7642bc209749e9dd4422a68a2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cb529c831a844bcec7333bb4603943e3068a963707ee89c8764d76de0e9f8b57b650cc7b5be605117789ec175d29012eeef00493a4f87736f6055d46f53f1e60
+  data.tar.gz: 9a8b501082d89ad15ffa27735196ba2e0cf3cb9a1b195d0fa660176d6193f42d24cf1cac224b6026fb281a9784df509835da220451b2cace71e4f864fcfe8a02
data/CHANGELOG.md
CHANGED
@@ -7,6 +7,14 @@ CHANGELOG
 * Designed to make the most of DataSift's latest API version and features
 * Designed for Ruby 2.3+. Use features like keyword parameters across the board
 
+## v.3.8.0 (2017-05-23)
+### Added
+* Support for PYLON Task API
+* Ads explicit support for additional HTTP response codes
+
+### Changed
+* Uses API v1.5 by default
+
 ## v.3.7.2 (2016-11-08)
 ### Fixes
 * Uses correct timestamp params in PYLON Sample API calls. `start_time` -> `start` and `end_time` - `end`
data/datasift.gemspec
CHANGED
@@ -1,6 +1,9 @@
+$:.unshift(File.join(File.dirname(__FILE__), 'lib'))
+require 'version'
+
 Gem::Specification.new do |s|
   s.name = 'datasift'
-  s.version =
+  s.version = DataSift::VERSION
   s.authors = ['DataSift', 'Courtney Robinson', 'Jason Dugdale']
   s.email = ['support@datasift.com']
   s.description = %q{The official Ruby library for accessing the DataSift API. See http://datasift.com/ for full details and to sign up for an account.}
@@ -11,8 +14,8 @@ Gem::Specification.new do |s|
   s.platform = Gem::Platform::RUBY
   s.required_rubygems_version = '>= 1.3.5'
   s.required_ruby_version = '>= 2.0.0'
-  s.files =
-  s.test_files =
+  s.files = %w(.yardopts CHANGELOG.md README.md datasift.gemspec) + Dir['lib/**/*.rb']
+  s.test_files = s.files.grep(%r{^(test)/})
   s.require_paths = ["lib"]
 
   s.add_runtime_dependency 'rest-client', ['>= 1.8', '< 3.0']
data/lib/api/api_resource.rb
CHANGED
@@ -14,7 +14,7 @@ module DataSift
       config[:api_host] = 'api.datasift.com' unless config.has_key?(:api_host)
       config[:stream_host] = 'websocket.datasift.com' unless config.has_key?(:stream_host)
       config[:ingestion_host] = 'in.datasift.com' unless config.has_key?(:ingestion_host)
-      config[:api_version] = 'v1.
+      config[:api_version] = 'v1.5' unless config.has_key?(:api_version)
       config[:enable_ssl] = true unless config.has_key?(:enable_ssl)
 
       ssl_default = TLSv1_2
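The client now defaults to API v1.5. A minimal sketch of pinning a different version through the config hash, assuming the usual `DataSift::Client` constructor; the credentials are placeholders:

```ruby
require 'datasift'

# Placeholder credentials; :api_version overrides the new 'v1.5' default
config = {
  username:    'your_username',
  api_key:     'your_api_key',
  api_version: 'v1.3'
}
datasift = DataSift::Client.new(config)
```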
data/lib/datasift.rb
CHANGED
@@ -17,18 +17,19 @@ require dir + '/managed_source_auth'
 require dir + '/managed_source_resource'
 require dir + '/live_stream'
 require dir + '/pylon'
+require dir + '/tasks'
 require dir + '/account'
 require dir + '/account_identity'
 require dir + '/account_identity_token'
 require dir + '/account_identity_limit'
 require dir + '/odp'
+require dir + '/version'
 #
 require 'rbconfig'
 
 module DataSift
   #
   IS_WINDOWS = (RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/)
-  VERSION = File.open(File.join(File.dirname(__FILE__), '../') + '/VERSION').first
   KNOWN_SOCKETS = {}
   DETECT_DEAD_SOCKETS = true
   SOCKET_DETECTOR_TIMEOUT = 6.5
@@ -37,9 +38,16 @@ module DataSift
   HEAD = 'HEAD'.freeze
   DELETE = 'DELETE'.freeze
   APPLICATION_JSON = 'application/json'.freeze
+  # Rate limits
   X_RATELIMIT_LIMIT = 'x_ratelimit_limit'.freeze
   X_RATELIMIT_REMAINING = 'x_ratelimit_remaining'.freeze
   X_RATELIMIT_COST = 'x_ratelimit_cost'.freeze
+  X_TASKS_QUEUED = 'x_tasks_queued'.freeze
+  X_TASKS_QUEUE_LIMIT = 'x_tasks_queue_limit'.freeze
+  X_ANALYSIS_TASKS_QUEUE_LIMIT = 'x_analysis_tasks_queue_limit'.freeze
+  X_ANALYSIS_TASKS_QUEUED = 'x_analysis_tasks_queued'.freeze
+  X_INSIGHT_TASKS_QUEUE_LIMIT = 'x_insight_tasks_queue_limit'.freeze
+  X_INSIGHT_TASKS_QUEUED = 'x_insight_tasks_queued'.freeze
 
   Thread.new do
     while DETECT_DEAD_SOCKETS
@@ -71,6 +79,7 @@ module DataSift
      @managed_source_auth = DataSift::ManagedSourceAuth.new(config)
      @historics_preview = DataSift::HistoricsPreview.new(config)
      @pylon = DataSift::Pylon.new(config)
+     @task = DataSift::Task.new(config)
      @account = DataSift::Account.new(config)
      @account_identity = DataSift::AccountIdentity.new(config)
      @account_identity_token = DataSift::AccountIdentityToken.new(config)
@@ -78,9 +87,9 @@ module DataSift
      @odp = DataSift::Odp.new(config)
    end
 
-    attr_reader :historics, :push, :managed_source, :managed_source_resource,
-                :managed_source_auth, :historics_preview, :pylon, :account,
-                :
+    attr_reader :config, :historics, :push, :managed_source, :managed_source_resource,
+                :managed_source_auth, :historics_preview, :pylon, :account, :account_identity,
+                :account_identity_token, :account_identity_limit, :odp, :task
 
   # Checks if the syntax of the given CSDL is valid
   #
@@ -166,7 +175,7 @@ module DataSift
     url = build_url(path, config)
 
     headers.update(
-      :user_agent => "DataSift/#{config[:api_version]} Ruby/v#{VERSION}",
+      :user_agent => "DataSift/#{config[:api_version]} Ruby/v#{DataSift::VERSION}",
       :authorization => "#{config[:username]}:#{config[:api_key]}",
       :accept => '*/*'
     )
@@ -212,11 +221,7 @@ module DataSift
     end
     {
       :data => data,
-      :datasift => {
-        X_RATELIMIT_LIMIT => response.headers[:x_ratelimit_limit],
-        X_RATELIMIT_REMAINING => response.headers[:x_ratelimit_remaining],
-        X_RATELIMIT_COST => response.headers[:x_ratelimit_cost]
-      },
+      :datasift => build_headers(response.headers),
       :http => {
        :status => response.code,
        :headers => response.headers
@@ -240,11 +245,7 @@ module DataSift
     end
     response_on_error = {
       :data => nil,
-      :datasift => {
-        X_RATELIMIT_LIMIT => e.response.headers[:x_ratelimit_limit],
-        X_RATELIMIT_REMAINING => e.response.headers[:x_ratelimit_remaining],
-        X_RATELIMIT_COST => e.response.headers[:x_ratelimit_cost]
-      },
+      :datasift => build_headers(e.response.headers),
       :http => {
        :status => e.response.code,
        :headers => e.response.headers
@@ -262,6 +263,20 @@ module DataSift
     end
   end
 
+  # Only to be used for building URI paths for /pylon API calls. API v1.4+ requires a 'service'
+  # param to be passed as part of the URI. This checks the API version, and adds the service
+  # if necessary
+  def build_path(service, path, config)
+    # We need to add the service param to PYLON API URLs for API v1.4+
+    if config[:api_version].split('v')[1].to_f >= 1.4
+      split_path = path.split('/')
+      path = split_path[0] + '/' + service + '/' + split_path[1]
+    end
+    puts path
+
+    return path
+  end
+
   private
 
   def self.build_url(path, config)
@@ -287,14 +302,36 @@ module DataSift
     params.collect { |param, value| [param, CGI.escape(value.to_s)].join('=') }.join('&')
   end
 
+  def self.build_headers(headers)
+    # rest_client downcases, and replaces hyphens in headers with underscores. Actual headers
+    # returned by DS API can be found at:
+    # http://dev.datasift.com/docs/platform/api/rest-api/api-rate-limiting
+    response = {}
+    response.merge!(X_TASKS_QUEUED => headers[:x_tasks_queued]) if headers.key?(:x_tasks_queued)
+    response.merge!(X_TASKS_QUEUE_LIMIT => headers[:x_tasks_queue_limit]) if headers.key?(:x_tasks_queue_limit)
+    response.merge!(X_ANALYSIS_TASKS_QUEUE_LIMIT => headers[:x_analysis_tasks_queue_limit]) if headers.key?(:x_analysis_tasks_queue_limit)
+    response.merge!(X_ANALYSIS_TASKS_QUEUED => headers[:x_analysis_tasks_queued]) if headers.key?(:x_analysis_tasks_queued)
+    response.merge!(X_INSIGHT_TASKS_QUEUE_LIMIT => headers[:x_insight_tasks_queue_limit]) if headers.key?(:x_insight_tasks_queue_limit)
+    response.merge!(X_INSIGHT_TASKS_QUEUED => headers[:x_insight_tasks_queued]) if headers.key?(:x_insight_tasks_queued)
+    response.merge!(
+      X_RATELIMIT_LIMIT => headers[:x_ratelimit_limit],
+      X_RATELIMIT_REMAINING => headers[:x_ratelimit_remaining],
+      X_RATELIMIT_COST => headers[:x_ratelimit_cost]
+    )
+  end
+
   def self.handle_api_error(code, body, response)
     case code
     when 400
       raise BadRequestError.new(code, body, response)
     when 401
       raise AuthError.new(code, body, response)
+    when 403
+      raise ForbiddenError.new(code, body, response)
     when 404
       raise ApiResourceNotFoundError.new(code, body, response)
+    when 405
+      raise MethodNotAllowedError.new(code, body, response)
    when 409
      raise ConflictError.new(code, body, response)
    when 410
@@ -413,7 +450,7 @@ module DataSift
             "encountered. As a result no further re-connection will be automatically " \
             "attempted. Manually invoke connect() after investigating the cause of the " \
             "failure, be sure to observe DataSift's re-connect policies available at " \
-            "
+            "https://dev.datasift.com/docs/platform/api/streaming-api/reconnecting - Error {#{message}}"))
          end
        else
          sleep config[:retry_timeout]
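With `build_headers` in place, the `:datasift` key of every response carries the rate-limit headers and, when present, the new task-queue headers. A minimal sketch of reading them, assuming `valid?` with `boolResponse = false` returns the full response object; the credentials and CSDL are placeholders:

```ruby
datasift = DataSift::Client.new(username: 'your_username', api_key: 'your_api_key')

response = datasift.valid?('interaction.content contains "ruby"', false)
puts response[:http][:status]
puts response[:datasift]['x_ratelimit_remaining']
# Task-queue headers such as 'x_tasks_queued' only appear when the API returns them
```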
data/lib/errors.rb
CHANGED
@@ -36,10 +36,18 @@ end
 class ConnectionError < DataSiftError
 end
 
+# Standard error returned when receiving a 403 response from the API
+class ForbiddenError < DataSiftError
+end
+
 # Standard error returned when receiving a 404 response from the API
 class ApiResourceNotFoundError < DataSiftError
 end
 
+# Standard error returned when receiving a 405 response from the API
+class MethodNotAllowedError < DataSiftError
+end
+
 # Standard error returned when receiving a 409 response from the API
 class ConflictError < DataSiftError
 end
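A sketch of handling the new error classes, assuming a `datasift` client built as in the earlier example; the error classes appear to live at the top level, as in the diff above, and the CSDL is a placeholder:

```ruby
begin
  datasift.pylon.compile('fb.content contains "ruby"') # placeholder CSDL
rescue ForbiddenError => e
  puts "403 Forbidden: #{e.message}"
rescue MethodNotAllowedError => e
  puts "405 Method Not Allowed: #{e.message}"
rescue DataSiftError => e
  puts "Other API error: #{e.message}"
end
```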
data/lib/live_stream.rb
CHANGED
@@ -1,10 +1,10 @@
 module DataSift
   class LiveStream < DataSift::ApiResource
 
-    @stream
+    @stream = nil
     @on_datasift_message = lambda {}
 
-    def initialize
+    def initialize(config, stream)
       super(config)
       @stream = stream
       @retry_timeout = 0
@@ -30,11 +30,11 @@ module DataSift
        hash = message[:hash]
      end
      message.merge!({
-
-
-
-
-
+        :is_failure => message[:status] == 'failure',
+        :is_success => message[:status] == 'success',
+        :is_warning => message[:status] == 'warning',
+        :is_tick => message[:status] == 'connected'
+      })
      @on_datasift_message.call(self, message, hash)
    end
 
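The flags merged into each message let an `on_datasift_message` handler branch on the message type. A rough sketch of such a handler; the block signature mirrors the `@on_datasift_message.call(self, message, hash)` invocation above, and how the handler is registered is not shown here:

```ruby
on_datasift_message = lambda do |stream, message, hash|
  if message[:is_failure]
    puts "DataSift failure message: #{message}"
  elsif message[:is_tick]
    puts 'Connection tick received'
  end
end
```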
data/lib/pylon.rb
CHANGED
@@ -6,39 +6,48 @@ module DataSift
     #
     # @param csdl [String] CSDL you wish to validate
     # @param boolResponse [Boolean] True if you want a boolean response.
+    # @param service [String] The PYLON service to make this API call against
     # False if you want the full response object
     # @return [Boolean, Object] Dependent on value of boolResponse
-    def valid?(csdl = '', boolResponse = true)
+    def valid?(csdl = '', boolResponse = true, service = 'facebook')
       fail BadParametersError, 'csdl is required' if csdl.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = { csdl: csdl }
 
-      res = DataSift.request(:POST, 'pylon/validate', @config, params)
+      res = DataSift.request(:POST, build_path(service, 'pylon/validate', @config), @config, params)
       boolResponse ? res[:http][:status] == 200 : res
     end
 
     # Compile PYLON CSDL by making an /pylon/compile API call
     #
     # @param csdl [String] CSDL you wish to compile
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def compile(csdl)
+    def compile(csdl, service = 'facebook')
       fail BadParametersError, 'csdl is required' if csdl.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = { csdl: csdl }
 
-      DataSift.request(:POST, 'pylon/compile', @config, params)
+      DataSift.request(:POST, build_path(service, 'pylon/compile', @config), @config, params)
     end
 
     # Perform /pylon/get API call to query status of your PYLON recordings
     #
     # @param hash [String] Hash you with the get the status for
     # @param id [String] The ID of the PYLON recording to get
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def get(hash = '', id = '')
+    def get(hash = '', id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(id: id) unless id.empty?
 
-      DataSift.request(:GET, 'pylon/get', @config, params)
+      DataSift.request(:GET, build_path(service, 'pylon/get', @config), @config, params)
     end
 
     # Perform /pylon/get API call to list all PYLON Recordings
@@ -48,15 +57,18 @@ module DataSift
     # @param order_by [String, Symbol] Which field to sort results by
     # @param order_dir [String, Symbol] Order results in ascending or descending
     # order
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def list(page = nil, per_page = nil, order_by = '', order_dir = '')
+    def list(page = nil, per_page = nil, order_by = '', order_dir = '', service = 'facebook')
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(page: page) unless page.nil?
       params.merge!(per_page: per_page) unless per_page.nil?
       params.merge!(order_by: order_by) unless order_by.empty?
       params.merge!(order_dir: order_dir) unless order_dir.empty?
 
-      DataSift.request(:GET, 'pylon/get', @config, params)
+      DataSift.request(:GET, build_path(service, 'pylon/get', @config), @config, params)
     end
 
     # Perform /pylon/update API call to update a given PYLON Recording
@@ -64,13 +76,16 @@ module DataSift
     # @param id [String] The ID of the PYLON recording to update
     # @param hash [String] The CSDL filter hash this recording should be subscribed to
     # @param name [String] Update the name of your recording
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def update(id, hash = '', name = '')
-
+    def update(id, hash = '', name = '', service = 'facebook')
+      fail BadParametersError, 'service is required' if service.empty?
+
+      params = { id: id }
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(name: name) unless name.empty?
 
-      DataSift.request(:PUT, 'pylon/update', @config, params)
+      DataSift.request(:PUT, build_path(service, 'pylon/update', @config), @config, params)
     end
 
     # Start recording a PYLON filter by making an /pylon/start API call
@@ -79,15 +94,18 @@ module DataSift
     # @param name [String] Give your recording a name. Required when starting a
     # @param id [String] ID of the recording you wish to start
     # new recording
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def start(hash = '', name = '', id = '')
+    def start(hash = '', name = '', id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(name: name) unless name.empty?
       params.merge!(id: id) unless id.empty?
 
-      DataSift.request(:PUT, 'pylon/start', @config, params)
+      DataSift.request(:PUT, build_path(service, 'pylon/start', @config), @config, params)
     end
 
     # Restart an existing PYLON recording by making an /pylon/start API call with a recording ID
@@ -95,27 +113,33 @@ module DataSift
     # @param id [String] CSDL you wish to begin (or resume) recording
     # @param name [String] Give your recording a name. Required when starting a
     # new recording
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def restart(id, name = '')
+    def restart(id, name = '', service = 'facebook')
       fail BadParametersError, 'id is required' if id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = { id: id }
       params.merge!(name: name) unless name.empty?
 
-      DataSift.request(:PUT, 'pylon/start', @config, params)
+      DataSift.request(:PUT, build_path(service, 'pylon/start', @config), @config, params)
     end
 
     # Stop an active PYLON recording by making an /pylon/stop API call
     #
     # @param hash [String] CSDL you wish to stop recording
     # @param id [String] ID of the recording you wish to stop
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def stop(hash = '', id = '')
+    def stop(hash = '', id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(id: id) unless id.empty?
 
-      DataSift.request(:PUT, 'pylon/stop', @config, params)
+      DataSift.request(:PUT, build_path(service, 'pylon/stop', @config), @config, params)
     end
 
     # Perform a PYLON analysis query by making an /pylon/analyze API call
@@ -130,10 +154,13 @@ module DataSift
     # @param start_time [Integer] Optional start timestamp for filtering by date
     # @param end_time [Integer] Optional end timestamp for filtering by date
     # @param id [String] ID of the recording you wish to analyze
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def analyze(hash = '', parameters = '', filter = '', start_time = nil, end_time = nil, id = '')
+    def analyze(hash = '', parameters = '', filter = '', start_time = nil, end_time = nil, id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
       fail BadParametersError, 'parameters is required' if parameters.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = { parameters: parameters }
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(id: id) unless id.empty?
@@ -141,7 +168,7 @@ module DataSift
       params.merge!(start: start_time) unless start_time.nil?
       params.merge!(end: end_time) unless end_time.nil?
 
-      DataSift.request(:POST, 'pylon/analyze', @config, params)
+      DataSift.request(:POST, build_path(service, 'pylon/analyze', @config), @config, params)
     end
 
     # Query the tag hierarchy on interactions populated by a particular
@@ -149,14 +176,17 @@ module DataSift
     #
     # @param hash [String] Hash of the recording you wish to query
     # @param id [String] ID of the recording you wish to query
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def tags(hash = '', id = '')
+    def tags(hash = '', id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(id: id) unless id.empty?
 
-      DataSift.request(:GET, 'pylon/tags', @config, params)
+      DataSift.request(:GET, build_path(service, 'pylon/tags', @config), @config, params)
     end
 
     # Hit the PYLON Sample endpoint to pull public sample data from a PYLON recording
@@ -167,21 +197,24 @@ module DataSift
     # @param end_time [Integer] Optional end timestamp for filtering by date
     # @param filter [String] Optional PYLON CSDL for a query filter
     # @param id [String] ID of the recording you wish to sample
+    # @param service [String] The PYLON service to make this API call against
     # @return [Object] API reponse object
-    def sample(hash = '', count = nil, start_time = nil, end_time = nil, filter = '', id = '')
+    def sample(hash = '', count = nil, start_time = nil, end_time = nil, filter = '', id = '', service = 'facebook')
       fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+      fail BadParametersError, 'service is required' if service.empty?
+
       params = {}
       params.merge!(hash: hash) unless hash.empty?
       params.merge!(id: id) unless id.empty?
       params.merge!(count: count) unless count.nil?
-      params.merge!(
-      params.merge!(
+      params.merge!(start_time: start_time) unless start_time.nil?
+      params.merge!(end_time: end_time) unless end_time.nil?
 
       if filter.empty?
-        DataSift.request(:GET, 'pylon/sample', @config, params)
+        DataSift.request(:GET, build_path(service, 'pylon/sample', @config), @config, params)
       else
         params.merge!(filter: filter)
-        DataSift.request(:POST, 'pylon/sample', @config, params)
+        DataSift.request(:POST, build_path(service, 'pylon/sample', @config), @config, params)
       end
     end
   end