datasift 3.4.0 → 3.5.0

Files changed (72)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +1 -1
  3. data/.travis.yml +9 -0
  4. data/CHANGELOG.md +14 -3
  5. data/Gemfile +1 -1
  6. data/VERSION +1 -1
  7. data/datasift.gemspec +4 -4
  8. data/examples/account_eg.rb +33 -0
  9. data/examples/account_identity_eg.rb +10 -8
  10. data/examples/account_identity_limit_eg.rb +10 -8
  11. data/examples/account_identity_token_eg.rb +5 -2
  12. data/examples/auth.rb +28 -29
  13. data/examples/core_api_eg.rb +1 -1
  14. data/examples/live_stream_eg.rb +3 -3
  15. data/examples/pull.rb +23 -20
  16. data/examples/push_eg.rb +9 -8
  17. data/examples/pylon_eg.rb +10 -1
  18. data/lib/account.rb +17 -1
  19. data/lib/api/api_resource.rb +4 -2
  20. data/lib/datasift.rb +32 -19
  21. data/lib/errors.rb +16 -0
  22. data/lib/odp.rb +6 -2
  23. data/lib/pylon.rb +23 -0
  24. data/test/datasift/account/account_api_test.rb +35 -0
  25. data/test/fixtures/cassettes/account/usage/default.json +1 -0
  26. data/test/fixtures/cassettes/account/usage/invalid.json +1 -0
  27. data/test/fixtures/cassettes/account/usage/valid_params.json +1 -0
  28. data/test/fixtures/cassettes/core/after_historic_dpu.json +1 -1
  29. data/test/fixtures/cassettes/core/balance_get.json +1 -1
  30. data/test/fixtures/cassettes/core/before_dpu.json +1 -1
  31. data/test/fixtures/cassettes/core/before_historic_dpu.json +1 -1
  32. data/test/fixtures/cassettes/core/compile_success.json +1 -1
  33. data/test/fixtures/cassettes/core/dpu_get_cost.json +1 -1
  34. data/test/fixtures/cassettes/core/dpu_throw_badrequest.json +1 -1
  35. data/test/fixtures/cassettes/core/historic_dpu.json +1 -1
  36. data/test/fixtures/cassettes/core/usage_success.json +1 -1
  37. data/test/fixtures/cassettes/core/validate_invalid_hash.json +1 -1
  38. data/test/fixtures/cassettes/core/validate_success_bool.json +1 -1
  39. data/test/fixtures/cassettes/core/validate_success_hash.json +1 -1
  40. data/test/fixtures/cassettes/odp/batch/after_upload.json +1 -1
  41. data/test/fixtures/cassettes/odp/batch/before_upload.json +1 -1
  42. data/test/fixtures/cassettes/odp/batch/upload_failure_no_source.json +1 -1
  43. data/test/fixtures/cassettes/odp/batch/upload_success.json +1 -1
  44. data/test/fixtures/cassettes/preview/before_preview_create.json +1 -1
  45. data/test/fixtures/cassettes/preview/before_preview_get.json +1 -1
  46. data/test/fixtures/cassettes/preview/preview_create_success.json +1 -1
  47. data/test/fixtures/cassettes/preview/preview_get_success.json +1 -1
  48. data/test/fixtures/cassettes/push/after_push_create.json +1 -1
  49. data/test/fixtures/cassettes/push/after_push_get.json +1 -1
  50. data/test/fixtures/cassettes/push/after_push_log.json +1 -1
  51. data/test/fixtures/cassettes/push/after_push_pause.json +1 -1
  52. data/test/fixtures/cassettes/push/after_push_resume.json +1 -1
  53. data/test/fixtures/cassettes/push/after_push_stop.json +1 -1
  54. data/test/fixtures/cassettes/push/after_push_update.json +1 -1
  55. data/test/fixtures/cassettes/push/before_push_create.json +1 -1
  56. data/test/fixtures/cassettes/push/before_push_delete.json +1 -1
  57. data/test/fixtures/cassettes/push/before_push_get.json +1 -1
  58. data/test/fixtures/cassettes/push/before_push_log.json +1 -1
  59. data/test/fixtures/cassettes/push/before_push_pause.json +1 -1
  60. data/test/fixtures/cassettes/push/before_push_resume.json +1 -1
  61. data/test/fixtures/cassettes/push/before_push_stop.json +1 -1
  62. data/test/fixtures/cassettes/push/before_push_update.json +1 -1
  63. data/test/fixtures/cassettes/push/push_create.json +1 -1
  64. data/test/fixtures/cassettes/push/push_delete.json +1 -1
  65. data/test/fixtures/cassettes/push/push_get_by_id.json +1 -1
  66. data/test/fixtures/cassettes/push/push_log_with_id.json +1 -1
  67. data/test/fixtures/cassettes/push/push_pause.json +1 -1
  68. data/test/fixtures/cassettes/push/push_resume.json +1 -1
  69. data/test/fixtures/cassettes/push/push_stop.json +1 -1
  70. data/test/fixtures/cassettes/push/push_update.json +1 -1
  71. data/test/fixtures/cassettes/push/push_validate.json +1 -1
  72. metadata +15 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 231dfbf3fbb478090969896e459179651d9aab7d
-  data.tar.gz: dc955abee233f6cb82a5765065796563a89ef268
+  metadata.gz: 2fead364f5c1631d28e826b6ab3a1bc0feaf570b
+  data.tar.gz: 66b66356edcf1446051e3fc4fd8e35c72f5bf561
 SHA512:
-  metadata.gz: 5adc6aa0777edfef9835b08e1e133a63805cf10e926ab67fda130cca85ca79509e466d91b1c7eb21affa44f6829dd7fdc1c2807e0ae66666581c44aa532b3161
-  data.tar.gz: 7074591477540122765b3c91bd3dec1dbd1f2d65371277fb59b24a523455e791f44c21d2fa2572fa4abe04ce21f20a52203485bab10957bc349b8d1ee39ac8b3
+  metadata.gz: f8a5b9f74d8c5bb4d00271b921b1167e9ab2be70b254837042e368340d771546d995f83870ba9e743c2d17f40ac1f7824aef37d10291382ab369649c99ac26f3
+  data.tar.gz: 0a032a3711190b06f10ac7c3c1cc288ae4b4dd4af46eb0b8c7c2a006fa74b372c2406dc80b215654a725fc3026d67dbb198011311e3c3c6840530940ce1c508c
data/.rubocop_todo.yml CHANGED
@@ -43,7 +43,7 @@ Metrics/CyclomaticComplexity:
 # Offense count: 118
 # Configuration parameters: AllowURI, URISchemes.
 LineLength:
-  Max: 90
+  Max: 100
 
 # Offense count: 33
 # Configuration parameters: CountComments.
data/.travis.yml CHANGED
@@ -1,7 +1,16 @@
 language: ruby
 sudo: false
+cache: bundler
+bundler_args: --without development --retry=3 --jobs=3
+
 rvm:
   - 1.9.3
   - 2.0.0
   - 2.1
   - 2.2
+  - ruby-head
+
+matrix:
+  allow_failures:
+    - ruby-head
+  fast_finish: true
data/CHANGELOG.md CHANGED
@@ -1,10 +1,21 @@
 CHANGELOG
 ================================
+## v.3.5.0 (2015-11-13)
+### Added
+* Support for the [/account/usage](http://dev.datasift.com/pylon/docs/api/acct-api-endpoints/get-accountusage) API endpoint
+* Added explicit support for 412, 415, 503 and 504 HTTP error responses
+* Support for the [/pylon/sample](http://dev.datasift.com/pylon/docs/api/pylon-api-endpoints/pylonsample) API endpoint. Full details about the feature can be found in our [platform release notes](http://community.datasift.com/t/pylon-1-6-release-notes/1859)
+
+### Changed
+* Only set ```Content-Type``` HTTP header for POST/PUT requests; it's not necessary unless we are passing a request entity
+* Teased out some minor performance enhancements by allocating fewer objects on each request
+* Loosen some Gem dependancies. Successfully tested against [rest-client](https://github.com/rest-client/rest-client) v2.0.0
+
 ## v.3.4.0 (2015-08-20)
 ### Added
-* Support for [Open Data Processing](https://datasift.com/products/open-data-processing-for-twitter/) batch uploads (Thanks @giovannelli)
-* Explicit supprot for 413 and 422 errors from API
-* Ability to get at API response headers using the ```object.response``` accessor. (Thanks again @giovannelli)
+* Support for [Open Data Processing](https://datasift.com/products/open-data-processing-for-twitter/) batch uploads (Thanks [@giovannelli](https://github.com/giovannelli))
+* Explicit support for 413 and 422 errors from API
+* Ability to get at API response headers using the ```object.response``` accessor. (Thanks again [@giovannelli](https://github.com/giovannelli))
 
 ### Changed
 * Bumped [rest-client](https://github.com/rest-client/rest-client) dependency to ~> 1.8
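The two endpoints added in 3.5.0 are exercised by the example files touched in this diff (data/examples/account_eg.rb and data/examples/pylon_eg.rb). A minimal usage sketch is below: the account.usage call mirrors the new account_eg.rb example further down, while the pylon.sample method name and argument order are an assumption and should be checked against lib/pylon.rb in this release.

    require 'datasift'
    require 'json'

    # Hypothetical quick-start for the 3.5.0 additions; the credentials and the
    # recording hash are placeholders, not real values.
    datasift = DataSift::Client.new(username: 'DATASIFT_USERNAME', api_key: 'DATASIFT_API_KEY')

    # New /account/usage endpoint (period argument as used in account_eg.rb)
    puts datasift.account.usage('monthly')[:data].to_json

    # New /pylon/sample endpoint; exact signature assumed, verify against lib/pylon.rb
    puts datasift.pylon.sample('YOUR_RECORDING_HASH', 10)[:data].to_json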
data/Gemfile CHANGED
@@ -7,7 +7,7 @@ group :test do
   gem 'minitest', '~> 5.0'
   gem 'rubocop', '>= 0.27'
   gem 'simplecov', '>= 0.9'
-  gem 'shoulda', '~> 2.11'
+  gem 'shoulda', '>= 2.11'
   gem 'vcr', '~> 2.9'
   gem 'webmock'
 end
data/VERSION CHANGED
@@ -1 +1 @@
-3.4.0
+3.5.0
data/datasift.gemspec CHANGED
@@ -15,8 +15,8 @@ Gem::Specification.new do |s|
   s.test_files = `git ls-files -- {test}/*`.split("\n")
   s.require_paths = ["lib"]
 
-  s.add_runtime_dependency('rest-client', '~> 1.8')
-  s.add_runtime_dependency('multi_json', '~> 1.7')
-  s.add_runtime_dependency('websocket-td', '~> 0.0.5')
-  s.add_development_dependency('bundler', '~> 1.0')
+  s.add_runtime_dependency 'rest-client', ['>= 1.8', '< 3.0']
+  s.add_runtime_dependency 'multi_json', '~> 1.7'
+  s.add_runtime_dependency 'websocket-td', '~> 0.0.5'
+  s.add_development_dependency 'bundler', '~> 1.0'
 end
data/examples/account_eg.rb CHANGED
@@ -0,0 +1,33 @@
+require './auth'
+class AccountEg < DataSiftExample
+  def initialize
+    super
+    @datasift = DataSift::Client.new(@config)
+    run
+  end
+
+  def run
+    begin
+      puts "Get account usage for the default period"
+      puts @datasift.account.usage[:data].to_json
+
+      puts "\nGet account usage for the past month"
+      puts @datasift.account.usage('monthly')[:data].to_json
+
+    rescue DataSiftError => dse
+      puts dse.message
+      # Then match specific error to take action;
+      # All errors thrown by the client extend DataSiftError
+      case dse
+      when ConnectionError
+        # some connection error
+      when AuthError
+      when BadRequestError
+      else
+        # do something else...
+      end
+    end
+  end
+end
+
+AccountEg.new
data/examples/account_identity_eg.rb CHANGED
@@ -10,27 +10,27 @@ class AccountIdentityEg < DataSiftExample
     begin
       puts "Create a new identity"
       identity = @datasift.account_identity.create(
-        "Ruby Identity", "active", false
+        "Ruby Identity #{DateTime.now}", "active", false
       )
       identity_id = identity[:data][:id]
-      puts identity.to_json
+      puts identity[:data].to_json
 
       puts "\nList all existing identities"
-      puts @datasift.account_identity.list.to_json
+      puts @datasift.account_identity.list[:data].to_json
 
       puts "\nGet existing identity"
-      puts @datasift.account_identity.get(identity_id).to_json
+      puts @datasift.account_identity.get(identity_id)[:data].to_json
 
       puts "\nUpdate an identity"
       puts @datasift.account_identity.update(
-        identity_id, 'Updated Ruby Identity'
-      ).to_json
+        identity_id, "Updated Ruby Identity #{DateTime.now}"
+      )[:data].to_json
 
       puts "\nDelete an identity"
-      puts @datasift.account_identity.delete(identity_id).to_json
+      @datasift.account_identity.delete(identity_id)
 
     rescue DataSiftError => dse
-      puts dse.message
+      puts dse.inspect
       # Then match specific error to take action;
       # All errors thrown by the client extend DataSiftError
       case dse
@@ -41,6 +41,8 @@ class AccountIdentityEg < DataSiftExample
       else
         # do something else...
       end
+      puts "\nClean up and delete the identity"
+      @datasift.account_identity.delete(identity_id)
     end
   end
 end
data/examples/account_identity_limit_eg.rb CHANGED
@@ -13,44 +13,44 @@ class AccountIdentityLimitEg < DataSiftExample
         "Ruby Identity for Token Limits", "active", false
       )
       identity_id = identity[:data][:id]
-      puts identity.to_json
+      puts identity[:data].to_json
 
       puts "\nCreate a Limit for our Identity"
       puts @datasift.account_identity_limit.create(
         identity_id,
         'facebook',
         100_000
-      )
+      )[:data].to_json
 
       puts "\nList all existing Limits for this Service"
       puts @datasift.account_identity_limit.list(
         'facebook'
-      ).to_json
+      )[:data].to_json
 
       puts "\nGet existing Limit by Identity and Service"
       puts @datasift.account_identity_limit.get(
         identity_id,
         'facebook'
-      ).to_json
+      )[:data].to_json
 
       puts "\nUpdate a Limit for a given Identity"
      puts @datasift.account_identity_limit.update(
         identity_id,
         'facebook',
         250_000
-      ).to_json
+      )[:data].to_json
 
       puts "\nRemove the Limit from a given Identity and Service"
-      puts @datasift.account_identity_limit.delete(
+      @datasift.account_identity_limit.delete(
         identity_id,
         'facebook'
-      ).to_json
+      )
 
       puts "\nCleanup and remove the Identity"
       @datasift.account_identity.delete(identity_id)
 
     rescue DataSiftError => dse
-      puts dse.message
+      puts dse.inspect
       # Then match specific error to take action;
       # All errors thrown by the client extend DataSiftError
       case dse
@@ -61,6 +61,8 @@ class AccountIdentityLimitEg < DataSiftExample
       else
         # do something else...
       end
+      puts "\nCleanup and remove the Identity"
+      @datasift.account_identity.delete(identity_id)
     end
   end
 end
data/examples/account_identity_token_eg.rb CHANGED
@@ -10,7 +10,7 @@ class AccountIdentityTokenEg < DataSiftExample
     begin
       puts "Create a new identity to create tokens for"
       identity = @datasift.account_identity.create(
-        "Ruby Identity for Tokens",
+        "Ruby Identity for Tokena",
         "active",
         false
       )
@@ -52,7 +52,7 @@ class AccountIdentityTokenEg < DataSiftExample
       @datasift.account_identity.delete(identity_id)
 
     rescue DataSiftError => dse
-      puts dse.message
+      puts dse.inspect
       # Then match specific error to take action;
       # All errors thrown by the client extend DataSiftError
       case dse
@@ -60,9 +60,12 @@ class AccountIdentityTokenEg < DataSiftExample
         # some connection error
       when AuthError
       when BadRequestError
+        puts '[WARNING] You will need to use a valid token to run through this example'
       else
         # do something else...
       end
+      puts "\nCleanup and remove the Identity"
+      @datasift.account_identity.delete(identity_id)
     end
   end
 end
data/examples/auth.rb CHANGED
@@ -3,29 +3,29 @@ class DataSiftExample
 
   def initialize
     @config = {
-      :username => 'DATASIFT_USERNAME',
-      :api_key => 'DATASIFT_API_KEY',
-      :api_version => 'v1.2'
+      username: 'DATASIFT_USERNAME',
+      api_key: 'DATASIFT_API_KEY',
+      api_version: 'v1.2'
     }
     @params = {
-      :output_type => 's3',
-      :output_params => {
-        :bucket => 'YOUR_BUCKET_NAME',
-        :directory => 'ruby',
-        :acl => 'private',
-        :delivery_frequency => 0,
-        :max_size => 104857600,
-        :file_prefix => 'DataSift',
-        :auth => {
-          :access_key => 'ADD_YOUR_ACCESS_KEY',
-          :secret_key => 'ADD_YOUR_SECRET_KEY',
+      output_type: 's3',
+      output_params: {
+        bucket: 'YOUR_BUCKET_NAME',
+        directory: 'ruby',
+        acl: 'private',
+        delivery_frequency: 0,
+        max_size: 104857600,
+        file_prefix: 'DataSift',
+        auth: {
+          access_key: 'ADD_YOUR_ACCESS_KEY',
+          secret_key: 'ADD_YOUR_SECRET_KEY',
         }
       }
     }
     @pull_params = {
-      :output_type => 'pull',
-      :output_params => {
-        :max_size => 52428800
+      output_type: 'pull',
+      output_params: {
+        max_size: 52428800
       }
     }
     @datasift = DataSift::Client.new(@config)
@@ -35,22 +35,21 @@ class DataSiftExample
 
   def create_push(hash, is_historics_id = false)
     create_params = @params.merge ({
-      #hash or historics_id can be used but not both
-      :name => 'My awesome push subscription',
-      :initial_status => 'active', # or 'paused' or 'waiting_for_start'
+      # Hash or historics_id can be used, but not both
+      name: 'My awesome push subscription',
+      initial_status: 'active', # or 'paused' or 'waiting_for_start'
     })
     if is_historics_id
-      create_params.merge!({:historics_id => hash})
+      create_params.merge!(historics_id: hash)
     else
-      create_params.merge!({:hash => hash,
-                            #start and end are not valid for historics
-                            :start => Time.now.to_i,
-                            :end => Time.now.to_i + 320
-                           })
+      # Start and end are not valid for historics
+      create_params.merge!(
+        hash: hash,
+        start: Time.now.to_i,
+        end: Time.now.to_i + 320
+      )
     end
     puts 'Creating subscription'
-    subscription = @datasift.push.create create_params
-    puts 'Create push => ' + subscription.to_s
-    subscription
+    @datasift.push.create create_params
   end
 end
data/examples/core_api_eg.rb CHANGED
@@ -35,7 +35,7 @@ class CoreApiEg < DataSiftExample
 
     rescue DataSiftError => dse
       puts dse.message
-      # Then match specific one to take action - All errors thrown by the client extend DataSiftError
+      # Then match specific one to take action; All errors thrown by the client extend DataSiftError
      case dse
      when ConnectionError
        # some connection error
data/examples/live_stream_eg.rb CHANGED
@@ -68,15 +68,15 @@ class StreamingApi < DataSiftExample
       puts "DataSift Message #{hash} ==> #{message}"
     end
 
-    conn = DataSift::new_stream(@config, on_delete, on_error, on_connect, on_close)
+    conn = DataSift::new_stream(@config, on_delete, on_error, on_connect, on_close)
     conn.on_datasift_message = on_datasift_message
     #can do something else here now...
     puts 'Do some other business stuff...'
     conn.stream.read_thread.join
   #rescue DataSiftError
   rescue DataSiftError => dse
-    puts "Error #{dse.message}"
-    # Then match specific one to take action - All errors thrown by the client extend DataSiftError
+    puts dse.inspect
+    # Then match specific one to take action; All errors thrown by the client extend DataSiftError
     case dse
     when ConnectionError
       # some connection error
data/examples/pull.rb CHANGED
@@ -6,34 +6,37 @@ class PushApi < DataSiftExample
 
   def run
     begin
-      @params = {:output_type => 'pull'}
+      @params = {output_type: 'pull'}
       puts 'Validating the Pull subscription'
-      if @datasift.push.valid? @params
-        stream = @datasift.compile 'interaction.content contains "music"'
-        subscription = create_push(stream[:data][:hash])
+      fail InvalidParamError unless @datasift.push.valid? @params
 
-        subscription_id = subscription[:data][:id]
-        #pull a bunch of interactions from the push queue - only work if we had set the output_type above to pull
-        #pull @datasift.pull subscription_id
+      stream = @datasift.compile 'interaction.content contains "music"'
+      subscription = create_push(stream[:data][:hash])
 
-        puts "\nPulling data a first time, then waiting 10 seconds"
-        @datasift.push.pull(subscription_id).each { |e| puts e }
-
-        sleep 10
+      subscription_id = subscription[:data][:id]
+      # Pull a bunch of interactions from the push queue. This only works if we had set the
+      # output_type above to "pull"
 
-        puts "\nPulling data a second time, then waiting 10 seconds"
+      2.times do
+        puts "\nPulling data, then waiting 10 seconds"
         @datasift.push.pull(subscription_id).each { |e| puts e }
-
         sleep 10
+      end
 
-      puts "\nPulling data the third and final time time"
-      #passing a lambda is more efficient because it is executed once for each interaction received
-      #this saves having to iterate over the array returned so the same iteration isn't done twice
-      @datasift.push.pull(subscription_id, 20971520, '', lambda{ |e| puts "on_message => #{e}" })
+      puts "\nPulling data the third and final time time"
+      # Passing a lambda is more efficient because it is executed once for each interaction
+      # received this saves having to iterate over the array returned so the same iteration
+      # isn't processed twice
+      @datasift.push.pull(
+        subscription_id,
+        20_971_520,
+        '',
+        lambda{ |e| puts "on_message => #{e}" }
+      )
+
+      puts "\nDeleting the Pull subscription"
+      @datasift.push.delete subscription_id
 
-        puts "\nDeleting the Pull subscription"
-        @datasift.push.delete subscription_id
-      end
     #rescue DataSiftError
     rescue DataSiftError => dse
       puts dse.inspect
data/examples/push_eg.rb CHANGED
@@ -8,14 +8,17 @@ class PushApi < DataSiftExample
   def run
     begin
       puts 'Creating Push subscription'
-      subscription = @datasift.push.create @params.merge(hash: '54dbfc8464258de162b7f1a057e630c5', name: 'Ruby Client Example')
+      subscription = @datasift.push.create @params.merge(
+        hash: '54dbfc8464258de162b7f1a057e630c5',
+        name: 'Ruby Client Example'
+      )
 
       subscription_id = subscription[:data][:id]
       puts "\nPush subscription created! Push Subscription ID #{subscription_id}"
 
       puts "\nGetting subscription info"
       # Get details for a subscription. Also available are
-      # push.[get, get_by_hash,get_by_historics_id]
+      # push.[get, get_by_hash, get_by_historics_id]
       puts @datasift.push.get_by_subscription subscription_id
 
       puts "\nPausing Push subscription"
@@ -27,18 +27,16 @@
       @datasift.push.resume subscription_id
 
       puts "\nGetting subscription logs"
-      # Get logs for a subscription. Also available is
-      # push.log to get logs for all subscriptions
+      # Get logs for a subscription. Also available is push.log to get logs for all subscriptions
       puts @datasift.push.log_for subscription_id
 
       puts "\nStopping Push subscription"
-      # Push subscriptions can be stopped. Once stopped, a
-      # subscription can not be resumed
+      # Push subscriptions can be stopped. Once stopped, a subscription can not be resumed
       @datasift.push.stop subscription_id
 
       puts "\nDeleting Push subscription"
-      # Push subscriptions can be deleted. On delete, any undelivered
-      # data is dropped. A delete is permenent.
+      # Push subscriptions can be deleted. On delete, any undelivered data is dropped. A delete
+      # is permenent.
       @datasift.push.delete subscription_id
 
     rescue DataSiftError => dse