datasift 3.2.0 → 3.3.0

Files changed (60)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -0
  3. data/CHANGELOG.md +12 -0
  4. data/VERSION +1 -1
  5. data/examples/account_identity_token_eg.rb +6 -6
  6. data/examples/auth.rb +5 -3
  7. data/examples/cli.sh +1 -1
  8. data/examples/historics_eg.rb +5 -2
  9. data/examples/historics_preview_eg.rb +1 -1
  10. data/examples/managed_source_fb_eg.rb +129 -0
  11. data/examples/managed_source_ig_eg.rb +126 -0
  12. data/examples/pylon_eg.rb +36 -2
  13. data/lib/api/api_resource.rb +1 -1
  14. data/lib/cli.rb +2 -2
  15. data/lib/datasift.rb +14 -12
  16. data/lib/errors.rb +4 -0
  17. data/lib/historics.rb +2 -2
  18. data/test/datasift/historics_preview_api_test.rb +7 -3
  19. data/test/fixtures/cassettes/core/after_historic_dpu.json +1 -1
  20. data/test/fixtures/cassettes/core/balance_get.json +1 -1
  21. data/test/fixtures/cassettes/core/before_dpu.json +1 -1
  22. data/test/fixtures/cassettes/core/before_historic_dpu.json +1 -1
  23. data/test/fixtures/cassettes/core/compile_success.json +1 -1
  24. data/test/fixtures/cassettes/core/dpu_get_cost.json +1 -1
  25. data/test/fixtures/cassettes/core/dpu_throw_badrequest.json +1 -1
  26. data/test/fixtures/cassettes/core/historic_dpu.json +1 -1
  27. data/test/fixtures/cassettes/core/usage_success.json +1 -1
  28. data/test/fixtures/cassettes/core/validate_invalid_hash.json +1 -1
  29. data/test/fixtures/cassettes/core/validate_success_bool.json +1 -1
  30. data/test/fixtures/cassettes/core/validate_success_hash.json +1 -1
  31. data/test/fixtures/cassettes/preview/before_preview_create.json +1 -1
  32. data/test/fixtures/cassettes/preview/before_preview_get.json +1 -1
  33. data/test/fixtures/cassettes/preview/preview_create_success.json +1 -1
  34. data/test/fixtures/cassettes/preview/preview_get_success.json +1 -1
  35. data/test/fixtures/cassettes/push/after_push_create.json +1 -1
  36. data/test/fixtures/cassettes/push/after_push_get.json +1 -1
  37. data/test/fixtures/cassettes/push/after_push_log.json +1 -1
  38. data/test/fixtures/cassettes/push/after_push_pause.json +1 -1
  39. data/test/fixtures/cassettes/push/after_push_resume.json +1 -1
  40. data/test/fixtures/cassettes/push/after_push_stop.json +1 -1
  41. data/test/fixtures/cassettes/push/after_push_update.json +1 -1
  42. data/test/fixtures/cassettes/push/before_push_create.json +1 -1
  43. data/test/fixtures/cassettes/push/before_push_delete.json +1 -1
  44. data/test/fixtures/cassettes/push/before_push_get.json +1 -1
  45. data/test/fixtures/cassettes/push/before_push_log.json +1 -1
  46. data/test/fixtures/cassettes/push/before_push_pause.json +1 -1
  47. data/test/fixtures/cassettes/push/before_push_resume.json +1 -1
  48. data/test/fixtures/cassettes/push/before_push_stop.json +1 -1
  49. data/test/fixtures/cassettes/push/before_push_update.json +1 -1
  50. data/test/fixtures/cassettes/push/push_create.json +1 -1
  51. data/test/fixtures/cassettes/push/push_delete.json +1 -1
  52. data/test/fixtures/cassettes/push/push_get_by_id.json +1 -1
  53. data/test/fixtures/cassettes/push/push_log_with_id.json +1 -1
  54. data/test/fixtures/cassettes/push/push_pause.json +1 -1
  55. data/test/fixtures/cassettes/push/push_resume.json +1 -1
  56. data/test/fixtures/cassettes/push/push_stop.json +1 -1
  57. data/test/fixtures/cassettes/push/push_update.json +1 -1
  58. data/test/fixtures/cassettes/push/push_validate.json +1 -1
  59. metadata +4 -3
  60. data/examples/managed_source_eg.rb +0 -113
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: c6c428274cf28d80ef439f0ca06a5a247cf70140
-   data.tar.gz: dc0df1b7d90456facf9cdc193ee69a2ebd5ab210
+   metadata.gz: b1fe731a05bf2f78cee04bd0d37a053991ee0b08
+   data.tar.gz: 119a97e10c022290c0a239c0fd192b63a13eca3d
  SHA512:
-   metadata.gz: c150becbafbc9ddf74a49b482bd2b83f93b67595701779c7383c6232b78739099982f1e4cdd6170b0fe8b13b05ef28c01ab96e28b709d8f31fd399dd12e04956
-   data.tar.gz: 9188cf1a6dead964bfee1a77ea8d3ecad37e4261af29eab15e505c7f51d9a0b4d7998aee69eae75f141670eed02bfd55459f8e28460e047d1314628a13f189e4
+   metadata.gz: 660b017e290d8218413938369e93f5bdf3c784971d954497d6b8d14de9049dca430b4a78f13867acf458fe10085815357d7d7b0a0016ec4a962e2eaf87c0890c
+   data.tar.gz: d99b43dc735b245c684fffde08cdb65ee4fa47c892c91330e6ee67def6a634df015ddeb46c16b6ba7c4ebfcac07c917610c6cc52cb391c3f375f74a74e76dd7c
data/.travis.yml CHANGED
@@ -1,4 +1,5 @@
  language: ruby
+ sudo: false
  rvm:
    - 1.9.3
    - 2.0.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  CHANGELOG
  ================================
+ ## v.3.3.0 (2015-08-05)
+ ### Added
+ * Explicit support for 429 errors from the API
+ * PYLON Nested query example
+
+ ### Changed
+ * Default API version to 1.2
+ * Improved Managed Sources examples (added dedicated Facebook Pages example)
+
+ ### Removed
+ * References to the Twitter data source (being deprecated on August 13th, 2015)
+
  ## v.3.2.0 (2015-05-13)
  ### Added
  * Support for [PYLON API](http://dev.datasift.com/pylon/docs/api-endpoints)
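
The "Explicit support for 429 errors" entry means the client now surfaces HTTP 429 (rate-limit) responses as a distinct error rather than a generic failure. A minimal sketch of how calling code might handle it, assuming @datasift is a client configured as in data/examples/auth.rb; the TooManyRequestsError class name is an assumption inferred from this changelog entry, not something confirmed by the hunks shown here:

  begin
    puts @datasift.usage[:data].to_json
  rescue TooManyRequestsError => dse  # assumed name for the new 429 error class
    # 429 means the API rate limit was hit; back off and retry later
    puts "Rate limited: #{dse.message}"
  rescue DataSiftError => dse
    # Other client/API errors still derive from DataSiftError
    puts dse.message
  end
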
data/VERSION CHANGED
@@ -1 +1 @@
- 3.2.0
+ 3.3.0
data/examples/account_identity_token_eg.rb CHANGED
@@ -15,38 +15,38 @@ class AccountIdentityTokenEg < DataSiftExample
      false
    )
    identity_id = identity[:data][:id]
-   puts identity.to_json
+   puts identity[:data].to_json

    puts "\nCreate a Token for our Identity"
    puts @datasift.account_identity_token.create(
      identity_id,
      'facebook',
      'YOUR_TOKEN'
-   )
+   )[:data].to_json

    puts "\nList all existing Tokens for this Identity"
    puts @datasift.account_identity_token.list(
      identity_id
-   ).to_json
+   )[:data].to_json

    puts "\nGet existing Token by Identity and Service"
    puts @datasift.account_identity_token.get(
      identity_id,
      'facebook'
-   ).to_json
+   )[:data].to_json

    puts "\nUpdate a Token for a given Identity"
    puts @datasift.account_identity_token.update(
      identity_id,
      'facebook',
      'YOUR_NEW_TOKEN'
-   ).to_json
+   )[:data].to_json

    puts "\nDelete an Token for a given Identity and Service"
    puts @datasift.account_identity_token.delete(
      identity_id,
      'facebook'
-   ).to_json
+   )[:data].to_json

    puts "\nCleanup and remove the Identity"
    @datasift.account_identity.delete(identity_id)
data/examples/auth.rb CHANGED
@@ -2,9 +2,11 @@ class DataSiftExample
  require 'datasift'

  def initialize
-   @username = 'DATASIFT_USERNAME'
-   @api_key = 'DATASIFT_API_KEY'
-   @config = { :username => @username, :api_key => @api_key }
+   @config = {
+     :username => 'DATASIFT_USERNAME',
+     :api_key => 'DATASIFT_API_KEY',
+     :api_version => 'v1.2'
+   }
    @params = {
      :output_type => 's3',
      :output_params => {
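
The change above collapses the example credentials into a single config hash and pins the examples to API v1.2. As a rough sketch of how that hash is used (mirroring what this example base class does outside the hunk shown), it is passed straight to the client; leaving :api_version out falls back to the library default, which this release moves to 1.2:

  require 'datasift'

  config = {
    :username    => 'DATASIFT_USERNAME',
    :api_key     => 'DATASIFT_API_KEY',
    :api_version => 'v1.2'  # optional; 3.3.0 defaults to v1.2 when omitted
  }
  @datasift = DataSift::Client.new(config)
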
data/examples/cli.sh CHANGED
@@ -108,7 +108,7 @@ ds -e push -c delete -p id ${push_id} | jq .
  #todo update push, pull

  echo "Attempting to create a Historics preview"
- preview=$(ds -e preview -c create -p start ${start} -p end ${end} -p hash ${hash} -p sources twitter \
+ preview=$(ds -e preview -c create -p start ${start} -p end ${end} -p hash ${hash} -p sources tumblr \
    -p parameters 'interaction.author.link,targetVol,hour;interaction.type,freqDist,10')

  echo ${preview} | jq .
data/examples/historics_eg.rb CHANGED
@@ -16,8 +16,11 @@ class HistoricsApi < DataSiftExample
    puts @datasift.historics.status(start, end_time)

    puts "\nPreparing"
-   #prepare a historics to start from three hours ago and run for 1 hour using twitter as a data source and using only a 10% sample
-   historics = @datasift.historics.prepare(hash, start, end_time, 'My ruby historics', 'twitter', 10)
+   # Prepare an Historics query to start from three hours ago and run for 1
+   # hour using Tumblr as a data source and using only a 10% sample
+   historics = @datasift.historics.prepare(
+     hash, start, end_time, 'My ruby historics', 'tumblr', 10
+   )
    puts historics

    id = historics[:data][:id]
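
Note that prepare only defines the Historics query; the rest of this example (outside the hunk shown) starts it and later stops it by id. A rough sketch of that flow, assuming @datasift is the configured client:

  id = historics[:data][:id]

  # Kick off the Historics query that was just prepared
  @datasift.historics.start(id)

  # ...consume the data via a Push subscription, then stop the query
  @datasift.historics.stop(id)
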
data/examples/historics_preview_eg.rb CHANGED
@@ -13,7 +13,7 @@ class HistoricsPreviewApi < DataSiftExample

    puts "\nCreating a preview"
    # see http://dev.datasift.com/docs/rest-api/previewcreate for docs
-   sources = 'facebook,twitter'
+   sources = 'tumblr'
    parameters = 'interaction.author.link,targetVol,hour;interaction.type,freqDist,10'
    start = Time.now.to_i - (3600 * 48) # 48hrs ago
    source = @datasift.historics_preview.create(hash, sources, parameters, start)
data/examples/managed_source_fb_eg.rb ADDED
@@ -0,0 +1,129 @@
+ require './auth'
+ class ManagedSourceApi < DataSiftExample
+   def initialize
+     super
+     run
+   end
+
+   def run
+     puts "Creating a managed source\n--"
+     parameters = {
+       likes: true,
+       posts_by_others: true,
+       comments: true,
+       page_likes: true
+     }
+     resources = [{
+       parameters: {
+         id: 130871750291428,
+         title: 'DataSift'
+       }
+     }]
+     auth = [{
+       parameters: {
+         value: 'vnHQocnOEChoOsYYOHVIn80......EOMmZ63go6s0DzxsAmJaDeE2ljdQjDqJVT'
+       }
+     }]
+
+     source = @datasift.managed_source.create(
+       'facebook_page', 'Ruby test', parameters, resources, auth
+     )
+     puts source[:data].to_json
+
+     id = source[:data][:id]
+
+     puts "\nStarting delivery for my Managed Source\n--"
+     puts @datasift.managed_source.start(id)[:data].to_json
+
+     # Define new resources to be added
+     update_resources = [{
+       parameters: {
+         id: 10513336322,
+         title: 'The Guardian'
+       }
+     }]
+
+     # Push each of the existing resources back into the new resources array
+     source[:data][:resources].each do |resource|
+       update_resources.push(resource)
+     end
+
+     puts "\nUpdating; adding a new resource, and changing the name\n--"
+     puts @datasift.managed_source.update(
+       id, 'facebook_page', 'Updated Ruby test', source[:data][:parameters],
+       update_resources, source[:data][:auth]
+     )[:data].to_json
+
+     puts "\nGetting info from DataSift about my source\n--"
+     puts @datasift.managed_source.get(id)[:data].to_json
+
+     # Define new resources to add to Managed Source
+     new_resources = [
+       {
+         parameters: {
+           id: 5281959998,
+           title: 'The New York Times'
+         }
+       },
+       {
+         parameters: {
+           id: 18468761129,
+           title: 'The Huffington Post'
+         }
+       },
+       {
+         parameters: {
+           id: 97212224368,
+           title: 'CNBC'
+         }
+       }
+     ]
+
+     new_auth = [{
+       parameters: {
+         value: 'CAAIUKbXn8xsBAL7eP......9hcU0b4ZVwlMe9dH5G93Nmvfi2EHJ7nXkRfc7'
+       }
+     }]
+
+     puts "\nAdding new resources to my source (as an array of Ruby objects)\n--"
+     puts @datasift.managed_source_resource.add(
+       id, new_resources
+     )[:data].to_json
+
+     puts "\nAdding a new token to my source (as an array of Ruby objects)\n--"
+     puts @datasift.managed_source_auth.add(id, new_auth)[:data].to_json
+
+     puts "\nGetting info from DataSift about my source (notice the new " \
+       "resources and tokens have been added)\n--"
+     source = @datasift.managed_source.get id
+     puts source[:data].to_json
+
+     puts "\nRemoving a resource from my source by resource_id\n--"
+     puts @datasift.managed_source_resource.remove(
+       id, [source[:data][:resources][0][:resource_id]]
+     )[:data].to_json
+
+     puts "\nRemoving an auth token from my source by identity_id\n--"
+     puts @datasift.managed_source_auth.remove(
+       id, [source[:data][:auth][0][:identity_id]]
+     )[:data].to_json
+
+     puts "\nGetting info from DataSift about my source (notice an auth " \
+       "token and resource have been removed)\n--"
+     puts @datasift.managed_source.get(id)[:data].to_json
+
+     puts "\nFetching logs (Any error logs for your source will appear here)\n--"
+     puts @datasift.managed_source.log(id)[:data].to_json
+
+     puts "\nStopping the Managed Source\n--"
+     puts @datasift.managed_source.stop(id)[:data].to_json
+
+     puts "\nDeleting the Managed Source\n--"
+     puts @datasift.managed_source.delete(id)[:data].to_json
+
+   rescue DataSiftError => dse
+     puts dse.message
+   end
+ end
+
+ ManagedSourceApi.new
data/examples/managed_source_ig_eg.rb ADDED
@@ -0,0 +1,126 @@
+ require './auth'
+ class ManagedSourceApi < DataSiftExample
+   def initialize
+     super
+     run
+   end
+
+   def run
+     puts "Creating a managed source--\n"
+     parameters = {
+       likes: true,
+       comments: true
+     }
+     resources = [{
+       parameters: {
+         type: 'user',
+         value: 25025320
+       }
+     }]
+     auth = [{
+       parameters: {
+         value: '10942122.00a3229.fff654d524854054bdb0288a05cdbdd1'
+       }
+     }]
+
+     source = @datasift.managed_source.create(
+       'instagram', 'Ruby test', parameters, resources, auth
+     )
+     puts source[:data].to_json
+
+     id = source[:data][:id]
+
+     puts "\nStarting delivery for my Managed Source--\n"
+     puts @datasift.managed_source.start(id)[:data].to_json
+
+     # Define new resources to be added
+     update_resources = [{
+       parameters: {
+         type: 'user',
+         value: 8139971
+       }
+     }]
+
+     # Push each of the existing resources back into the new resources array
+     source[:data][:resources].each do |resource|
+       update_resources.push(resource)
+     end
+
+     puts "\nUpdating; adding a new resource, and changing the name--\n"
+     puts @datasift.managed_source.update(
+       id, 'instagram', 'Updated Ruby test', source[:data][:parameters],
+       update_resources, source[:data][:auth]
+     )[:data].to_json
+
+     puts "\nGetting info from DataSift about my source--\n"
+     puts @datasift.managed_source.get(id)[:data].to_json
+
+     # Define new resources to add to Managed Source
+     new_resources = [
+       {
+         parameters: {
+           type: 'tag',
+           value: 'sun'
+         }
+       },
+       {
+         parameters: {
+           type: 'tag',
+           value: 'sea'
+         }
+       },
+       {
+         parameters: {
+           type: 'tag',
+           value: 'surf'
+         }
+       }
+     ]
+
+     new_auth = [{
+       parameters: {
+         value: '10942111.1f2134f.8837abb205b44ece801022f6fa989cc4'
+       }
+     }]
+
+     puts "\nAdding a new resource to my source (as array of Ruby objects)--\n"
+     puts @datasift.managed_source_resource.add(
+       id, new_resources
+     )[:data].to_json
+
+     puts "\nAdding a new token to my source (as array of Ruby objects)--\n"
+     puts @datasift.managed_source_auth.add(id, new_auth)[:data].to_json
+
+     puts "\nGetting info from DataSift about my source (notice the new " \
+       "resources and tokens have been added)--\n"
+     source = @datasift.managed_source.get id
+     puts source[:data].to_json
+
+     puts "\nRemoving a resource from my source by resource_id--\n"
+     puts @datasift.managed_source_resource.remove(
+       id, [source[:data][:resources][0][:resource_id]]
+     )[:data].to_json
+
+     puts "\nRemoving an auth token from my source by identity_id--\n"
+     puts @datasift.managed_source_auth.remove(
+       id, [source[:data][:auth][0][:identity_id]]
+     )[:data].to_json
+
+     puts "\nGetting info from DataSift about my source (notice an auth " \
+       "token and resource have been removed)--\n"
+     puts @datasift.managed_source.get(id)[:data].to_json
+
+     puts "\nFetching logs--\n"
+     puts @datasift.managed_source.log(id)[:data].to_json
+
+     puts "\nStopping--\n"
+     puts @datasift.managed_source.stop(id)[:data].to_json
+
+     puts "\nDeleting--\n"
+     puts @datasift.managed_source.delete(id)[:data].to_json
+   rescue DataSiftError => dse
+     puts dse.message
+   end
+ end
+
+ ManagedSourceApi.new
data/examples/pylon_eg.rb CHANGED
@@ -9,7 +9,7 @@ class AnalysisApi < DataSiftExample
    begin
      puts "Create a new identity to make PYLON API calls"
      identity = @datasift.account_identity.create(
-       "JASON_#{Time.now.to_i}",
+       "RUBY_LIB_#{Time.now.to_i}",
        "active",
        false
      )
@@ -91,7 +91,41 @@ class AnalysisApi < DataSiftExample
      }
    }
    filter = ''
-   start_time = Time.now.to_i - (60 * 60 * 12) # 7 days ago
+   start_time = Time.now.to_i - (60 * 60 * 24 * 7) # 7 days ago
+   end_time = Time.now.to_i
+   puts @datasift.pylon.analyze(
+     hash,
+     params,
+     filter,
+     start_time,
+     end_time
+   )[:data].to_json
+
+   puts "\nFrequency Distribution with nested queries. Find the top three " \
+     "age groups for each gender by country"
+   filter = ''
+   params = {
+     analysis_type: 'freqDist',
+     parameters: {
+       threshold: 4,
+       target: 'fb.author.country'
+     },
+     child: {
+       analysis_type: 'freqDist',
+       parameters: {
+         threshold: 2,
+         target: 'fb.author.gender'
+       },
+       child: {
+         analysis_type: 'freqDist',
+         parameters: {
+           threshold: 3,
+           target: 'fb.author.age'
+         }
+       }
+     }
+   }
+   start_time = Time.now.to_i - (60 * 60 * 24 * 7)
    end_time = Time.now.to_i
    puts @datasift.pylon.analyze(
      hash,