datasift 3.5.2 → 3.6.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: b02269afad134b1065df764ff2a4c4400a96d55c
- data.tar.gz: b7f6cede19b4a62ee7a1eeaa3784fa01bcba3c1a
+ metadata.gz: ced2e15492b4c926ee5329ed799ff6c0edc4ef1f
+ data.tar.gz: 97a2ee4cee298c822517913341db932bcae1aec2
  SHA512:
- metadata.gz: 0ecb7edc5b1e12329c021b156b245cc1eaba269116618f5daad8c5486c854ee368e28325623c84089da199a9ed497fa69ba91718711946d7285f5b1b0f855366
- data.tar.gz: 1aaaf7b124c91ab0be40d32c21a16feab784291c50b7feda1dcba3d0e5649ee03ac3f1aa233c836acd59a6c1ebd503f449e77030d60dc4c7d9fd1a0f1220d840
+ metadata.gz: 0d16d5dd9ed6c4a0a4a0b9e55734859fe7ea17c15be46cd953ce3a81728337e5f31ec5980fb440b70996f4f5648476dea2fb050e8ae35143f61c6e4a62014e0b
+ data.tar.gz: a3617a40236f7dec6fe1d7412f2f95af5075d313c4278c3b57b26e6ea516b8d76039c3e67709b817733528a9c98e636d18342dd5d1a384355a89368730f8e23f
@@ -1,14 +1,16 @@
  CHANGELOG
  ================================
- ## v.3.5.2 (2016-11-08)
- ### Fixes
- * Uses correct timestamp params in PYLON Sample API calls. `start_time` -> `start` and `end_time` -> `end`
-
- ## v.3.5.1 (2016-10-06)
+ ## v.4.0 (Under development)
+ ### Planned Features
+ * Better thought out API design in a more traditional Gem style
+ * More of an SDK style library; for example, you should perform actions (start/stop/analyze) on a PYLON recording object
+ * Designed to make the most of DataSift's latest API version and features
+ * Designed for Ruby 2.3+. Use features like keyword parameters across the board
+
+ ## v.3.6.0 (2016-02-19)
  ### Added
- * Explicit support for 500 and 502 responses from the API
- ### Fixes
- * Resolves [#86](https://github.com/datasift/datasift-ruby/issues/86); not handling 502 responses well
+ * Support for the [/pylon/update](dev.datasift.com/pylon/docs/api/pylon-api-endpoints/pylonupdate) API endpoint
+ * Support for [API v1.3](http://dev.datasift.com/docs/api/api-changelog)

  ## v.3.5.0 (2015-11-13)
  ### Added
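
For reference, the new /pylon/update support is exposed through the client's pylon object (see the update method added further down in this diff). A minimal sketch, assuming @datasift is a DataSift::Client configured for API v1.3; the recording ID, hash and name values are placeholders:

  # Point an existing recording at a newly compiled CSDL hash and rename it.
  # Both the hash and the name arguments are optional.
  @datasift.pylon.update(
    'YOUR_RECORDING_ID',        # ID of the recording to update
    'NEW_COMPILED_CSDL_HASH',   # optional replacement CSDL hash
    'My renamed recording'      # optional replacement name
  )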
data/VERSION CHANGED
@@ -1 +1 @@
- 3.5.2
+ 3.6.0
@@ -5,7 +5,7 @@ class DataSiftExample
  @config = {
  username: 'DATASIFT_USERNAME',
  api_key: 'DATASIFT_API_KEY',
- api_version: 'v1.2'
+ api_version: 'v1.3'
  }
  @params = {
  output_type: 's3',
@@ -1,4 +1,8 @@
- require './auth'
+ ##
+ # This script runs through all PYLON API endpoints using v1.2 of the API
+ ##
+
+ require './../auth'
  class AnalysisApi < DataSiftExample
  def initialize
  super
@@ -20,15 +24,18 @@ class AnalysisApi < DataSiftExample
  token = @datasift.account_identity_token.create(
  identity_id,
  'facebook',
- 'YOUR_TOKEN'
+ '125595667777713|5aef9cfdb31d8be64b87204c3bca820f'
  )
  puts token[:data].to_json

  puts "\nNow make PYLON API calls using the Identity's API key"
- @config.merge!(api_key: identity[:data][:api_key])
+ @config.merge!(
+ api_key: identity[:data][:api_key],
+ api_version: 'v1.2'
+ )
  @datasift = DataSift::Client.new(@config)

- csdl = 'return { fb.content contains "data" }'
+ csdl = "return { fb.content any \"data, #{Time.now}\" }"

  puts "Check this CSDL is valid: #{csdl}"
  puts "Valid? #{@datasift.pylon.valid?(csdl)}"
@@ -75,7 +82,7 @@ class AnalysisApi < DataSiftExample
  target: 'fb.author.age'
  }
  }
- filter = 'fb.content contains "starbucks"'
+ filter = ''
  puts @datasift.pylon.analyze(
  hash,
  params,
@@ -0,0 +1,201 @@
+ ##
+ # This script runs through all PYLON API endpoints using v1.3 of the API
+ ##
+
+ require './../auth'
+ class AnalysisApi < DataSiftExample
+ def initialize
+ super
+ run_analysis
+ end
+
+ def run_analysis
+ begin
+ puts "Create a new identity to make PYLON API calls"
+ identity = @datasift.account_identity.create(
+ "RUBY_LIB_#{Time.now.to_i}",
+ "active",
+ false
+ )
+ identity_id = identity[:data][:id]
+ puts identity[:data].to_json
+
+ puts "\nCreate a Token for our Identity"
+ token = @datasift.account_identity_token.create(
+ identity_id,
+ 'facebook',
+ '125595667777713|5aef9cfdb31d8be64b87204c3bca820f'
+ )
+ puts token[:data].to_json
+
+ puts "\nNow make PYLON API calls using the Identity's API key"
+ @pylon_config = @config.dup
+ @pylon_config.merge!(
+ api_key: identity[:data][:api_key],
+ api_version: 'v1.3'
+ )
+ @datasift = DataSift::Client.new(@pylon_config)
+
+ csdl = "return { fb.all.content any \"data, #{Time.now}\" }"
+
+ puts "Check this CSDL is valid: #{csdl}"
+ puts "Valid? #{@datasift.pylon.valid?(csdl)}"
+
+ puts "\nCompile my CSDL"
+ compiled = @datasift.pylon.compile csdl
+ hash = compiled[:data][:hash]
+ puts "Hash: #{hash}"
+
+ puts "\nStart recording with hash #{hash}"
+ recording = @datasift.pylon.start(
+ hash,
+ 'Facebook Pylon Test Recording'
+ )
+ puts recording[:data].to_json
+
+ puts "\nSleep for 10 seconds to record a little data"
+ sleep(10)
+
+ puts "\nGet details of our running recording by ID"
+ puts @datasift.pylon.get('', recording[:data][:id])[:data].to_json
+
+ puts "\nYou can also list running recordings"
+ puts @datasift.pylon.list[:data].to_json
+
+ puts "\nFrequency distribution analysis on fb.author.country"
+ params = {
+ analysis_type: 'freqDist',
+ parameters: {
+ threshold: 3,
+ target: 'fb.author.country'
+ }
+ }
+ puts @datasift.pylon.analyze(
+ '',
+ params,
+ '',
+ nil,
+ nil,
+ recording[:data][:id]
+ )[:data].to_json
+
+ puts "\nFrequency distribution analysis on fb.author.age with filter"
+ params = {
+ analysis_type: 'freqDist',
+ parameters: {
+ threshold: 1,
+ target: 'fb.author.age'
+ }
+ }
+ filter = 'fb.parent.content any "facebook"'
+ puts @datasift.pylon.analyze(
+ '',
+ params,
+ filter,
+ nil,
+ nil,
+ recording[:data][:id]
+ )[:data].to_json
+
+ puts "\nTime series analysis"
+ params = {
+ analysis_type: 'timeSeries',
+ parameters: {
+ interval: 'hour',
+ span: 12
+ }
+ }
+ filter = ''
+ start_time = Time.now.to_i - (60 * 60 * 24 * 7) # 7 days ago
+ end_time = Time.now.to_i
+ puts @datasift.pylon.analyze(
+ '',
+ params,
+ filter,
+ start_time,
+ end_time,
+ recording[:data][:id]
+ )[:data].to_json
+
+ puts "\nFrequency Distribution with nested queries. Find the top three " \
+ "age groups for each gender by country"
+ filter = ''
+ params = {
+ analysis_type: 'freqDist',
+ parameters: {
+ threshold: 4,
+ target: 'fb.author.country'
+ },
+ child: {
+ analysis_type: 'freqDist',
+ parameters: {
+ threshold: 2,
+ target: 'fb.author.gender'
+ },
+ child: {
+ analysis_type: 'freqDist',
+ parameters: {
+ threshold: 3,
+ target: 'fb.author.age'
+ }
+ }
+ }
+ }
+ start_time = Time.now.to_i - (60 * 60 * 24 * 7)
+ end_time = Time.now.to_i
+ puts @datasift.pylon.analyze(
+ '',
+ params,
+ filter,
+ start_time,
+ end_time,
+ recording[:data][:id]
+ )[:data].to_json
+
+ puts "\nTags analysis"
+ puts @datasift.pylon.tags('',recording[:data][:id])[:data].to_json
+
+ puts "\nGet Public Posts"
+ puts @datasift.pylon.sample(
+ '',
+ 10,
+ Time.now.to_i - (60 * 60), # from 1hr ago
+ Time.now.to_i, # to 'now'
+ 'fb.content contains_any "your, filter, terms"',
+ recording[:data][:id]
+ )[:data].to_json
+
+ puts "\nv1.3+ of the API allows you to update the name or hash of recordings;"
+ puts "\nBefore update:"
+ puts @datasift.pylon.get(recording[:data][:id])[:data].to_json
+
+ new_hash = @datasift.pylon.compile("fb.content any \"data, #{Time.now}\"")[:data][:hash]
+
+ puts "\nAfter update:"
+ puts @datasift.pylon.update(
+ recording[:data][:id],
+ new_hash,
+ "Updated at #{Time.now}"
+ )[:data].to_json
+
+ puts "\nStop recording filter with the recording ID #{recording[:data][:id]}"
+ puts @datasift.pylon.stop('', recording[:data][:id])[:data].to_json
+ sleep(3)
+ puts "\nYou can also restart a stopped recording by recording ID #{recording[:data][:id]}"
+ puts @datasift.pylon.restart(recording[:data][:id])[:data].to_json
+
+ # Cleanup.
+ # Stop the recording again to clean up
+ sleep(3)
+ @datasift.pylon.stop('', recording[:data][:id])[:data].to_json
+ # Disable the identity created for this example
+ @datasift = DataSift::Client.new(@config)
+ @datasift.account_identity.update(identity_id, '', 'disabled')
+
+ rescue DataSiftError => dse
+ puts dse.inspect
+ end
+ end
+ end
+
+ AnalysisApi.new
@@ -230,14 +230,8 @@ module DataSift
  begin
  code = e.http_code
  body = e.http_body
- error = nil
  if code && body
- begin
- error = MultiJson.load(body)
- rescue MultiJson::ParseError
- # In cases where we receive 502 responses, Nginx may send HTML rather than JSON
- error = body
- end
+ error = MultiJson.load(body)
  response_on_error = {
  :data => nil,
  :datasift => {
@@ -309,10 +303,6 @@ module DataSift
  raise UnprocessableEntityError.new(code, body, response)
  when 429
  raise TooManyRequestsError.new(code, body, response)
- when 500
- raise InternalServerError.new(code, body, response)
- when 502
- raise BadGatewayError.new(code, body, response)
  when 503
  raise ServiceUnavailableError.new(code, body, response)
  when 504
@@ -68,14 +68,6 @@ end
  class TooManyRequestsError < DataSiftError
  end

- # Standard error returned when receiving a 500 response from the API
- class InternalServerError < DataSiftError
- end
-
- # Standard error returned when receiving a 502 response from the API
- class BadGatewayError < DataSiftError
- end
-
  # Standard error returned when receiving a 503 response from the API
  class ServiceUnavailableError < DataSiftError
  end
@@ -30,10 +30,13 @@ module DataSift
  # Perform /pylon/get API call to query status of your PYLON recordings
  #
  # @param hash [String] Hash you with the get the status for
+ # @param id [String] The ID of the PYLON recording to get
  # @return [Object] API reponse object
- def get(hash)
- fail BadParametersError, 'hash is required' if hash.empty?
- params = { hash: hash }
+ def get(hash = '', id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+ params = {}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(id: id) unless id.empty?

  DataSift.request(:GET, 'pylon/get', @config, params)
  end
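
With this change /pylon/get can be addressed by CSDL hash or by recording ID. A minimal sketch, assuming @datasift is an already-configured DataSift::Client; the values are placeholders:

  # Query a recording's status by hash, as before...
  @datasift.pylon.get('YOUR_CSDL_HASH')
  # ...or by recording ID, leaving the hash argument empty
  @datasift.pylon.get('', 'YOUR_RECORDING_ID')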
@@ -56,15 +59,46 @@ module DataSift
  DataSift.request(:GET, 'pylon/get', @config, params)
  end

+ # Perform /pylon/update API call to update a given PYLON Recording
+ #
+ # @param id [String] The ID of the PYLON recording to update
+ # @param hash [String] The CSDL filter hash this recording should be subscribed to
+ # @param name [String] Update the name of your recording
+ # @return [Object] API reponse object
+ def update(id, hash = '', name = '')
+ params = {id: id}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(name: name) unless name.empty?
+
+ DataSift.request(:PUT, 'pylon/update', @config, params)
+ end
+
  # Start recording a PYLON filter by making an /pylon/start API call
  #
  # @param hash [String] CSDL you wish to begin (or resume) recording
  # @param name [String] Give your recording a name. Required when starting a
+ # @param id [String] ID of the recording you wish to start
+ # new recording
+ # @return [Object] API reponse object
+ def start(hash = '', name = '', id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+ params = {}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(name: name) unless name.empty?
+ params.merge!(id: id) unless id.empty?
+
+ DataSift.request(:PUT, 'pylon/start', @config, params)
+ end
+
+ # Restart an existing PYLON recording by making an /pylon/start API call with a recording ID
+ #
+ # @param id [String] CSDL you wish to begin (or resume) recording
+ # @param name [String] Give your recording a name. Required when starting a
  # new recording
  # @return [Object] API reponse object
- def start(hash = '', name = '')
- fail BadParametersError, 'hash is required' if hash.empty?
- params = { hash: hash }
+ def restart(id, name = '')
+ fail BadParametersError, 'id is required' if id.empty?
+ params = { id: id }
  params.merge!(name: name) unless name.empty?

  DataSift.request(:PUT, 'pylon/start', @config, params)
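
A minimal sketch of the new restart helper, assuming @datasift is an already-configured DataSift::Client and the recording ID is a placeholder:

  # Resume a previously stopped recording by its ID; the name argument is optional
  @datasift.pylon.restart('YOUR_RECORDING_ID')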
@@ -73,10 +107,13 @@ module DataSift
  # Stop an active PYLON recording by making an /pylon/stop API call
  #
  # @param hash [String] CSDL you wish to stop recording
+ # @param id [String] ID of the recording you wish to stop
  # @return [Object] API reponse object
- def stop(hash)
- fail BadParametersError, 'hash is required' if hash.empty?
- params = { hash: hash }
+ def stop(hash = '', id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+ params = {}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(id: id) unless id.empty?

  DataSift.request(:PUT, 'pylon/stop', @config, params)
  end
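
As with get, stop now accepts a recording ID in place of a hash. A sketch with a placeholder ID:

  # Stop a running recording by ID, leaving the hash argument empty
  @datasift.pylon.stop('', 'YOUR_RECORDING_ID')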
@@ -92,14 +129,14 @@ module DataSift
  # @param filter [String] Optional PYLON CSDL for a query filter
  # @param start_time [Integer] Optional start timestamp for filtering by date
  # @param end_time [Integer] Optional end timestamp for filtering by date
+ # @param id [String] ID of the recording you wish to analyze
  # @return [Object] API reponse object
- def analyze(hash = '', parameters = '', filter = '', start_time = nil, end_time = nil)
- fail BadParametersError, 'hash is required' if hash.empty?
+ def analyze(hash = '', parameters = '', filter = '', start_time = nil, end_time = nil, id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
  fail BadParametersError, 'parameters is required' if parameters.empty?
- params = {
- hash: hash,
- parameters: parameters
- }
+ params = { parameters: parameters }
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(id: id) unless id.empty?
  params.merge!(filter: filter) unless filter.empty?
  params.merge!(start: start_time) unless start_time.nil?
  params.merge!(end: end_time) unless end_time.nil?
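
Analysis calls can likewise target a recording ID. A sketch mirroring the bundled v1.3 example script; the recording ID is a placeholder:

  params = {
    analysis_type: 'freqDist',
    parameters: { threshold: 3, target: 'fb.author.country' }
  }
  # hash, filter, start and end are left empty/nil; the recording ID selects the data
  @datasift.pylon.analyze('', params, '', nil, nil, 'YOUR_RECORDING_ID')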
@@ -111,10 +148,13 @@ module DataSift
  # recording
  #
  # @param hash [String] Hash of the recording you wish to query
+ # @param id [String] ID of the recording you wish to query
  # @return [Object] API reponse object
- def tags(hash)
- fail BadParametersError, 'hash is required' if hash.empty?
- params = { hash: hash }
+ def tags(hash = '', id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+ params = {}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(id: id) unless id.empty?

  DataSift.request(:GET, 'pylon/tags', @config, params)
  end
@@ -126,13 +166,16 @@ module DataSift
  # @param start_time [Integer] Optional start timestamp for filtering by date
  # @param end_time [Integer] Optional end timestamp for filtering by date
  # @param filter [String] Optional PYLON CSDL for a query filter
+ # @param id [String] ID of the recording you wish to sample
  # @return [Object] API reponse object
- def sample(hash = '', count = nil, start_time = nil, end_time = nil, filter = '')
- fail BadParametersError, 'hash is required' if hash.empty?
- params = { hash: hash }
+ def sample(hash = '', count = nil, start_time = nil, end_time = nil, filter = '', id = '')
+ fail BadParametersError, 'hash or id is required' if hash.empty? && id.empty?
+ params = {}
+ params.merge!(hash: hash) unless hash.empty?
+ params.merge!(id: id) unless id.empty?
  params.merge!(count: count) unless count.nil?
- params.merge!(start: start_time) unless start_time.nil?
- params.merge!(end: end_time) unless end_time.nil?
+ params.merge!(start_time: start_time) unless start_time.nil?
+ params.merge!(end_time: end_time) unless end_time.nil?

  if filter.empty?
  DataSift.request(:GET, 'pylon/sample', @config, params)
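
Note that sample now sends its timestamps as start_time/end_time rather than start/end, and also accepts a recording ID. A sketch based on the bundled v1.3 example script; the filter and recording ID are placeholders:

  @datasift.pylon.sample(
    '',                          # hash is not needed when an ID is given
    10,                          # number of public posts to return
    Time.now.to_i - (60 * 60),   # start_time: one hour ago
    Time.now.to_i,               # end_time: now
    'fb.content contains_any "your, filter, terms"',
    'YOUR_RECORDING_ID'
  )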
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: datasift
  version: !ruby/object:Gem::Version
- version: 3.5.2
+ version: 3.6.0
  platform: ruby
  authors:
  - DataSift
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-11-08 00:00:00.000000000 Z
+ date: 2016-03-08 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rest-client
@@ -110,7 +110,8 @@ files:
  - examples/managed_source_ig_eg.rb
  - examples/pull.rb
  - examples/push_eg.rb
- - examples/pylon_eg.rb
+ - examples/pylon/pylon_api_v1.2_eg.rb
+ - examples/pylon/pylon_api_v1.3_eg.rb
  - lib/account.rb
  - lib/account_identity.rb
  - lib/account_identity_limit.rb
@@ -202,8 +203,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: 1.3.5
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.3
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: DataSift is a simple wrapper for the DataSift API.
  test_files: []
+ has_rdoc: