datasift 3.1.5 → 3.2.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- checksums.yaml +4 -4
- data/.gitignore +3 -2
- data/.yardopts +4 -0
- data/CHANGELOG.md +23 -0
- data/Gemfile +13 -0
- data/README.md +17 -16
- data/VERSION +1 -1
- data/datasift.gemspec +8 -14
- data/examples/account_identity_eg.rb +48 -0
- data/examples/account_identity_limit_eg.rb +68 -0
- data/examples/account_identity_token_eg.rb +70 -0
- data/examples/auth.rb +4 -4
- data/examples/core_api_eg.rb +1 -2
- data/examples/historics_eg.rb +7 -1
- data/examples/pylon_eg.rb +116 -0
- data/lib/account.rb +6 -0
- data/lib/account_identity.rb +73 -0
- data/lib/account_identity_limit.rb +85 -0
- data/lib/account_identity_token.rb +86 -0
- data/lib/api/api_resource.rb +14 -5
- data/lib/cli.rb +306 -132
- data/lib/datasift.rb +82 -47
- data/lib/errors.rb +21 -5
- data/lib/historics.rb +71 -40
- data/lib/historics_preview.rb +25 -8
- data/lib/managed_source.rb +69 -25
- data/lib/managed_source_auth.rb +21 -6
- data/lib/managed_source_resource.rb +21 -6
- data/lib/push.rb +132 -65
- data/lib/pylon.rb +122 -0
- data/test/datasift/core_api_test.rb +116 -90
- data/test/datasift/historics_preview_api_test.rb +27 -58
- data/test/datasift/push_api_test.rb +156 -141
- data/test/fixtures/cassettes/core/after_historic_dpu.json +1 -0
- data/test/fixtures/cassettes/core/balance_get.json +1 -0
- data/test/fixtures/cassettes/core/before_dpu.json +1 -0
- data/test/fixtures/cassettes/core/before_historic_dpu.json +1 -0
- data/test/fixtures/cassettes/core/compile_success.json +1 -0
- data/test/fixtures/cassettes/core/dpu_get_cost.json +1 -0
- data/test/fixtures/cassettes/core/dpu_throw_badrequest.json +1 -0
- data/test/fixtures/cassettes/core/historic_dpu.json +1 -0
- data/test/fixtures/cassettes/core/usage_success.json +1 -0
- data/test/fixtures/cassettes/core/validate_invalid_hash.json +1 -0
- data/test/fixtures/cassettes/core/validate_success_bool.json +1 -0
- data/test/fixtures/cassettes/core/validate_success_hash.json +1 -0
- data/test/fixtures/cassettes/preview/before_preview_create.json +1 -0
- data/test/fixtures/cassettes/preview/before_preview_get.json +1 -0
- data/test/fixtures/cassettes/preview/preview_create_success.json +1 -0
- data/test/fixtures/cassettes/preview/preview_get_success.json +1 -0
- data/test/fixtures/cassettes/push/after_push_create.json +1 -0
- data/test/fixtures/cassettes/push/after_push_get.json +1 -0
- data/test/fixtures/cassettes/push/after_push_log.json +1 -0
- data/test/fixtures/cassettes/push/after_push_pause.json +1 -0
- data/test/fixtures/cassettes/push/after_push_resume.json +1 -0
- data/test/fixtures/cassettes/push/after_push_stop.json +1 -0
- data/test/fixtures/cassettes/push/after_push_update.json +1 -0
- data/test/fixtures/cassettes/push/before_push_create.json +1 -0
- data/test/fixtures/cassettes/push/before_push_delete.json +1 -0
- data/test/fixtures/cassettes/push/before_push_get.json +1 -0
- data/test/fixtures/cassettes/push/before_push_log.json +1 -0
- data/test/fixtures/cassettes/push/before_push_pause.json +1 -0
- data/test/fixtures/cassettes/push/before_push_resume.json +1 -0
- data/test/fixtures/cassettes/push/before_push_stop.json +1 -0
- data/test/fixtures/cassettes/push/before_push_update.json +1 -0
- data/test/fixtures/cassettes/push/push_create.json +1 -0
- data/test/fixtures/cassettes/push/push_delete.json +1 -0
- data/test/fixtures/cassettes/push/push_get_by_id.json +1 -0
- data/test/fixtures/cassettes/push/push_log_with_id.json +1 -0
- data/test/fixtures/cassettes/push/push_pause.json +1 -0
- data/test/fixtures/cassettes/push/push_resume.json +1 -0
- data/test/fixtures/cassettes/push/push_stop.json +1 -0
- data/test/fixtures/cassettes/push/push_update.json +1 -0
- data/test/fixtures/cassettes/push/push_validate.json +1 -0
- data/test/test_helper.rb +31 -1
- metadata +64 -104
- data/examples/dynamic_list_eg.rb +0 -74
- data/examples/dynamic_list_replace_eg.rb +0 -45
- data/lib/dynamic_list.rb +0 -66
- data/lib/dynamic_list_replace.rb +0 -45
- data/test/fixtures/balance.json +0 -1
- data/test/fixtures/compile_csdl_invalid.json +0 -1
- data/test/fixtures/compile_csdl_valid.json +0 -1
- data/test/fixtures/dpu_valid.json +0 -1
- data/test/fixtures/preview_create_valid.json +0 -1
- data/test/fixtures/preview_get_running.json +0 -1
- data/test/fixtures/preview_get_succeeded.json +0 -1
- data/test/fixtures/push_create_valid.json +0 -1
- data/test/fixtures/push_get_list_by_hash_valid.json +0 -1
- data/test/fixtures/push_get_list_by_historics_id_valid.json +0 -1
- data/test/fixtures/push_get_list_valid.json +0 -1
- data/test/fixtures/push_get_valid.json +0 -1
- data/test/fixtures/push_log_valid.json +0 -1
- data/test/fixtures/push_pause_valid.json +0 -1
- data/test/fixtures/push_stop_valid.json +0 -1
- data/test/fixtures/push_validate_valid.json +0 -1
- data/test/fixtures/usage_current.json +0 -1
- data/test/fixtures/validate_csdl_invalid.json +0 -1
- data/test/fixtures/validate_csdl_valid.json +0 -1
data/lib/pylon.rb
ADDED
@@ -0,0 +1,122 @@
+module DataSift
+  #
+  # Class for accessing DataSift's PYLON API
+  class Pylon < DataSift::ApiResource
+    # Check PYLON CSDL is valid by making an /pylon/validate API call
+    #
+    # @param csdl [String] CSDL you wish to validate
+    # @param boolResponse [Boolean] True if you want a boolean response.
+    #   False if you want the full response object
+    # @return [Boolean, Object] Dependent on value of boolResponse
+    def valid?(csdl = '', boolResponse = true)
+      fail BadParametersError, 'csdl is required' if csdl.empty?
+      params = { csdl: csdl }
+
+      res = DataSift.request(:POST, 'pylon/validate', @config, params)
+      boolResponse ? res[:http][:status] == 200 : res
+    end
+
+    # Compile PYLON CSDL by making an /pylon/compile API call
+    #
+    # @param csdl [String] CSDL you wish to compile
+    # @return [Object] API reponse object
+    def compile(csdl)
+      fail BadParametersError, 'csdl is required' if csdl.empty?
+      params = { csdl: csdl }
+
+      DataSift.request(:POST, 'pylon/compile', @config, params)
+    end
+
+    # Perform /pylon/get API call to query status of your PYLON recordings
+    #
+    # @param hash [String] Hash you with the get the status for
+    # @return [Object] API reponse object
+    def get(hash)
+      fail BadParametersError, 'hash is required' if hash.empty?
+      params = { hash: hash }
+
+      DataSift.request(:GET, 'pylon/get', @config, params)
+    end
+
+    # Perform /pylon/get API call to list all PYLON Recordings
+    #
+    # @param page [Integer] Which page of recordings to retreive
+    # @param per_page [Integer] How many recordings to return per page
+    # @param order_by [String, Symbol] Which field to sort results by
+    # @param order_dir [String, Symbol] Order results in ascending or descending
+    #   order
+    # @return [Object] API reponse object
+    def list(page = nil, per_page = nil, order_by = '', order_dir = '')
+      params = {}
+      params.merge!(page: page) unless page.nil?
+      params.merge!(per_page: per_page) unless per_page.nil?
+      params.merge!(order_by: order_by) unless order_by.empty?
+      params.merge!(order_dir: order_dir) unless order_dir.empty?
+
+      DataSift.request(:GET, 'pylon/get', @config, params)
+    end
+
+    # Start recording a PYLON filter by making an /pylon/start API call
+    #
+    # @param hash [String] CSDL you wish to begin (or resume) recording
+    # @param name [String] Give your recording a name. Required when starting a
+    #   new recording
+    # @return [Object] API reponse object
+    def start(hash = '', name = '')
+      fail BadParametersError, 'hash is required' if hash.empty?
+      params = { hash: hash }
+      params.merge!(name: name) unless name.empty?
+
+      DataSift.request(:PUT, 'pylon/start', @config, params)
+    end
+
+    # Stop an active PYLON recording by making an /pylon/stop API call
+    #
+    # @param hash [String] CSDL you wish to stop recording
+    # @return [Object] API reponse object
+    def stop(hash)
+      fail BadParametersError, 'hash is required' if hash.empty?
+      params = { hash: hash }
+
+      DataSift.request(:PUT, 'pylon/stop', @config, params)
+    end
+
+    # Perform a PYLON analysis query by making an /pylon/analyze API call
+    #
+    # @param hash [String] Hash of the recording you wish to perform an
+    #   analysis against
+    # @param parameters [String] Parameters of the analysis you wish to perform.
+    #   See the
+    #   {http://dev.datasift.com/pylon/docs/api-endpoints/pylonanalyze
+    #   /pylon/analyze API Docs} for full documentation
+    # @param filter [String] Optional PYLON CSDL for a query filter
+    # @param start_time [Integer] Optional start timestamp for filtering by date
+    # @param end_time [Integer] Optional end timestamp for filtering by date
+    # @return [Object] API reponse object
+    def analyze(hash = '', parameters = '', filter = '', start_time = nil, end_time = nil)
+      fail BadParametersError, 'hash is required' if hash.empty?
+      fail BadParametersError, 'parameters is required' if parameters.empty?
+      params = {
+        hash: hash,
+        parameters: parameters
+      }
+      params.merge!(filter: filter) unless filter.empty?
+      params.merge!(start: start_time) unless start_time.nil?
+      params.merge!(end: end_time) unless end_time.nil?
+
+      DataSift.request(:POST, 'pylon/analyze', @config, params)
+    end
+
+    # Query the tag hierarchy on interactions populated by a particular
+    # recording
+    #
+    # @param hash [String] Hash of the recording you wish to query
+    # @return [Object] API reponse object
+    def tags(hash)
+      fail BadParametersError, 'hash is required' if hash.empty?
+      params = { hash: hash }
+
+      DataSift.request(:GET, 'pylon/tags', @config, params)
+    end
+  end
+end
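
The new class wraps each /pylon/* endpoint in a single method on DataSift::Pylon. A minimal end-to-end sketch of validating, compiling, recording and analysing with it follows; the client.pylon accessor, the :username/:api_key config keys, and the response shapes are assumptions inferred from how historics and historics_preview are used elsewhere in this diff, and the analysis parameters are illustrative only (see the /pylon/analyze docs linked in the comments above).

    require 'datasift'
    require 'json'

    # Assumed config keys; use whatever examples/auth.rb in this release actually reads.
    config = { username: 'YOUR_USERNAME', api_key: 'YOUR_API_KEY' }
    client = DataSift::Client.new(config)

    # Assumed accessor, mirroring client.historics / client.historics_preview
    pylon = client.pylon

    csdl = 'fb.content contains "ruby"'
    abort 'invalid CSDL' unless pylon.valid?(csdl)      # POST pylon/validate (boolean form)

    hash = pylon.compile(csdl)[:data][:hash]            # POST pylon/compile
    pylon.start(hash, 'Ruby client example recording')  # PUT  pylon/start

    # Analysis parameters serialised as a JSON string; exact shape per the /pylon/analyze docs
    parameters = { analysis_type: 'freqDist',
                   parameters: { threshold: 5, target: 'fb.author.gender' } }.to_json
    result = pylon.analyze(hash, parameters)            # POST pylon/analyze
    puts result[:data]

    pylon.stop(hash)                                    # PUT  pylon/stop

Each method raises BadParametersError before any HTTP call if its required argument is empty, so the guard clauses above are the only client-side validation the class performs.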
data/test/datasift/core_api_test.rb
@@ -6,148 +6,174 @@ describe 'DataSift' do
     auth = DataSiftExample.new
     @datasift = auth.datasift
     @data = OpenStruct.new
-    @statuses = OpenStruct.new
-    @headers = OpenStruct.new

     @data.valid_csdl = 'interaction.content contains "test"'
     @data.invalid_csdl = 'interaction.nonsense is not valid'
+    @data.invalid_hash = 'this_is_not_a_valid_stream_hash'
+  end

-
-
-
+  ##
+  # Client
+  #
+  describe 'client' do
+    it 'raises_InvalidConfigError_without_auth_credentials' do
+      assert_raises InvalidConfigError do
+        DataSift::Client.new({})
+      end
+    end
   end

-
-
-
-
-
-      "x-ratelimit-limit" => "10000", "x-ratelimit-remaining" => "10000", "x-ratelimit-cost" => "5"}
-
-    #valid CSDL
-    stub_request(:post, /api.datasift.com\/.*\/validate/).
-      with(:body => {"csdl" => @data.valid_csdl}).
-      to_return(:status => @statuses.valid,
-                :body => fixture('validate_csdl_valid.json'),
-                :headers => @headers.csdl_compile)
-    #invalid CSDL
-    stub_request(:post, /api.datasift.com\/.*\/validate/).
-      with(:body => {"csdl" => @data.invalid_csdl}).
-      to_return(:status => @statuses.bad_request,
-                :body => fixture('validate_csdl_invalid.json'),
-                :headers => @headers.csdl_compile)
-    #valid stream compilation
-    stub_request(:post, /api.datasift.com\/.*\/compile/).
-      with(:body => {"csdl" => @data.valid_csdl}).
-      to_return(:status => @statuses.valid,
-                :body => fixture('compile_csdl_valid.json'),
-                :headers => @headers.csdl_compile)
-  end
-
-  it 'test_csdl_cant_be_nil_when_validating' do
+  ##
+  # /validate
+  #
+  describe '#validate' do
+    it 'csdl_cant_be_nil_when_validating' do
       assert_raises InvalidParamError do
         @datasift.valid?(nil)
       end
     end

-  it '
+    it 'csdl_cant_be_empty_when_validating' do
       assert_raises InvalidParamError do
         @datasift.valid?('')
       end
     end

-  it '
-
+    it 'user_can_get_successful_validation_as_bool' do
+      VCR.use_cassette('core/validate_success_bool') do
+        assert @datasift.valid?(@data.valid_csdl), 'Valid CSDL must return true'
+      end
     end

-  it '
-
-
-
+    it 'user_can_get_successful_validation_as_hash' do
+      VCR.use_cassette('core/validate_success_hash') do
+        validation = @datasift.valid?(@data.valid_csdl, false)
+        assert_kind_of Hash, validation,
+                       "Successful validation will return a hash"
+        assert_equal STATUS.valid, validation[:http][:status],
+                     "This request should have returned #{STATUS.valid} status"
+      end
     end

-  it '
-
-
+    it 'failing_csdl_validation' do
+      VCR.use_cassette('core/validate_invalid_hash') do
+        assert_raises BadRequestError do
+          @datasift.valid?(@data.invalid_csdl)
+        end
       end
     end
+  end

-
+  ##
+  # /compile
+  #
+  describe '#compile' do
+    it 'csdl_cant_be_nil_when_compiling' do
       assert_raises InvalidParamError do
         @datasift.compile(nil)
       end
     end

-  it '
+    it 'csdl_cant_be_empty_when_compiling' do
       assert_raises InvalidParamError do
         @datasift.compile('')
       end
     end
-  end
-
-  describe '#usage' do
-    describe 'with valid request' do
-      before do
-        @headers.valid_usage = {
-          "date" => "Thu, 30 Jan 2014 10:09:19 GMT", "content-type" => "application/json",
-          "transfer-encoding" => "chunked", "connection" => "close", "x-api-version" => "1",
-          "x-ratelimit-limit" => "10000", "x-ratelimit-remaining" => "10000", "x-ratelimit-cost" => "25"}

-
-
-
-
-
-
+    it 'successful_compilation_returns_hash' do
+      VCR.use_cassette('core/compile_success') do
+        response = @datasift.compile @data.valid_csdl
+        assert_kind_of Hash, response,
+                       "Successful compilation will return a hash"
+        assert_equal STATUS.valid, response[:http][:status],
+                     "This request should have returned #{STATUS.valid} status"
       end
+    end
+  end

-
-
-
+  ##
+  # /usage
+  #
+  describe '#usage' do
+    it 'can_get_users_usage' do
+      VCR.use_cassette('core/usage_success') do
+        response = @datasift.usage
+        assert_equal STATUS.valid, response[:http][:status]
+        assert_kind_of Hash, response
       end
     end
   end

+  ##
+  # /dpu
+  #
   describe '#dpu' do
     before do
-
-
-
-
-      "x-ratelimit-limit" => "10000", "x-ratelimit-remaining" => "10000", "x-ratelimit-cost" => "5"}
+      VCR.use_cassette('core/before_dpu') do
+        @hash = @datasift.compile(@data.valid_csdl)[:data][:hash]
+      end
+    end

-
-
-
-
-
-
+    it 'can_get_dpu_cost' do
+      VCR.use_cassette('core/dpu_get_cost') do
+        response = @datasift.dpu @hash
+        assert_equal STATUS.valid, response[:http][:status]
+      end
+    end
+
+    it 'cannot_get_dpu_cost_for_invalid_hash' do
+      VCR.use_cassette('core/dpu_throw_badrequest') do
+        assert_raises BadRequestError do
+          @datasift.dpu @data.invalid_hash
+        end
+      end
     end

-  it '
-
-
+    it 'requires_at_least_one_param' do
+      assert_raises ArgumentError do
+        @datasift.dpu
+      end
     end
   end

-  describe '#
+  describe '#dpu for Historics' do
     before do
-
-
-
-
+      VCR.use_cassette('core/before_historic_dpu') do
+        @hash = @datasift.compile(@data.valid_csdl)[:data][:hash]
+        @historic = @datasift.historics.prepare(
+          @hash,
+          Time.now.to_i - 10800,
+          Time.now.to_i - 7200,
+          'Ruby test suite',
+          'tumblr',
+          10
+        )
+      end
+    end

-
-
-
-
-      headers: @headers.balance)
+    after do
+      VCR.use_cassette('core/after_historic_dpu') do
+        @datasift.historics.delete @historic[:data][:id]
+      end
     end

-  it '
-
-
+    it 'can_get_dpu_cost_for_historic' do
+      VCR.use_cassette('core/historic_dpu') do
+        response = @datasift.dpu('', @historic[:data][:id])
+        assert_equal STATUS.valid, response[:http][:status]
+      end
     end
   end

+  ##
+  # /balance
+  #
+  describe '#balance' do
+    it 'can get account balance' do
+      VCR.use_cassette('core/balance_get') do
+        response = @datasift.balance
+        assert_equal STATUS.valid, response[:http][:status]
+      end
+    end
+  end
 end
data/test/datasift/historics_preview_api_test.rb
@@ -1,83 +1,52 @@
 require File.expand_path('../../test_helper', __FILE__)

 describe 'DataSift::HistoricsPreview' do
-
   before do
     auth = DataSiftExample.new
     @datasift = auth.datasift
     @data = OpenStruct.new
-    @statuses = OpenStruct.new
-    @headers = OpenStruct.new

-    @
-    @
+    @data.valid_csdl = 'interaction.content contains "ruby"'
+    @data.sources = 'facebook,twitter'
+    @data.parameters = 'language.tag,freqDist,5;interaction.id,targetVol,hour'
+    @data.start = '1398898800'
+    @data.end = '1398985200'
   end

+  ##
+  # /preview/create
+  #
   describe '#create' do
     before do
-
-
-
-      @data.start = '1398898800'
-      @data.end = '1398985200'
-
-      @headers.preview_create = {
-        "date" => "Thu, 30 Jan 2014 10:09:19 GMT", "content-type" => "application/json",
-        "transfer-encoding" => "chunked", "connection" => "close", "x-api-version" => "1",
-        "x-ratelimit-limit" => "10000", "x-ratelimit-remaining" => "10000", "x-ratelimit-cost" => "25"}
-
-      #valid /preview/create request
-      stub_request(:post, /api.datasift.com\/.*\/preview\/create/).
-        with(:body => { :hash => @data.stream_hash,
-                        :sources => @data.sources,
-                        :parameters => @data.parameters,
-                        :start => @data.start,
-                        :end => @data.end}).
-        to_return(status: @statuses.valid,
-                  body: fixture('preview_create_valid.json'),
-                  headers: @headers.preview_create)
+      VCR.use_cassette('preview/before_preview_create') do
+        @hash = @datasift.compile(@data.valid_csdl)[:data][:hash]
+      end
     end

-    it '
-
-
-
-
-                :end => @data.end,
-                :hash => @data.stream_hash,
-                :sources => @data.sources,
-                :parameters => @data.parameters})
+    it 'can_create_historics_preview' do
+      VCR.use_cassette('preview/preview_create_success') do
+        response = @datasift.historics_preview.create(@hash, @data.sources, @data.parameters, @data.start, @data.end)
+        assert_equal STATUS.accepted, response[:http][:status]
+      end
     end
-
   end

+  ##
+  # /preview/get
+  #
   describe '#get' do
     before do
-
-
-
-
-        "transfer-encoding" => "chunked", "connection" => "close", "x-api-version" => "1",
-        "x-ratelimit-limit" => "10000", "x-ratelimit-remaining" => "10000", "x-ratelimit-cost" => "5"}
-
-      #valid /preview/get running request
-      stub_request(:post, /api.datasift.com\/.*\/preview\/get/).
-        with(:body => { :id => @data.id}).
-        to_return(status: @statuses.accepted,
-                  body: fixture('preview_get_running.json'),
-                  headers: @headers.preview_get)
-
-      #valid /preview/get succeeded request
-      stub_request(:post, /api.datasift.com\/.*\/preview\/get/).
-        with(:body => { :id => @data.id}).
-        to_return(status: @statuses.valid,
-                  body: fixture('preview_get_succeeded.json'),
-                  headers: @headers.preview_get)
+      VCR.use_cassette('preview/before_preview_get') do
+        @hash = @datasift.compile(@data.valid_csdl)[:data][:hash]
+        @preview = @datasift.historics_preview.create(@hash, @data.sources, @data.parameters, @data.start, @data.end)
+      end
     end

     it 'can get an Historics Preview' do
-
-
+      VCR.use_cassette('preview/preview_get_success') do
+        response = @datasift.historics_preview.get(@preview[:data][:id])
+        assert_equal STATUS.accepted, response[:http][:status]
+      end
     end
   end
 end