twingly-search 5.0.1 → 5.1.0

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (56)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -2
  3. data/CHANGELOG.md +16 -0
  4. data/README.md +33 -34
  5. data/Rakefile +0 -6
  6. data/examples/find_all_posts_mentioning_github.rb +3 -3
  7. data/examples/hello_world.rb +2 -2
  8. data/examples/livefeed_loop.rb +24 -0
  9. data/lib/twingly/livefeed/client.rb +121 -0
  10. data/lib/twingly/livefeed/error.rb +28 -0
  11. data/lib/twingly/livefeed/parser.rb +96 -0
  12. data/lib/twingly/livefeed/post.rb +66 -0
  13. data/lib/twingly/livefeed/result.rb +39 -0
  14. data/lib/twingly/livefeed/version.rb +5 -0
  15. data/lib/twingly/livefeed.rb +6 -0
  16. data/lib/twingly/search/client.rb +3 -2
  17. data/lib/twingly/search/error.rb +6 -5
  18. data/lib/twingly/search/parser.rb +39 -13
  19. data/lib/twingly/search/post.rb +65 -21
  20. data/lib/twingly/search/query.rb +46 -16
  21. data/lib/twingly/search/result.rb +11 -0
  22. data/lib/twingly/search/version.rb +1 -1
  23. data/spec/client_spec.rb +2 -2
  24. data/spec/error_spec.rb +27 -7
  25. data/spec/fixtures/incomplete_result.xml +2 -0
  26. data/spec/fixtures/livefeed/empty_api_key_result.xml +3 -0
  27. data/spec/fixtures/livefeed/non_xml_result.xml +1 -0
  28. data/spec/fixtures/livefeed/not_found_result.xml +3 -0
  29. data/spec/fixtures/livefeed/service_unavailable_result.xml +3 -0
  30. data/spec/fixtures/livefeed/unauthorized_api_key_result.xml +3 -0
  31. data/spec/fixtures/livefeed/valid_empty_result.xml +2 -0
  32. data/spec/fixtures/livefeed/valid_result.xml +79 -0
  33. data/spec/fixtures/minimal_valid_result.xml +81 -52
  34. data/spec/fixtures/nonexistent_api_key_result.xml +3 -3
  35. data/spec/fixtures/service_unavailable_result.xml +3 -3
  36. data/spec/fixtures/unauthorized_api_key_result.xml +3 -3
  37. data/spec/fixtures/undefined_error_result.xml +3 -3
  38. data/spec/fixtures/valid_empty_result.xml +2 -2
  39. data/spec/fixtures/valid_links_result.xml +36 -0
  40. data/spec/fixtures/vcr_cassettes/livefeed_valid_request.yml +169 -0
  41. data/spec/fixtures/vcr_cassettes/search_for_spotify_on_sv_blogs.yml +578 -447
  42. data/spec/fixtures/vcr_cassettes/search_without_valid_api_key.yml +15 -14
  43. data/spec/livefeed/client_spec.rb +135 -0
  44. data/spec/livefeed/error_spec.rb +51 -0
  45. data/spec/livefeed/parser_spec.rb +351 -0
  46. data/spec/livefeed/post_spec.rb +26 -0
  47. data/spec/livefeed/result_spec.rb +18 -0
  48. data/spec/parser_spec.rb +191 -94
  49. data/spec/post_spec.rb +25 -6
  50. data/spec/query_spec.rb +41 -34
  51. data/spec/result_spec.rb +1 -0
  52. data/spec/spec_helper.rb +10 -0
  53. data/twingly-search-api-ruby.gemspec +2 -3
  54. metadata +44 -24
  55. data/spec/fixtures/valid_non_blog_result.xml +0 -26
  56. data/spec/fixtures/valid_result.xml +0 -22975
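Most of the added files introduce a LiveFeed client alongside the existing Search client (lib/twingly/livefeed/* plus examples/livefeed_loop.rb and its fixtures). The file list does not show the new API itself, so the following is only a rough sketch; the constructor arguments and the next_result method name are assumptions inferred from the added file names, and data/examples/livefeed_loop.rb in the package contains the authoritative example.

    require "twingly/livefeed"

    # Sketch of a polling loop against the new LiveFeed API.
    # Constructor arguments and #next_result are assumed, not confirmed by this diff.
    client = Twingly::LiveFeed::Client.new(ENV["TWINGLY_SEARCH_KEY"])

    loop do
      result = client.next_result            # fetch the next batch of posts
      result.posts.each do |post|
        puts "#{post.published_at} #{post.url}"
      end
      sleep 10                                # pause between polls
    end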
data/spec/parser_spec.rb CHANGED
@@ -14,55 +14,137 @@ describe Parser do
  subject { result }

  context "with a valid result" do
- let(:fixture) { :valid }
+ let(:fixture) { :minimal_valid }

  it { is_expected.to be_a Result }
- end

- context "with a minimal valid result" do
- let(:fixture) { :minimal_valid }
+ describe "#number_of_matches_returned" do
+ subject { result.number_of_matches_returned }
+ it { is_expected.to eq(3) }
+ end
+
+ describe "#number_of_matches_total" do
+ subject { result.number_of_matches_total }
+ it { is_expected.to eq(3122050) }
+ end
+
+ describe "#seconds_elapsed" do
+ subject { result.seconds_elapsed }
+ it { is_expected.to eq(0.369) }
+ end
+
+ describe "#posts" do
+ subject { result.posts }
+
+ it { is_expected.to all(be_a(Post)) }
+
+ describe "#count" do
+ subject { result.posts.count }
+
+ it { is_expected.to eq(3) }
+ end
+ end
+
+ describe "#posts.first" do
+ subject(:post) { result.posts.first }

- describe "#posts[0]" do
- subject(:post) { result.posts[0] }
+ describe "#id" do
+ subject { post.id }
+ it { is_expected.to eq("16405819479794412880") }
+ end
+
+ describe "#author" do
+ subject { post.author }
+ it { is_expected.to eq("klivinihemligheten") }
+ end

  describe "#url" do
  subject { post.url }
- it { is_expected.to eq("http://oppogner.blogg.no/1409602010_bare_m_ha.html") }
+ it { is_expected.to eq("http://nouw.com/klivinihemligheten/planering---men-dalig-30016048") }
  end

  describe "#title" do
  subject { post.title }
- it { is_expected.to eq("Bare ha!") }
+ it { is_expected.to eq("Planering - men dålig") }
  end

- describe "#summary" do
- subject { post.summary }
- it { is_expected.to eq("Ja, velkommen til høsten ...") }
+ describe "#text" do
+ subject { post.text }
+ it { is_expected.to start_with("Det vart en förmiddag på boxen med en brud som jag ") }
  end

  describe "#language_code" do
  subject { post.language_code }
- it { is_expected.to eq("no") }
+ it { is_expected.to eq("sv") }
  end

- describe "#published" do
- subject { post.published }
- it { is_expected.to eq(Time.parse("2014-09-02 06:53:26Z")) }
+ describe "#location_code" do
+ subject { post.location_code }
+ it { is_expected.to eq("se") }
  end

- describe "#indexed" do
- subject { post.indexed }
- it { is_expected.to eq(Time.parse("2014-09-02 09:00:53Z")) }
+ describe "#coordinates" do
+ subject { post.coordinates }
+ it { is_expected.to eq({}) }
  end

- describe "#blog_url" do
- subject { post.blog_url }
- it { is_expected.to eq("http://oppogner.blogg.no/") }
+ describe "#links" do
+ subject { post.links }
+ it { is_expected.to be_empty }
  end

- describe "#authority" do
- subject { post.authority }
- it { is_expected.to eq(1) }
+ describe "#tags" do
+ subject { post.tags }
+ let(:expected_tags) do
+ [
+ "Ätas & drickas",
+ "Universitet & studentlivet",
+ "Träning",
+ "To to list",
+ ]
+ end
+
+ it { is_expected.to eq(expected_tags) }
+ end
+
+ describe "#images" do
+ subject { post.images }
+ it { is_expected.to be_empty }
+ end
+
+ describe "#indexed_at" do
+ subject { post.indexed_at }
+ it { is_expected.to eq(Time.parse("2017-05-04T06:51:23Z")) }
+ end
+
+ describe "#published_at" do
+ subject { post.published_at }
+ it { is_expected.to eq(Time.parse("2017-05-04T06:50:59Z")) }
+ end
+
+ describe "#reindexed_at" do
+ subject { post.reindexed_at }
+ it { is_expected.to eq(Time.parse("2017-05-04T08:51:23Z")) }
+ end
+
+ describe "#inlinks_count" do
+ subject { post.inlinks_count }
+ it { is_expected.to eq(0) }
+ end
+
+ describe "#blog_id" do
+ subject { post.blog_id }
+ it { is_expected.to eq("5312283800049632348") }
+ end
+
+ describe "#blog_name" do
+ subject { post.blog_name }
+ it { is_expected.to eq("Love life like a student") }
+ end
+
+ describe "#blog_url" do
+ subject { post.blog_url }
+ it { is_expected.to eq("http://nouw.com/klivinihemligheten") }
  end

  describe "#blog_rank" do
@@ -70,33 +152,38 @@ describe Parser do
  it { is_expected.to eq(1) }
  end

- describe "#tags" do
- subject { post.tags }
- it { is_expected.to eq(["Blogg"]) }
+ describe "#authority" do
+ subject { post.authority }
+ it { is_expected.to eq(0) }
  end
  end

- describe "#posts[1]" do
- subject(:post) { result.posts[1] }
+ describe "#posts.last" do
+ subject(:post) { result.posts.last }
+
+ describe "#id" do
+ subject { post.id }
+ it { is_expected.to eq("2770252465384762934") }
+ end
+
+ describe "#author" do
+ subject { post.author }
+ it { is_expected.to eq("maartiinasvardag") }
+ end

  describe "#url" do
  subject { post.url }
- it { is_expected.to eq("http://www.skvallernytt.se/hardtraning-da-galler-swedish-house-mafia") }
+ it { is_expected.to eq("http://nouw.com/maartiinasvardag/god-formiddag-30016041") }
  end

  describe "#title" do
  subject { post.title }
- it { is_expected.to eq("Hårdträning då gäller Swedish House Mafia") }
+ it { is_expected.to eq("God förmiddag! ☀️") }
  end

- describe "#summary" do
- subject { post.summary }
- it { is_expected.to eq("Träning. Och Swedish House Mafia. Det verkar vara ett lyckat koncept. \"Don't you worry child\" och \"Greyhound\" är nämligen de två mest spelade träningslåtarna under januari 2013 på Spotify.
-
- Relaterade inlägg:
- Swedish House Mafia – ny låt!
- Ny knivattack på Swedish House Mafia-konsert
- Swedish House Mafia gör succé i USA") }
+ describe "#text" do
+ subject { post.text }
+ it { is_expected.to start_with("Hmm.... Vad ska man börja ?? Jag vet inte riktigt vad min gnista ") }
  end

  describe "#language_code" do
@@ -104,78 +191,64 @@ Swedish House Mafia gör succé i USA") }
  it { is_expected.to eq("sv") }
  end

- describe "#published" do
- subject { post.published }
- it { is_expected.to eq(Time.parse("2013-01-29 15:21:56Z")) }
- end
-
- describe "#indexed" do
- subject { post.indexed }
- it { is_expected.to eq(Time.parse("2013-01-29 15:22:52Z")) }
- end
-
- describe "#blog_url" do
- subject { post.blog_url }
- it { is_expected.to eq("http://www.skvallernytt.se/") }
+ describe "#location_code" do
+ subject { post.location_code }
+ it { is_expected.to eq("se") }
  end

- describe "#authority" do
- subject { post.authority }
- it { is_expected.to eq(38) }
+ describe "#coordinates" do
+ subject { post.coordinates }
+ it { is_expected.to eq({}) }
  end

- describe "#blog_rank" do
- subject { post.blog_rank }
- it { is_expected.to eq(4) }
+ describe "#links" do
+ subject { post.links }
+ it { is_expected.to be_empty }
  end

  describe "#tags" do
  subject { post.tags }
- it { is_expected.to eq(["Okategoriserat", "Träning", "greyhound", "koncept", "mafia"]) }
+ it { is_expected.to be_empty }
  end
- end

- describe "#posts[2]" do
- subject(:post) { result.posts[2] }
+ describe "#images" do
+ subject { post.images }
+ it { is_expected.to be_empty }
+ end

- describe "#url" do
- subject { post.url }
- it { is_expected.to eq("http://didriksinspesielleverden.blogg.no/1359472349_justin_bieber.html") }
+ describe "#indexed_at" do
+ subject { post.indexed_at }
+ it { is_expected.to eq(Time.parse("2017-05-04T06:50:07Z")) }
  end

- describe "#title" do
- subject { post.title }
- it { is_expected.to eq("Justin Bieber") }
+ describe "#published_at" do
+ subject { post.published_at }
+ it { is_expected.to eq(Time.parse("2017-05-04T06:49:50Z")) }
  end

- describe "#summary" do
- subject { post.summary }
- it { is_expected.to eq("OMG! Justin Bieber Believe acoustic albumet er nå ute på spotify. Han er helt super. Love him. Personlig liker jeg best beauty and a beat og as long as you love me, kommenter gjerne hva dere synes! <3 #sus YOLO") }
+ describe "#reindexed_at" do
+ subject { post.reindexed_at }
+ it { is_expected.to eq(Time.parse("0001-01-01T00:00:00Z")) }
  end

- describe "#language_code" do
- subject { post.language_code }
- it { is_expected.to eq("no") }
+ describe "#inlinks_count" do
+ subject { post.inlinks_count }
+ it { is_expected.to eq(0) }
  end

- describe "#published" do
- subject { post.published }
- it { is_expected.to eq(Time.parse("2013-01-29 15:12:29Z")) }
+ describe "#blog_id" do
+ subject { post.blog_id }
+ it { is_expected.to eq("1578135310841173675") }
  end

- describe "#indexed" do
- subject { post.indexed }
- it { is_expected.to eq(Time.parse("2013-01-29 15:14:37Z")) }
+ describe "#blog_name" do
+ subject { post.blog_name }
+ it { is_expected.to eq("maartiinasvardag blogg") }
  end

  describe "#blog_url" do
  subject { post.blog_url }
- it { is_expected.to eq("http://didriksinspesielleverden.blogg.no/") }
- end
-
- describe "#authority" do
- subject { post.authority }
- it { is_expected.to eq(0) }
+ it { is_expected.to eq("http://nouw.com/maartiinasvardag") }
  end

  describe "#blog_rank" do
@@ -183,18 +256,30 @@ Swedish House Mafia gör succé i USA") }
  it { is_expected.to eq(1) }
  end

- describe "#tags" do
- subject { post.tags }
- it { is_expected.to eq([]) }
+ describe "#authority" do
+ subject { post.authority }
+ it { is_expected.to eq(0) }
  end
  end
  end

- context "with a valid result containing non-blogs" do
- let(:fixture) { :valid_non_blog }
+ context "with a valid result containing links" do
+ let(:fixture) { :valid_links }
+
+ describe "#posts.first" do
+ subject(:post) { result.posts.first }
+ let(:expected_links) do
+ %w[
+ https://1.bp.blogspot.com/-4uNjjiNQiug/WKguo1sBxwI/AAAAAAAAqKE/_eR7cY8Ft3cd2fYCx-2yXK8AwSHE_A2GgCLcB/s1600/aaea427ee3eaaf8f47d650f48fdf1242.jpg
+ http://www.irsn.fr/EN/newsroom/News/Pages/20170213_Detection-of-radioactive-iodine-at-trace-levels-in-Europe-in-January-2017.aspx
+ https://www.t.co/2P4IDmovzH
+ https://www.twitter.com/Strat2Intel/status/832710701730844672
+ ]
+ end

- it "should exclude the non-blog entries" do
- expect(subject.posts.size).to eq(1)
+ describe "#links" do
+ its(:links) { should eq(expected_links) }
+ end
  end
  end

@@ -208,11 +293,23 @@ Swedish House Mafia gör succé i USA") }
  end
  end

+ context "with an complete result" do
+ let(:fixture) { :valid_empty }
+
+ it { is_expected.not_to be_incomplete }
+ end
+
+ context "with an incomplete result" do
+ let(:fixture) { :incomplete }
+
+ it { is_expected.to be_incomplete }
+ end
+
  context "with a nonexistent api key result" do
  let(:fixture) { :nonexistent_api_key }

  it "should raise AuthError" do
- expect { subject }.to raise_error(AuthError)
+ expect { subject }.to raise_error(QueryError)
  end
  end

data/spec/post_spec.rb CHANGED
@@ -3,15 +3,34 @@ require 'spec_helper'
  include Twingly::Search

  describe Post do
+ it { should respond_to :id }
+ it { should respond_to :author }
  it { should respond_to :url }
  it { should respond_to :title }
- it { should respond_to :summary }
+ it { should respond_to :text }
+ it { should respond_to :location_code }
  it { should respond_to :language_code }
- it { should respond_to :indexed }
- it { should respond_to :published }
- it { should respond_to :blog_url }
+ it { should respond_to :coordinates }
+ it { should respond_to :links }
+ it { should respond_to :tags }
+ it { should respond_to :images }
+ it { should respond_to :indexed_at }
+ it { should respond_to :published_at }
+ it { should respond_to :reindexed_at }
+ it { should respond_to :inlinks_count }
+ it { should respond_to :blog_id }
  it { should respond_to :blog_name }
- it { should respond_to :authority }
+ it { should respond_to :blog_url }
  it { should respond_to :blog_rank }
- it { should respond_to :tags }
+ it { should respond_to :authority }
+
+ deprecated_methods = %i(summary indexed published outlinks)
+ deprecated_methods.each do |method_name|
+ describe "##{method_name}" do
+ it do
+ expect { subject.public_send(method_name) }
+ .to output(/deprecated/).to_stderr
+ end
+ end
+ end
  end
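Read together, these post_spec.rb changes document the renamed and newly added Post accessors in 5.1.0. A minimal sketch of the before and after, assuming post is a Twingly::Search::Post taken from a parsed result:

    post.text            # replaces post.summary
    post.published_at    # replaces post.published
    post.indexed_at      # replaces post.indexed

    # New accessors in 5.1.0:
    post.id
    post.author
    post.location_code
    post.coordinates
    post.links
    post.images
    post.reindexed_at
    post.inlinks_count
    post.blog_id

    # The old names (summary, indexed, published, outlinks) still respond, but
    # print a deprecation warning to stderr; the specs only assert the warning,
    # so delegation to the new accessors is assumed here.
    post.summary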
data/spec/query_spec.rb CHANGED
@@ -1,5 +1,4 @@
  require 'spec_helper'
- require 'vcr_setup'

  include Twingly::Search

@@ -10,15 +9,30 @@ describe Query do
  allow(client_double).to receive(:api_key).and_return("api_key")
  end

- subject { described_class.new(client_double) }
+ subject(:query) { described_class.new(client_double) }

- it { should respond_to(:pattern) }
+ it { should respond_to(:search_query) }
  it { should respond_to(:language) }
  it { should respond_to(:start_time) }
  it { should respond_to(:end_time) }
  it { should respond_to(:execute) }
  it { should respond_to(:client) }

+ describe "#pattern" do
+ it { expect { subject.pattern }.to output(/deprecated/).to_stderr }
+ it { expect { subject.pattern = "test" }.to output(/deprecated/).to_stderr }
+ end
+
+ describe "#language" do
+ it { expect { subject.language }.to output(/deprecated/).to_stderr }
+ it { expect { subject.language = "en" }.to output(/deprecated/).to_stderr }
+
+ it "should be included in the search query" do
+ subject.language = "no"
+ expect(subject.request_parameters.fetch(:q)).to include("lang:no")
+ end
+ end
+
  describe ".new" do
  context "without client" do
  subject { described_class.new }
@@ -42,49 +56,42 @@ describe Query do

  describe "#url" do
  before do
- endpoint_url = "https://api.twingly.com/analytics/Analytics.ashx"
+ endpoint_url = "https://api.twingly.com/blog/search/api/v3/search"
  allow(client_double).to receive(:endpoint_url).and_return(endpoint_url)
  end

  let(:query) { described_class.new(client_double) }

- context "with valid pattern" do
- before { query.pattern = "christmas" }
- subject { query.url }
-
- it { should include("xmloutputversion=2") }
- end
-
- context "without valid pattern" do
+ context "without valid search query" do
  it "raises an error" do
- expect { query.url }.to raise_error(QueryError, "Missing pattern")
+ expect { query.url }.to raise_error(QueryError, /query cannot be empty/)
  end
  end

- context "with empty pattern" do
- before { query.pattern = "" }
+ context "with empty search query" do
+ before { query.search_query = "" }

  it "raises an error" do
- expect { query.url }.to raise_error(QueryError, "Missing pattern")
+ expect { query.url }.to raise_error(QueryError, /query cannot be empty/)
  end
  end
  end

  describe "#start_time=" do
  before do
- subject.pattern = "semla"
+ query.search_query = "semla"
  end

  context "when given time in UTC" do
  before do
- subject.start_time = time
+ query.start_time = time
  end

  let(:timestamp) { "2016-02-09 09:01:22 UTC" }
  let(:time) { Time.parse(timestamp) }

  it "should not change timezone" do
- expect(subject.request_parameters).to include(ts: "2016-02-09 09:01:22")
+ expect(subject.request_parameters.fetch(:q)).to include("2016-02-09T09:01:22")
  end

  it "should not modify the given time object" do
@@ -110,7 +117,7 @@ describe Query do
  let(:time) { Time.parse(timestamp) }

  it "should convert to UTC" do
- expect(subject.request_parameters).to include(ts: "2016-02-09 04:01:22")
+ expect(subject.request_parameters.fetch(:q)).to include("2016-02-09T04:01:22")
  end

  it "should not modify the given time object" do
@@ -139,7 +146,7 @@ describe Query do

  describe "#end_time=" do
  before do
- subject.pattern = "semla"
+ subject.search_query = "semla"
  end

  context "when given time in UTC" do
@@ -151,7 +158,7 @@ describe Query do
  let(:time) { Time.parse(timestamp) }

  it "should not change timezone" do
- expect(subject.request_parameters).to include(tsTo: "2016-02-09 09:01:22")
+ expect(subject.request_parameters.fetch(:q)).to include("end-date:2016-02-09T09:01:22")
  end

  it "should not modify the given time object" do
@@ -177,7 +184,7 @@ describe Query do
  let(:time) { Time.parse(timestamp) }

  it "should convert to UTC" do
- expect(subject.request_parameters).to include(tsTo: "2016-02-09 04:01:22")
+ expect(subject.request_parameters.fetch(:q)).to include("end-date:2016-02-09T04:01:22")
  end

  it "should not modify the given time object" do
@@ -204,25 +211,25 @@ describe Query do
  end
  end

- context "with valid pattern" do
- before { subject.pattern = "christmas" }
+ context "with valid search query" do
+ before { subject.search_query = "christmas" }

  it "should add language" do
  subject.language = "en"
- expect(subject.request_parameters).to include(documentlang: 'en')
+ expect(subject.request_parameters.fetch(:q)).to include("lang:en")
  end

  it "should encode url paramters" do
  subject.end_time = Time.parse("2013-12-28 09:01:22 UTC")
- expect(subject.url_parameters).to include('tsTo=2013-12-28+09%3A01%3A22')
+ expect(subject.url_parameters).to include('end-date%3A2013-12-28T09%3A01%3A22')
  end
  end

- describe "#pattern" do
- before { subject.pattern = "spotify" }
+ describe "#search_query" do
+ before { subject.search_query = "spotify" }

- it "should add searchpattern" do
- expect(subject.url_parameters).to include("searchpattern=spotify")
+ it "should add q parameter" do
+ expect(subject.url_parameters).to include("q=spotify")
  end
  end

@@ -230,7 +237,7 @@ describe Query do
  context "when called" do
  subject do
  query = described_class.new(client_double)
- query.pattern = 'something'
+ query.search_query = 'something'
  query
  end

@@ -243,8 +250,8 @@ describe Query do

  context "when searching for spotify" do
  subject {
- query = described_class.new(Client.new('api_key'))
- query.pattern = 'spotify page-size:10'
+ query = described_class.new(Client.new)
+ query.search_query = 'spotify page-size:10'
  query.language = 'sv'
  query
  }
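The query_spec.rb changes show the Query API moving from pattern to search_query, with the language and time filters now folded into a single q request parameter and the endpoint changing to https://api.twingly.com/blog/search/api/v3/search. A sketch pieced together from the assertions above; the exact layout of the combined q string and reading the API key from TWINGLY_SEARCH_KEY are assumptions:

    require "time"
    require "twingly/search"

    client = Twingly::Search::Client.new        # no explicit key; assumed to come from ENV["TWINGLY_SEARCH_KEY"]
    query  = Twingly::Search::Query.new(client)

    query.search_query = "spotify page-size:10" # was: query.pattern = "..."
    query.language     = "sv"                   # deprecated setter; ends up in q as "lang:sv"
    query.end_time     = Time.parse("2013-12-28 09:01:22 UTC")

    query.request_parameters.fetch(:q)
    # => something like "spotify page-size:10 lang:sv end-date:2013-12-28T09:01:22"

    result = query.execute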
data/spec/result_spec.rb CHANGED
@@ -10,6 +10,7 @@ describe Result do
  it { should respond_to :number_of_matches_total }
  it { should respond_to :seconds_elapsed }
  it { should respond_to :all_results_returned? }
+ it { should respond_to :incomplete? }

  context "before query has populated responses" do
  its(:posts) { should be_empty }
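The new incomplete? predicate, exercised against the :incomplete fixture in parser_spec.rb above, gives a direct way to detect truncated responses. A minimal sketch, assuming Parser#parse and an XML document string as used implicitly by these specs:

    result = Twingly::Search::Parser.new.parse(document)

    if result.incomplete?
      warn "Incomplete result: #{result.number_of_matches_returned} of " \
           "#{result.number_of_matches_total} matches returned"
    end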
data/spec/spec_helper.rb CHANGED
@@ -1,8 +1,18 @@
  require File.dirname(__FILE__) + '/../lib/twingly/search'
+ require File.dirname(__FILE__) + '/../lib/twingly/livefeed'
+
+ require 'vcr_setup'
+ require 'rspec/its'
+
+ ENV["TWINGLY_SEARCH_KEY"] ||= "api_key"

  class Fixture
  def self.get(fixture_name)
  filename = "spec/fixtures/#{fixture_name}_result.xml"
  File.read(filename)
  end
+
+ def self.livefeed_get(fixture_name)
+ get("livefeed/#{fixture_name}")
+ end
  end
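The new Fixture.livefeed_get helper just reuses Fixture.get with a livefeed/ prefix, matching the fixture files added under spec/fixtures/livefeed/:

    Fixture.get(:valid_links)      # reads spec/fixtures/valid_links_result.xml
    Fixture.livefeed_get(:valid)   # reads spec/fixtures/livefeed/valid_result.xml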
data/twingly-search-api-ruby.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |spec|
  spec.summary = "Ruby API client for Twingly Search"
  spec.description = "Twingly Search is a product from Twingly AB"
  spec.license = 'MIT'
- spec.required_ruby_version = ">= 1.9.3"
+ spec.required_ruby_version = ">= 2.1.0"

  spec.files = `git ls-files`.split($/)
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
@@ -25,7 +25,6 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency "rspec-its", "~> 1"
  spec.add_development_dependency "vcr", "~> 2.6"
  spec.add_development_dependency "webmock", "~> 1.0"
- spec.add_development_dependency "rake", "~> 0"
- spec.add_development_dependency "github_changelog_generator", "~> 1.8"
+ spec.add_development_dependency "rake", "~> 11"
  spec.add_development_dependency "yard", "~> 0.8"
  end