powerdlz23 1.2.4 → 1.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/grell/.rspec +2 -0
  2. package/grell/.travis.yml +28 -0
  3. package/grell/CHANGELOG.md +111 -0
  4. package/grell/Gemfile +7 -0
  5. package/grell/LICENSE.txt +22 -0
  6. package/grell/README.md +213 -0
  7. package/grell/Rakefile +2 -0
  8. package/grell/grell.gemspec +36 -0
  9. package/grell/lib/grell/capybara_driver.rb +44 -0
  10. package/grell/lib/grell/crawler.rb +83 -0
  11. package/grell/lib/grell/crawler_manager.rb +84 -0
  12. package/grell/lib/grell/grell_logger.rb +10 -0
  13. package/grell/lib/grell/page.rb +275 -0
  14. package/grell/lib/grell/page_collection.rb +62 -0
  15. package/grell/lib/grell/rawpage.rb +62 -0
  16. package/grell/lib/grell/reader.rb +18 -0
  17. package/grell/lib/grell/version.rb +3 -0
  18. package/grell/lib/grell.rb +11 -0
  19. package/grell/spec/lib/capybara_driver_spec.rb +38 -0
  20. package/grell/spec/lib/crawler_manager_spec.rb +174 -0
  21. package/grell/spec/lib/crawler_spec.rb +361 -0
  22. package/grell/spec/lib/page_collection_spec.rb +159 -0
  23. package/grell/spec/lib/page_spec.rb +418 -0
  24. package/grell/spec/lib/reader_spec.rb +43 -0
  25. package/grell/spec/spec_helper.rb +66 -0
  26. package/heartmagic/config.py +1 -0
  27. package/heartmagic/heart.py +3 -0
  28. package/heartmagic/pytransform/__init__.py +483 -0
  29. package/heartmagic/pytransform/_pytransform.dll +0 -0
  30. package/heartmagic/pytransform/_pytransform.so +0 -0
  31. package/httpStatusCode/README.md +2 -0
  32. package/httpStatusCode/httpStatusCode.js +4 -0
  33. package/httpStatusCode/reasonPhrases.js +344 -0
  34. package/httpStatusCode/statusCodes.js +344 -0
  35. package/package.json +1 -1
  36. package/snapcrawl/.changelog.old.md +157 -0
  37. package/snapcrawl/.gitattributes +1 -0
  38. package/snapcrawl/.github/workflows/test.yml +41 -0
  39. package/snapcrawl/.rspec +3 -0
  40. package/snapcrawl/.rubocop.yml +23 -0
  41. package/snapcrawl/CHANGELOG.md +182 -0
  42. package/snapcrawl/Gemfile +15 -0
  43. package/snapcrawl/LICENSE +21 -0
  44. package/snapcrawl/README.md +135 -0
  45. package/snapcrawl/Runfile +35 -0
  46. package/snapcrawl/bin/snapcrawl +25 -0
  47. package/snapcrawl/lib/snapcrawl/cli.rb +52 -0
  48. package/snapcrawl/lib/snapcrawl/config.rb +60 -0
  49. package/snapcrawl/lib/snapcrawl/crawler.rb +98 -0
  50. package/snapcrawl/lib/snapcrawl/dependencies.rb +21 -0
  51. package/snapcrawl/lib/snapcrawl/exceptions.rb +5 -0
  52. package/snapcrawl/lib/snapcrawl/log_helpers.rb +36 -0
  53. package/snapcrawl/lib/snapcrawl/page.rb +118 -0
  54. package/snapcrawl/lib/snapcrawl/pretty_logger.rb +11 -0
  55. package/snapcrawl/lib/snapcrawl/refinements/pair_split.rb +26 -0
  56. package/snapcrawl/lib/snapcrawl/refinements/string_refinements.rb +13 -0
  57. package/snapcrawl/lib/snapcrawl/screenshot.rb +73 -0
  58. package/snapcrawl/lib/snapcrawl/templates/config.yml +49 -0
  59. package/snapcrawl/lib/snapcrawl/templates/docopt.txt +26 -0
  60. package/snapcrawl/lib/snapcrawl/version.rb +3 -0
  61. package/snapcrawl/lib/snapcrawl.rb +20 -0
  62. package/snapcrawl/snapcrawl.gemspec +27 -0
  63. package/snapcrawl/snapcrawl.yml +41 -0
  64. package/snapcrawl/spec/README.md +16 -0
  65. package/snapcrawl/spec/approvals/bin/help +26 -0
  66. package/snapcrawl/spec/approvals/bin/usage +4 -0
  67. package/snapcrawl/spec/approvals/cli/usage +4 -0
  68. package/snapcrawl/spec/approvals/config/defaults +15 -0
  69. package/snapcrawl/spec/approvals/config/minimal +15 -0
  70. package/snapcrawl/spec/approvals/integration/blacklist +14 -0
  71. package/snapcrawl/spec/approvals/integration/default-config +14 -0
  72. package/snapcrawl/spec/approvals/integration/depth-0 +6 -0
  73. package/snapcrawl/spec/approvals/integration/depth-3 +6 -0
  74. package/snapcrawl/spec/approvals/integration/log-color-no +6 -0
  75. package/snapcrawl/spec/approvals/integration/screenshot-error +3 -0
  76. package/snapcrawl/spec/approvals/integration/whitelist +14 -0
  77. package/snapcrawl/spec/approvals/models/pretty_logger/colors +1 -0
  78. package/snapcrawl/spec/fixtures/config/minimal.yml +4 -0
  79. package/snapcrawl/spec/server/config.ru +97 -0
  80. package/snapcrawl/spec/snapcrawl/bin_spec.rb +15 -0
  81. package/snapcrawl/spec/snapcrawl/cli_spec.rb +9 -0
  82. package/snapcrawl/spec/snapcrawl/config_spec.rb +26 -0
  83. package/snapcrawl/spec/snapcrawl/integration_spec.rb +65 -0
  84. package/snapcrawl/spec/snapcrawl/page_spec.rb +89 -0
  85. package/snapcrawl/spec/snapcrawl/pretty_logger_spec.rb +19 -0
  86. package/snapcrawl/spec/snapcrawl/refinements/pair_split_spec.rb +27 -0
  87. package/snapcrawl/spec/snapcrawl/refinements/string_refinements_spec.rb +29 -0
  88. package/snapcrawl/spec/snapcrawl/screenshot_spec.rb +62 -0
  89. package/snapcrawl/spec/spec_helper.rb +22 -0
  90. package/snapcrawl/spec/spec_mixin.rb +10 -0
@@ -0,0 +1,418 @@
1
+ RSpec.describe Grell::Page do
2
+
3
+ let(:page_id) { rand(10).floor + 10 }
4
+ let(:parent_page_id) { rand(10).floor }
5
+ let(:page) { Grell::Page.new(url, page_id, parent_page_id) }
6
+ let(:host) { 'http://www.example.com' }
7
+ let(:url) { 'http://www.example.com/test' }
8
+ let(:returned_headers) { { 'Other-Header' => 'yes', 'Content-Type' => 'text/html' }}
9
+ let(:now) { Time.now }
10
+
11
+ before do
12
+ allow(Time).to receive(:now).and_return(now)
13
+ Grell.logger = Logger.new(nil) # avoids noise in rspec output
14
+ end
15
+
16
+ it 'gives access to the url' do
17
+ expect(page.url).to eq(url)
18
+ end
19
+
20
+ it 'gives access to the path' do
21
+ expect(page.path).to eq('/test')
22
+ end
23
+
24
+ it 'gives access to the page id' do
25
+ expect(page.id).to eq(page_id)
26
+ end
27
+
28
+ it 'gives access to the parent page id' do
29
+ expect(page.parent_id).to eq(parent_page_id)
30
+ end
31
+
32
+ it 'newly created page does not have status yet' do
33
+ expect(page.status).to eq(nil)
34
+ end
35
+
36
+ shared_examples_for 'a grell page' do
37
+
38
+ it 'returns the correct status' do
39
+ expect(page.status).to eq(status)
40
+ end
41
+
42
+ it 'has the correct body' do
43
+ expect(page.body).to eq(body)
44
+ end
45
+
46
+ it 'has correct headers' do
47
+ expect(page.headers).to include(expected_headers)
48
+ end
49
+
50
+ it 'has the correct links' do
51
+ expect(page.links.sort).to eq(links.sort)
52
+ end
53
+
54
+ it '#visited? returns the correct value' do
55
+ expect(page.visited?).to eq(visited)
56
+ end
57
+
58
+ it 'has correct timestamp' do
59
+ expect(page.timestamp).to eq(now)
60
+ end
61
+
62
+ end
63
+
64
+ describe '#retries' do
65
+ context 'page has not been navigated' do
66
+ it '#retries return 0' do
67
+ expect(page.retries).to eq(0)
68
+ end
69
+ end
70
+
71
+ context 'page has been navigated once' do
72
+ before do
73
+ proxy.stub(url).and_return(body: '', code: 200, headers: {})
74
+ page.navigate
75
+ end
76
+
77
+ it '#retries return 0' do
78
+ expect(page.retries).to eq(0)
79
+ end
80
+ end
81
+
82
+ context 'page has been navigated twice' do
83
+ before do
84
+ proxy.stub(url).and_return(body: '', code: 200, headers: {})
85
+ page.navigate
86
+ page.navigate
87
+ end
88
+
89
+ it '#retries return 1' do
90
+ expect(page.retries).to eq(1)
91
+ end
92
+ end
93
+ end
94
+
95
+ describe '#navigate' do
96
+ before do
97
+ proxy.stub(url).and_return(body: '', code: 200, headers: {})
98
+ end
99
+
100
+ it 'waits for all ajax requests' do
101
+ expect_any_instance_of(Grell::RawPage).to receive(:wait_for_all_ajax_requests).with(0, 0.5)
102
+ page.navigate
103
+ end
104
+ end
105
+
106
+ shared_examples_for 'an errored grell page' do
107
+ it 'returns empty status 404 page after navigating' do
108
+ expect(page.status).to eq(404)
109
+ expect(page.links).to eq([])
110
+ expect(page.headers).to eq(headers)
111
+ expect(page.body).to eq('')
112
+ expect(page.has_selector?('html')).to eq(false)
113
+ expect(page).to be_visited
114
+ expect(page.timestamp).to eq(now)
115
+ expect(page.error?).to eq(true)
116
+ expect(page.instance_variable_get(:@times_visited)).to eq(1)
117
+ end
118
+ end
119
+
120
+ [ Capybara::Poltergeist::JavascriptError, Capybara::Poltergeist::BrowserError, URI::InvalidURIError,
121
+ Capybara::Poltergeist::TimeoutError, Capybara::Poltergeist::StatusFailError,
122
+ Capybara::Poltergeist::DeadClient, Errno::ECONNRESET ].each do |error_type|
123
+
124
+ context "#{error_type}" do
125
+ let(:headers) do
126
+ {
127
+ grellStatus: 'Error',
128
+ errorClass: "#{error_type}",
129
+ errorMessage: error_message
130
+ }
131
+ end
132
+ let(:error_message) { 'Trusmis broke it again' }
133
+ let(:now) { Time.now }
134
+
135
+ before do
136
+ allow_any_instance_of(Grell::RawPage).to receive(:navigate).and_raise(error_type, 'error')
137
+ allow_any_instance_of(error_type).to receive(:message).and_return(error_message)
138
+ page.navigate
139
+ end
140
+
141
+ it_behaves_like 'an errored grell page'
142
+ end
143
+ end
144
+
145
+
146
+ context 'we have not yet navigated to the page' do
147
+ let(:visited) { false }
148
+ let(:status) { nil }
149
+ let(:body) { '' }
150
+ let(:links) { [] }
151
+ let(:expected_headers) { {} }
152
+ let(:now) { nil }
153
+
154
+ before do
155
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
156
+ end
157
+
158
+ it_behaves_like 'a grell page'
159
+
160
+ end
161
+
162
+ context 'navigating to the URL we get a 404' do
163
+ let(:visited) { true }
164
+ let(:status) { 404 }
165
+ let(:body) { '<html><head></head><body>nothing cool</body></html>' }
166
+ let(:links) { [] }
167
+ let(:expected_headers) { returned_headers }
168
+
169
+ before do
170
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
171
+ page.navigate
172
+ end
173
+
174
+ it_behaves_like 'a grell page'
175
+
176
+ end
177
+
178
+ context 'navigating to an URL with redirects, follows them transparently' do
179
+ let(:visited) { true }
180
+ let(:status) { 200 }
181
+ let(:body) { '<html><head></head><body>nothing cool</body></html>' }
182
+ let(:links) { [] }
183
+ let(:expected_headers) { returned_headers }
184
+ let(:real_url) { 'http://example.com/other' }
185
+
186
+ before do
187
+ proxy.stub(url).and_return(:redirect_to => real_url)
188
+ proxy.stub(real_url).and_return(body: body, code: status, headers: returned_headers.dup)
189
+ page.navigate
190
+ end
191
+
192
+ it_behaves_like 'a grell page'
193
+
194
+ it 'followed_redirects? is true' do
195
+ expect(page.followed_redirects?).to eq(true)
196
+ end
197
+
198
+ it 'current_url match the url we were redirected to' do
199
+ expect(page.current_url).to eq(real_url)
200
+ end
201
+ end
202
+
203
+ # Here we also add examples that may happen for almost all pages (no errors, no redirects)
204
+ context 'navigating to the URL we get page with no links' do
205
+ let(:visited) { true }
206
+ let(:status) { 200 }
207
+ let(:body) { '<html><head></head><body>nothing cool</body></html>' }
208
+ let(:links) { [] }
209
+ let(:expected_headers) { returned_headers }
210
+
211
+ before do
212
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
213
+ page.navigate
214
+ end
215
+
216
+ it_behaves_like 'a grell page'
217
+
218
+ it 'followed_redirects is false' do
219
+ expect(page.followed_redirects?).to eq(false)
220
+ end
221
+
222
+ it 'current_url is url' do
223
+ expect(page.current_url).to eq(url)
224
+ end
225
+
226
+ it 'does not have errors' do
227
+ expect(page.error?).to eq(false)
228
+ end
229
+ end
230
+
231
+ context 'navigating to the URL we get page with links using a elements' do
232
+ let(:visited) { true }
233
+ let(:status) { 200 }
234
+ let(:body) do
235
+ "<html><head></head><body>
236
+ Hello world!
237
+ <a href=\"/trusmis.html\">trusmis</a>
238
+ <a href=\"/help.html\">help</a>
239
+ <a href=\"http://www.outsidewebsite.com/help.html\">help</a>
240
+ </body></html>"
241
+ end
242
+ let(:links) { ['http://www.example.com/trusmis.html', 'http://www.example.com/help.html'] }
243
+ let(:expected_headers) { returned_headers }
244
+
245
+ before do
246
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
247
+ page.navigate
248
+ end
249
+
250
+ it_behaves_like 'a grell page'
251
+
252
+ it 'do not return links to external websites' do
253
+ expect(page.links).to_not include('http://www.outsidewebsite.com/help.html')
254
+ end
255
+ end
256
+
257
+
258
+ context 'navigating to the URL we get page with disabled links' do
259
+ let(:visited) { true }
260
+ let(:status) { 200 }
261
+ let(:body) do
262
+ "<html><head></head><body>
263
+ Hello world!
264
+ <a href=\"/trusmis.html\">trusmis</a>
265
+ <a href=\"/help.html\">help</a>
266
+ <a href=\"javascript: void(0)\">help</a>
267
+ <a href=\"/helpdisabled.html\" disabled=\"\">helpdisabled</a>
268
+ </body></html>"
269
+ end
270
+ let(:links) { ['http://www.example.com/trusmis.html', 'http://www.example.com/help.html'] }
271
+ let(:expected_headers) { returned_headers }
272
+
273
+ before do
274
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
275
+ page.navigate
276
+ end
277
+
278
+ it_behaves_like 'a grell page'
279
+ end
280
+
281
+ context 'navigating to the URL we get page with links with absolute links' do
282
+ let(:visited) { true }
283
+ let(:status) { 200 }
284
+ let(:body) do
285
+ "<html><head></head><body>
286
+ Hello world!
287
+ <a href=\"/trusmis.html\">trusmis</a>
288
+ <a href=\"http://www.example.com/help.html\">help</a>
289
+ <a href=\"http://www.outsidewebsite.com/help.html\">help</a>
290
+ </body></html>"
291
+ end
292
+ let(:links) { ['http://www.example.com/trusmis.html', 'http://www.example.com/help.html'] }
293
+ let(:expected_headers) { returned_headers }
294
+
295
+ before do
296
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
297
+ page.navigate
298
+ end
299
+
300
+ it_behaves_like 'a grell page'
301
+
302
+ it 'do not return links to external websites' do
303
+ expect(page.links).to_not include('http://www.outsidewebsite.com/help.html')
304
+ end
305
+ end
306
+
307
+ context 'navigating to the URL we get page with links using a mix of elements' do
308
+ let(:visited) { true }
309
+ let(:status) { 200 }
310
+ let(:body) do
311
+ "<html><head></head><body>
312
+ Hello world!
313
+ <a href=\"/trusmis.html\">trusmis</a>
314
+ <table>
315
+ <tbody>
316
+ <tr href=\"/help_me.html\"><td>help</td></tr>
317
+ <tr data-href=\"/help.html\"><td>help</td></tr>
318
+ </tbody>
319
+ </table>
320
+ <div data-href=\"http://www.example.com/more_help.html\">help</div>
321
+ <div data-href=\"http://www.outsidewebsite.com/help.html\">help</div>
322
+ </body></html>"
323
+ end
324
+ let(:links) do
325
+ [ 'http://www.example.com/trusmis.html', 'http://www.example.com/help.html',
326
+ 'http://www.example.com/more_help.html', 'http://www.example.com/help_me.html' ]
327
+ end
328
+ let(:expected_headers) { returned_headers }
329
+
330
+ before do
331
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
332
+ page.navigate
333
+ end
334
+
335
+ it_behaves_like 'a grell page'
336
+
337
+ describe '#path' do
338
+ context 'proper url' do
339
+ let(:url) { 'http://www.anyurl.com/path' }
340
+ let(:page) { Grell::Page.new(url, page_id, parent_page_id) }
341
+
342
+ it 'returns the path' do
343
+ expect(page.path).to eq('/path')
344
+ end
345
+ end
346
+
347
+ context 'broken url' do
348
+ let(:url) { 'www.an.asda.fasfasf.yurl.com/path' }
349
+ let(:page) { Grell::Page.new(url, page_id, parent_page_id) }
350
+
351
+ it 'returns the path' do
352
+ expect(page.path).to eq(url)
353
+ end
354
+ end
355
+ end
356
+
357
+ it 'do not return links to external websites' do
358
+ expect(page.links).to_not include('http://www.outsidewebsite.com/help.html')
359
+ end
360
+ end
361
+
362
+ context 'navigating to the URL we get page with links inside the header section of the code' do
363
+ let(:visited) { true }
364
+ let(:status) { 200 }
365
+ let(:css) { '/application.css' }
366
+ let(:favicon) { '/favicon.ico' }
367
+ let(:body) do
368
+ "<html><head>
369
+ <title>mimi</title>
370
+ <link href=\"#{css}\" rel=\"stylesheet\">
371
+ <link href=\"#{favicon}\" rel=\"shortcut icon\" type=\"image/vnd.microsoft.icon\">
372
+ </head>
373
+ <body>
374
+ Hello world!
375
+ <a href=\"/trusmis.html\">trusmis</a>
376
+ </body></html>"
377
+ end
378
+ let(:links) do
379
+ ['http://www.example.com/trusmis.html']
380
+ end
381
+ let(:expected_headers) { returned_headers }
382
+
383
+ before do
384
+ proxy.stub(url).and_return(body: body, code: status, headers: returned_headers.dup)
385
+ # We need to stub this, or PhantomJS will get stuck trying to retrieve the resources
386
+ proxy.stub(host + css).and_return(body: '', code: status)
387
+ proxy.stub(host + favicon).and_return(body: '', code: status)
388
+ page.navigate
389
+ end
390
+
391
+ it_behaves_like 'a grell page'
392
+
393
+ it 'do not return links to resources in the header' do
394
+ expect(page.links).to_not include('http://www.example.com/application.css')
395
+ end
396
+
397
+ end
398
+
399
+ context 'status is never set' do # this may happen when there is nothing coming from the site
400
+ before do
401
+ stub_const('Grell::Page::WAIT_TIME', 0)
402
+ allow_any_instance_of(Grell::RawPage).to receive(:status).and_return(nil)
403
+ allow_any_instance_of(Grell::RawPage).to receive(:headers).and_return({})
404
+ allow_any_instance_of(Grell::RawPage).to receive(:body).and_return('')
405
+ proxy.stub(url).and_return(body: body, code: nil, headers: {})
406
+ page.navigate
407
+ end
408
+
409
+ let(:visited) { true }
410
+ let(:status) { nil }
411
+ let(:body) { '' }
412
+ let(:links) { [] }
413
+ let(:expected_headers) { {} }
414
+
415
+ it_behaves_like 'a grell page'
416
+ end
417
+
418
+ end
@@ -0,0 +1,43 @@
1
+ RSpec.describe Grell::Reader do
2
+
3
+ context 'Waiting time expired' do
4
+ let(:waiting_time) {0}
5
+ let(:sleeping_time) {2}
6
+ let(:condition) {false}
7
+ it 'does not sleep' do
8
+ before_time = Time.now
9
+ Grell::Reader.wait_for(->{''}, waiting_time, sleeping_time) do
10
+ condition
11
+ end
12
+ expect(Time.now - before_time).to be < 1
13
+ end
14
+ end
15
+
16
+ context 'The condition is true' do
17
+ let(:waiting_time) {3}
18
+ let(:sleeping_time) {2}
19
+ let(:condition) {true}
20
+ it 'does not sleep' do
21
+ before_time = Time.now
22
+ Grell::Reader.wait_for(->{''}, waiting_time, sleeping_time) do
23
+ condition
24
+ end
25
+ expect(Time.now - before_time).to be < 1
26
+ end
27
+ end
28
+
29
+ context 'The condition is false' do
30
+ let(:waiting_time) {0.2}
31
+ let(:sleeping_time) {0.2}
32
+ let(:condition) {false}
33
+
34
+ it 'waits the waiting time' do
35
+ before_time = Time.now
36
+ Grell::Reader.wait_for(->{''}, waiting_time, sleeping_time) do
37
+ condition
38
+ end
39
+ expect(Time.now - before_time).to be > waiting_time
40
+ end
41
+
42
+ end
43
+ end
@@ -0,0 +1,66 @@
1
+ require 'grell'
2
+ require 'byebug'
3
+ require 'timecop'
4
+ require 'webmock/rspec'
5
+ require 'billy/capybara/rspec'
6
+ require 'rack'
7
+ require 'rack/server'
8
+
9
+ # This will trick Puffing-billy into using this logger instead of its own
10
+ # Puffing billy is very noisy and we do not want to see that in our output
11
+ class Rails
12
+ def self.logger
13
+ Logger.new(nil)
14
+ end
15
+ end
16
+
17
+ WebMock.disable_net_connect!
18
+
19
+
20
+ # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
21
+ RSpec.configure do |config|
22
+
23
+ # We do not need to wait for pages to return all the data
24
+ config.before do
25
+ stub_const("Grell::Page::WAIT_TIME", 0)
26
+ allow_any_instance_of(Capybara::Session).to receive(:evaluate_script).and_return(nil)
27
+ end
28
+
29
+ config.expect_with :rspec do |expectations|
30
+ expectations.include_chain_clauses_in_custom_matcher_descriptions = true
31
+ end
32
+
33
+ config.mock_with :rspec do |mocks|
34
+ # Prevents you from mocking or stubbing a method that does not exist on
35
+ # a real object.
36
+ mocks.verify_partial_doubles = true
37
+ end
38
+
39
+ # Limits the available syntax to the non-monkey patched syntax that is recommended.
40
+ config.disable_monkey_patching!
41
+
42
+ # This setting enables warnings. It's recommended, but in some cases may
43
+ # be too noisy due to issues in dependencies.
44
+ # TODO: Billy puffy has lots of warnings, test this with new versions
45
+ # config.warnings = true
46
+
47
+ # Many RSpec users commonly either run the entire suite or an individual
48
+ # file, and it's useful to allow more verbose output when running an
49
+ # individual spec file.
50
+ if config.files_to_run.one?
51
+ # Use the documentation formatter for detailed output,
52
+ # unless a formatter has already been configured
53
+ # (e.g. via a command-line flag).
54
+ config.default_formatter = 'doc'
55
+ end
56
+
57
+ config.order = :random
58
+ Kernel.srand config.seed
59
+
60
+ Capybara.javascript_driver = :poltergeist_billy
61
+ Capybara.default_driver = :poltergeist_billy
62
+
63
+ # config.profile_examples = 10
64
+ end
65
+
66
+