scraperwiki-api 0.0.6 → 0.0.7
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- data/.travis.yml +4 -0
- data/.yardopts +4 -0
- data/lib/scraperwiki-api.rb +10 -10
- data/lib/scraperwiki-api/matchers.rb +52 -9
- data/lib/scraperwiki-api/version.rb +1 -1
- data/scraperwiki-api.gemspec +2 -3
- data/spec/scraperwiki-api_spec.rb +105 -107
- metadata +6 -5
data/.travis.yml
CHANGED
data/.yardopts
ADDED
data/lib/scraperwiki-api.rb
CHANGED
@@ -8,11 +8,11 @@ module ScraperWiki
     base_uri 'api.scraperwiki.com/api/1.0'

     RUN_INTERVALS = {
-      never
-      monthly
-      weekly
-      daily
-      hourly
+      :never => -1,
+      :monthly => 2678400,
+      :weekly => 604800,
+      :daily => 86400,
+      :hourly => 3600,
     }

     class << self
@@ -87,7 +87,7 @@ module ScraperWiki
       if Array === opts[:attach]
         opts[:attach] = opts[:attach].join ';'
       end
-      request_with_apikey '/datastore/sqlite', {name
+      request_with_apikey '/datastore/sqlite', {:name => shortname, :query => query}.merge(opts)
     end

     # Extracts data about a scraper's code, owner, history, etc.
@@ -199,7 +199,7 @@ module ScraperWiki
       if Array === opts[:quietfields]
         opts[:quietfields] = opts[:quietfields].join '|'
       end
-      request_with_apikey '/scraper/getinfo', {name
+      request_with_apikey '/scraper/getinfo', {:name => shortname}.merge(opts)
     end

     # See what the scraper did during each run.
@@ -235,7 +235,7 @@ module ScraperWiki
     # @note The query string parameter is +name+, not +shortname+
     #   {https://scraperwiki.com/docs/api#getinfo as in the ScraperWiki docs}
     def scraper_getruninfo(shortname, opts = {})
-      request_with_apikey '/scraper/getruninfo', {name
+      request_with_apikey '/scraper/getruninfo', {:name => shortname}.merge(opts)
     end

     # Find out information about a user.
@@ -271,7 +271,7 @@ module ScraperWiki
     # @note The date joined field is +date_joined+ (with underscore) on
     #   {#scraper_usersearch}
     def scraper_getuserinfo(username)
-      request_with_apikey '/scraper/getuserinfo', username
+      request_with_apikey '/scraper/getuserinfo', :username => username
     end

     # Search the titles and descriptions of all the scrapers.
@@ -341,7 +341,7 @@ module ScraperWiki
     end

     def request(path, opts)
-      self.class.get(path, query
+      self.class.get(path, :query => opts).parsed_response
     end
   end
 end
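For orientation, here is a minimal usage sketch of the reworked client calls above. The method names and option keys come from this diff, the scraper and user names are the ones used in the gem's own specs, and live network access to api.scraperwiki.com is assumed.

```ruby
require 'scraperwiki-api'

api = ScraperWiki::API.new

# Options are now plain hashrocket hashes merged into the query string.
info = api.scraper_getinfo('frabcus.emailer', :version => 1).first
runs = api.scraper_getruninfo('frabcus.emailer').first
user = api.scraper_getuserinfo('frabcus').first

# datastore_sqlite sends :name and :query as query-string parameters.
rows = api.datastore_sqlite('frabcus.emailer', 'SELECT * FROM swdata LIMIT 10')
```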
data/lib/scraperwiki-api/matchers.rb
CHANGED
@@ -19,6 +19,7 @@ module ScraperWiki
   #   it {should be_editable_by('frabcus')}
   #   it {should run(:daily)}
   #   it {should_not be_broken}
+  #   it {should have_a_table('swdata')}
   #   it {should have_a_row_count_of(42).on('swdata')}
   #
   #   # Check for missing keys:
@@ -233,6 +234,26 @@ module ScraperWiki
       LastRunMatcher.new expected
     end

+    class TableMatcher < ScraperInfoMatcher
+      def matches?(actual)
+        super
+        actual['datasummary']['tables'].key?(@expected)
+      end
+
+      def failure_message
+        "expected #{@actual['short_name']} to have a #{@expected} table"
+      end
+
+      def negative_failure_message
+        "expected #{@actual['short_name']} to not have a #{@expected} table"
+      end
+    end
+    # @example
+    #   it {should have_a_table('swdata')}
+    def have_a_table(expected)
+      TableMatcher.new expected
+    end
+
     class TablesMatcher < ScraperInfoMatcher
       def on(table)
         @table = table
@@ -269,7 +290,12 @@ module ScraperWiki

     class MissingKeysMatcher < KeysMatcher
       def difference
-
+        keys = if @actual['datasummary']['tables'][@table]
+          @actual['datasummary']['tables'][@table]['keys']
+        else
+          []
+        end
+        @expected - keys
       end

       def failure_predicate
@@ -288,7 +314,12 @@ module ScraperWiki

     class ExtraKeysMatcher < KeysMatcher
       def difference
-        @actual['datasummary']['tables'][@table]
+        keys = if @actual['datasummary']['tables'][@table]
+          @actual['datasummary']['tables'][@table]['keys']
+        else
+          []
+        end
+        keys - @expected
       end

       def failure_predicate
@@ -306,13 +337,21 @@ module ScraperWiki
     end

     class CountMatcher < TablesMatcher
+      def count
+        if @actual['datasummary']['tables'][@table]
+          @actual['datasummary']['tables'][@table]['count']
+        else
+          0
+        end
+      end
+
       def matches?(actual)
         super
-
+        count == @expected
       end

       def failure_message
-        "expected #{@actual['short_name']} to have #{@expected} rows, not #{
+        "expected #{@actual['short_name']} to have #{@expected} rows, not #{count}"
       end

       def negative_failure_message
@@ -362,12 +401,16 @@ module ScraperWiki
         @items ||= if Array === @actual
           @actual
         elsif Hash === @actual
-          @actual['data']
-
-
-
+          if @actual['data']
+            @actual['data'].map do |array|
+              hash = {}
+              @actual['keys'].each_with_index do |key,index|
+                hash[key] = array[index]
+              end
+              hash
             end
-
+          else
+            {}
           end
         else
           raise NotImplementerError, "Can only handle jsondict or jsonlist formats"
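For context, a hedged RSpec sketch of the new have_a_table matcher alongside the existing row-count matcher. The Matchers module name and the include line are assumptions inferred from the file path lib/scraperwiki-api/matchers.rb; the expectations themselves mirror the @example comments in the diff.

```ruby
require 'scraperwiki-api'
require 'scraperwiki-api/matchers'

describe 'frabcus.emailer' do
  include ScraperWiki::API::Matchers # assumed module name

  # Matchers operate on the info hash returned by #scraper_getinfo.
  subject {ScraperWiki::API.new.scraper_getinfo('frabcus.emailer').first}

  it {should have_a_table('swdata')}               # new in 0.0.7
  it {should have_a_row_count_of(42).on('swdata')} # a missing table now counts as 0 rows
end
```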
data/scraperwiki-api.gemspec
CHANGED
@@ -1,6 +1,5 @@
 # -*- encoding: utf-8 -*-
-
-require "scraperwiki-api/version"
+require File.expand_path('../lib/scraperwiki-api/version', __FILE__)

 Gem::Specification.new do |s|
   s.name = "scraperwiki-api"
@@ -18,7 +17,7 @@ Gem::Specification.new do |s|
   s.require_paths = ["lib"]

   s.add_runtime_dependency('yajl-ruby', '~> 1.0')
-  s.add_runtime_dependency('httparty', '~> 0.
+  s.add_runtime_dependency('httparty', '~> 0.10.0')
   s.add_development_dependency('rspec', '~> 2.10')
   s.add_development_dependency('rake')
 end
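A small sketch of what the new require line buys; the VERSION constant name is an assumption, since the diff only records that version.rb changed by one line.

```ruby
# Resolve lib/scraperwiki-api/version.rb relative to the gemspec file itself,
# so `gem build scraperwiki-api.gemspec` works even when lib/ is not on the
# load path.
require File.expand_path('../lib/scraperwiki-api/version', __FILE__)

ScraperWiki::API::VERSION # assumed constant, presumably "0.0.7" in this release
```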
data/spec/scraperwiki-api_spec.rb
CHANGED
@@ -2,137 +2,135 @@ require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

 require 'time'

-
-
-
-
-
-
-
+# We don't want to test the ScraperWiki API. We just want to check that the
+# wrapper works.
+describe ScraperWiki::API do
+  EXAMPLE_SHORTNAME = 'frabcus.emailer'
+  EXAMPLE_USERNAME = 'frabcus'
+  QUIETFIELDS = %w(code runevents datasummary userroles history)
+
+  before :all do
+    @api = ScraperWiki::API.new
+  end

-
-
-
+  describe '#datastore_sqlite' do
+    # @todo
+  end

-
-
+  describe '#scraper_getinfo' do
+    it 'should return a non-empty array containing a single hash' do
+      response = @api.scraper_getinfo EXAMPLE_SHORTNAME
+      response.should be_an(Array)
+      response.should have(1).item
+      response.first.should be_a(Hash)
     end

-
-
-
-
-
-
-
-
-    it 'should respect the :version argument' do
-      bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
-      bare.should_not have_key('currcommit')
-      result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, version: 1).first
-      result.should have_key('currcommit')
-      result['code'].should_not == bare['code']
-    end
+    it 'should respect the :version argument' do
+      bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
+      bare.should_not have_key('currcommit')
+      result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :version => 1).first
+      result.should have_key('currcommit')
+      result['code'].should_not == bare['code']
+    end

-
-
-
-
-
-
-
+    it 'should respect the :history_start_date argument' do
+      bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
+      bare['history'].should have_at_least(2).items
+      history_start_date = bare['history'][0]['date'][0..9]
+      result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :history_start_date => history_start_date).first
+      result['history'].should have(1).item
+    end

-
-
-
-
-    end
+    it 'should respect the :quietfields argument (as an array)' do
+      result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :quietfields => QUIETFIELDS).first
+      QUIETFIELDS.each do |key|
+        result.should_not have_key(key)
       end
+    end

-
-
-
-
-    end
+    it 'should respect the :quietfields argument (as an string)' do
+      result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :quietfields => QUIETFIELDS.join('|')).first
+      QUIETFIELDS.each do |key|
+        result.should_not have_key(key)
       end
     end
+  end

-
-
-
-
-
-
-
+  describe '#scraper_getruninfo' do
+    it 'should return a non-empty array containing a single hash' do
+      response = @api.scraper_getruninfo EXAMPLE_SHORTNAME
+      response.should be_an(Array)
+      response.should have(1).item
+      response.first.should be_a(Hash)
+    end

-
-
-
-
-
-
-    end
+    it 'should respect the :runid argument' do
+      runevents = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first['runevents']
+      bare = @api.scraper_getruninfo(EXAMPLE_SHORTNAME).first
+      bare['runid'].should == runevents.first['runid']
+      response = @api.scraper_getruninfo(EXAMPLE_SHORTNAME, :runid => runevents.last['runid']).first
+      response['runid'].should_not == bare['runid']
     end
+  end

-
-
-
-
-
-
-    end
+  describe '#scraper_getuserinfo' do
+    it 'should return a non-empty array containing a single hash' do
+      response = @api.scraper_getuserinfo EXAMPLE_USERNAME
+      response.should be_an(Array)
+      response.should have(1).item
+      response.first.should be_a(Hash)
     end
+  end

-
-
-
-
-
-
-
+  describe '#scraper_search' do
+    it 'should return a non-empty array of hashes' do
+      response = @api.scraper_search
+      response.should be_an(Array)
+      response.should have_at_least(1).item
+      response.first.should be_a(Hash)
+    end

-
-
-
-
-
+    it 'should respect the :searchquery argument' do
+      @api.scraper_search(:searchquery => EXAMPLE_SHORTNAME).find{|result|
+        result['short_name'] == EXAMPLE_SHORTNAME
+      }.should_not be_nil
+    end

-
-
-    end
+    it 'should respect the :maxrows argument' do
+      @api.scraper_search(:maxrows => 1).should have(1).item
     end
+  end

-
-
-
-
-
-
-
+  describe '#scraper_usersearch' do
+    it 'should return a non-empty array of hashes' do
+      response = @api.scraper_usersearch
+      response.should be_an(Array)
+      response.should have_at_least(1).item
+      response.first.should be_a(Hash)
+    end

-
-
-
-
-
+    it 'should respect the :searchquery argument' do
+      @api.scraper_usersearch(:searchquery => EXAMPLE_USERNAME).find{|result|
+        result['username'] == EXAMPLE_USERNAME
+      }.should_not be_nil
+    end

-
-
-
+    it 'should respect the :maxrows argument' do
+      @api.scraper_usersearch(:maxrows => 1).should have(1).item
+    end

-
-
-
-
-
-
+    it 'should respect the :nolist argument (as an array)' do
+      usernames = @api.scraper_usersearch.map{|result| result['username']}
+      @api.scraper_usersearch(:nolist => usernames).find{|result|
+        usernames.include? result['username']
+      }.should be_nil
+    end

-
-
-
-
-
-    end
+    it 'should respect the :nolist argument (as an string)' do
+      usernames = @api.scraper_usersearch.map{|result| result['username']}
+      @api.scraper_usersearch(:nolist => usernames.join(' ')).find{|result|
+        usernames.include? result['username']
+      }.should be_nil
     end
   end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: scraperwiki-api
 version: !ruby/object:Gem::Version
-  version: 0.0.6
+  version: 0.0.7
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2013-03-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: yajl-ruby
@@ -34,7 +34,7 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.10.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -42,7 +42,7 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.10.0
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
@@ -84,6 +84,7 @@ extra_rdoc_files: []
 files:
 - .gitignore
 - .travis.yml
+- .yardopts
 - Gemfile
 - LICENSE
 - README.md
@@ -115,7 +116,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 1.8.
+rubygems_version: 1.8.25
 signing_key:
 specification_version: 3
 summary: The ScraperWiki API Ruby Gem