scraperwiki-api 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,7 @@
  language: ruby
  rvm:
+ - 1.8.7
+ - 1.9.2
  - 1.9.3
+ - 2.0.0
+ - ree
@@ -0,0 +1,4 @@
+ --no-private
+ --hide-void-return
+ --embed-mixin ClassMethods
+ --markup=markdown
@@ -8,11 +8,11 @@ module ScraperWiki
      base_uri 'api.scraperwiki.com/api/1.0'

      RUN_INTERVALS = {
-       never: -1,
-       monthly: 2678400,
-       weekly: 604800,
-       daily: 86400,
-       hourly: 3600,
+       :never => -1,
+       :monthly => 2678400,
+       :weekly => 604800,
+       :daily => 86400,
+       :hourly => 3600,
      }

      class << self
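Note on the hunk above and several that follow: the Ruby 1.9 hash shorthand (never: -1) is a syntax error on Ruby 1.8.7 and REE, which the expanded Travis matrix now covers, so 0.0.7 rewrites these hash literals with hash rockets. A minimal illustration of the two forms (not taken from the package):

    # Parses only on Ruby 1.9 and later:
    intervals = { never: -1, hourly: 3600 }

    # Parses on 1.8.7, REE and 1.9+ alike -- the form adopted in 0.0.7:
    intervals = { :never => -1, :hourly => 3600 }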
@@ -87,7 +87,7 @@ module ScraperWiki
        if Array === opts[:attach]
          opts[:attach] = opts[:attach].join ';'
        end
-       request_with_apikey '/datastore/sqlite', {name: shortname, query: query}.merge(opts)
+       request_with_apikey '/datastore/sqlite', {:name => shortname, :query => query}.merge(opts)
      end

      # Extracts data about a scraper's code, owner, history, etc.
@@ -199,7 +199,7 @@ module ScraperWiki
        if Array === opts[:quietfields]
          opts[:quietfields] = opts[:quietfields].join '|'
        end
-       request_with_apikey '/scraper/getinfo', {name: shortname}.merge(opts)
+       request_with_apikey '/scraper/getinfo', {:name => shortname}.merge(opts)
      end

      # See what the scraper did during each run.
@@ -235,7 +235,7 @@ module ScraperWiki
      # @note The query string parameter is +name+, not +shortname+
      #   {https://scraperwiki.com/docs/api#getinfo as in the ScraperWiki docs}
      def scraper_getruninfo(shortname, opts = {})
-       request_with_apikey '/scraper/getruninfo', {name: shortname}.merge(opts)
+       request_with_apikey '/scraper/getruninfo', {:name => shortname}.merge(opts)
      end

      # Find out information about a user.
@@ -271,7 +271,7 @@ module ScraperWiki
      # @note The date joined field is +date_joined+ (with underscore) on
      #   {#scraper_usersearch}
      def scraper_getuserinfo(username)
-       request_with_apikey '/scraper/getuserinfo', username: username
+       request_with_apikey '/scraper/getuserinfo', :username => username
      end

      # Search the titles and descriptions of all the scrapers.
@@ -341,7 +341,7 @@ module ScraperWiki
      end

      def request(path, opts)
-       self.class.get(path, query: opts).parsed_response
+       self.class.get(path, :query => opts).parsed_response
      end
    end
  end
@@ -19,6 +19,7 @@ module ScraperWiki
  #   it {should be_editable_by('frabcus')}
  #   it {should run(:daily)}
  #   it {should_not be_broken}
+ #   it {should have_a_table('swdata')}
  #   it {should have_a_row_count_of(42).on('swdata')}
  #
  #   # Check for missing keys:
@@ -233,6 +234,26 @@ module ScraperWiki
      LastRunMatcher.new expected
    end

+   class TableMatcher < ScraperInfoMatcher
+     def matches?(actual)
+       super
+       actual['datasummary']['tables'].key?(@expected)
+     end
+
+     def failure_message
+       "expected #{@actual['short_name']} to have a #{@expected} table"
+     end
+
+     def negative_failure_message
+       "expected #{@actual['short_name']} to not have a #{@expected} table"
+     end
+   end
+
+   # @example
+   #   it {should have_a_table('swdata')}
+   def have_a_table(expected)
+     TableMatcher.new expected
+   end
+
    class TablesMatcher < ScraperInfoMatcher
      def on(table)
        @table = table
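For reference, a usage sketch of the new have_a_table matcher, in the style of the example block documented earlier in this file; the require paths, describe/subject wrapper, module path, and scraper name are illustrative assumptions rather than lines from this diff:

    # Sketch only: require paths, wrapper, and scraper name are assumed.
    require 'scraperwiki-api'
    require 'scraperwiki-api/matchers'

    describe 'example-scraper' do
      include ScraperWiki::API::Matchers  # assumed module path
      subject { ScraperWiki::API.new.scraper_getinfo('example-scraper').first }

      it {should have_a_table('swdata')}                 # new in 0.0.7
      it {should have_a_row_count_of(42).on('swdata')}   # existing matcher
    end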
@@ -269,7 +290,12 @@ module ScraperWiki

    class MissingKeysMatcher < KeysMatcher
      def difference
-       @expected - @actual['datasummary']['tables'][@table]['keys']
+       keys = if @actual['datasummary']['tables'][@table]
+         @actual['datasummary']['tables'][@table]['keys']
+       else
+         []
+       end
+       @expected - keys
      end

      def failure_predicate
@@ -288,7 +314,12 @@ module ScraperWiki

    class ExtraKeysMatcher < KeysMatcher
      def difference
-       @actual['datasummary']['tables'][@table]['keys'] - @expected
+       keys = if @actual['datasummary']['tables'][@table]
+         @actual['datasummary']['tables'][@table]['keys']
+       else
+         []
+       end
+       keys - @expected
      end

      def failure_predicate
@@ -306,13 +337,21 @@ module ScraperWiki
    end

    class CountMatcher < TablesMatcher
+     def count
+       if @actual['datasummary']['tables'][@table]
+         @actual['datasummary']['tables'][@table]['count']
+       else
+         0
+       end
+     end
+
      def matches?(actual)
        super
-       actual['datasummary']['tables'][@table]['count'] == @expected
+       count == @expected
      end

      def failure_message
-       "expected #{@actual['short_name']} to have #{@expected} rows, not #{@actual['datasummary']['tables'][@table]['count']}"
+       "expected #{@actual['short_name']} to have #{@expected} rows, not #{count}"
      end

      def negative_failure_message
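The three hunks above share one defensive change: MissingKeysMatcher, ExtraKeysMatcher, and CountMatcher now tolerate a table that is absent from datasummary['tables'], treating it as having no keys and zero rows rather than raising NoMethodError on nil. A behaviour sketch with made-up data:

    # Illustrative data, not from the API:
    datasummary = { 'tables' => {} }            # the scraper has no 'swdata' table yet
    table = datasummary['tables']['swdata']     # => nil
    keys  = table ? table['keys']  : []         # 0.0.7: a missing table contributes no keys
    count = table ? table['count'] : 0          # 0.0.7: a missing table counts as zero rows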
@@ -362,12 +401,16 @@ module ScraperWiki
        @items ||= if Array === @actual
          @actual
        elsif Hash === @actual
-         @actual['data'].map do |array|
-           hash = {}
-           @actual['keys'].each_with_index do |key,index|
-             hash[key] = array[index]
+         if @actual['data']
+           @actual['data'].map do |array|
+             hash = {}
+             @actual['keys'].each_with_index do |key,index|
+               hash[key] = array[index]
+             end
+             hash
            end
-           hash
+         else
+           {}
          end
        else
          raise NotImplementerError, "Can only handle jsondict or jsonlist formats"
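The hunk above touches the code that normalises API responses: when the response is a hash (what the error message calls the jsondict format), each row array in 'data' is zipped against the 'keys' array to build one hash per row, and 0.0.7 adds a fallback for responses with no 'data' key. A standalone sketch of the mapping, with invented values:

    # Invented jsondict-style response:
    response = {
      'keys' => ['name', 'age'],
      'data' => [['Alice', 34], ['Bob', 51]]
    }

    items = response['data'].map do |array|
      hash = {}
      response['keys'].each_with_index {|key, index| hash[key] = array[index]}
      hash
    end
    # items => [{'name' => 'Alice', 'age' => 34}, {'name' => 'Bob', 'age' => 51}]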
@@ -1,5 +1,5 @@
  module ScraperWiki
    class API
-     VERSION = "0.0.6"
+     VERSION = "0.0.7"
    end
  end
@@ -1,6 +1,5 @@
  # -*- encoding: utf-8 -*-
- $:.push File.expand_path("../lib", __FILE__)
- require "scraperwiki-api/version"
+ require File.expand_path('../lib/scraperwiki-api/version', __FILE__)

  Gem::Specification.new do |s|
    s.name = "scraperwiki-api"
@@ -18,7 +17,7 @@ Gem::Specification.new do |s|
    s.require_paths = ["lib"]

    s.add_runtime_dependency('yajl-ruby', '~> 1.0')
-   s.add_runtime_dependency('httparty', '~> 0.8.0')
+   s.add_runtime_dependency('httparty', '~> 0.10.0')
    s.add_development_dependency('rspec', '~> 2.10')
    s.add_development_dependency('rake')
  end
@@ -2,137 +2,135 @@ require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

  require 'time'

- class ScraperWiki::API
-   # We don't want to test the ScraperWiki API. We just want to check that the
-   # wrapper works.
-   describe ScraperWiki::API do
-     EXAMPLE_SHORTNAME = 'frabcus.emailer'
-     EXAMPLE_USERNAME = 'frabcus'
-     QUIETFIELDS = %w(code runevents datasummary userroles history)
+ # We don't want to test the ScraperWiki API. We just want to check that the
+ # wrapper works.
+ describe ScraperWiki::API do
+   EXAMPLE_SHORTNAME = 'frabcus.emailer'
+   EXAMPLE_USERNAME = 'frabcus'
+   QUIETFIELDS = %w(code runevents datasummary userroles history)
+
+   before :all do
+     @api = ScraperWiki::API.new
+   end

-     before :all do
-       @api = ScraperWiki::API.new
-     end
+   describe '#datastore_sqlite' do
+     # @todo
+   end

-     describe '#datastore_sqlite' do
-       # @todo
+   describe '#scraper_getinfo' do
+     it 'should return a non-empty array containing a single hash' do
+       response = @api.scraper_getinfo EXAMPLE_SHORTNAME
+       response.should be_an(Array)
+       response.should have(1).item
+       response.first.should be_a(Hash)
      end

-     describe '#scraper_getinfo' do
-       it 'should return a non-empty array containing a single hash' do
-         response = @api.scraper_getinfo EXAMPLE_SHORTNAME
-         response.should be_an(Array)
-         response.should have(1).item
-         response.first.should be_a(Hash)
-       end
-
-       it 'should respect the :version argument' do
-         bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
-         bare.should_not have_key('currcommit')
-         result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, version: 1).first
-         result.should have_key('currcommit')
-         result['code'].should_not == bare['code']
-       end
+     it 'should respect the :version argument' do
+       bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
+       bare.should_not have_key('currcommit')
+       result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :version => 1).first
+       result.should have_key('currcommit')
+       result['code'].should_not == bare['code']
+     end

-       it 'should respect the :history_start_date argument' do
-         bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
-         bare['history'].should have_at_least(2).items
-         history_start_date = bare['history'][0]['date'][0..9]
-         result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, history_start_date: history_start_date).first
-         result['history'].should have(1).item
-       end
+     it 'should respect the :history_start_date argument' do
+       bare = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first
+       bare['history'].should have_at_least(2).items
+       history_start_date = bare['history'][0]['date'][0..9]
+       result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :history_start_date => history_start_date).first
+       result['history'].should have(1).item
+     end

-       it 'should respect the :quietfields argument (as an array)' do
-         result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, quietfields: QUIETFIELDS).first
-         QUIETFIELDS.each do |key|
-           result.should_not have_key(key)
-         end
+     it 'should respect the :quietfields argument (as an array)' do
+       result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :quietfields => QUIETFIELDS).first
+       QUIETFIELDS.each do |key|
+         result.should_not have_key(key)
        end
+     end

-       it 'should respect the :quietfields argument (as an string)' do
-         result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, quietfields: QUIETFIELDS.join('|')).first
-         QUIETFIELDS.each do |key|
-           result.should_not have_key(key)
-         end
+     it 'should respect the :quietfields argument (as an string)' do
+       result = @api.scraper_getinfo(EXAMPLE_SHORTNAME, :quietfields => QUIETFIELDS.join('|')).first
+       QUIETFIELDS.each do |key|
+         result.should_not have_key(key)
        end
      end
+   end

-     describe '#scraper_getruninfo' do
-       it 'should return a non-empty array containing a single hash' do
-         response = @api.scraper_getruninfo EXAMPLE_SHORTNAME
-         response.should be_an(Array)
-         response.should have(1).item
-         response.first.should be_a(Hash)
-       end
+   describe '#scraper_getruninfo' do
+     it 'should return a non-empty array containing a single hash' do
+       response = @api.scraper_getruninfo EXAMPLE_SHORTNAME
+       response.should be_an(Array)
+       response.should have(1).item
+       response.first.should be_a(Hash)
+     end

-       it 'should respect the :runid argument' do
-         runevents = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first['runevents']
-         bare = @api.scraper_getruninfo(EXAMPLE_SHORTNAME).first
-         bare['runid'].should == runevents.first['runid']
-         response = @api.scraper_getruninfo(EXAMPLE_SHORTNAME, runid: runevents.last['runid']).first
-         response['runid'].should_not == bare['runid']
-       end
+     it 'should respect the :runid argument' do
+       runevents = @api.scraper_getinfo(EXAMPLE_SHORTNAME).first['runevents']
+       bare = @api.scraper_getruninfo(EXAMPLE_SHORTNAME).first
+       bare['runid'].should == runevents.first['runid']
+       response = @api.scraper_getruninfo(EXAMPLE_SHORTNAME, :runid => runevents.last['runid']).first
+       response['runid'].should_not == bare['runid']
      end
+   end

-     describe '#scraper_getuserinfo' do
-       it 'should return a non-empty array containing a single hash' do
-         response = @api.scraper_getuserinfo EXAMPLE_USERNAME
-         response.should be_an(Array)
-         response.should have(1).item
-         response.first.should be_a(Hash)
-       end
+   describe '#scraper_getuserinfo' do
+     it 'should return a non-empty array containing a single hash' do
+       response = @api.scraper_getuserinfo EXAMPLE_USERNAME
+       response.should be_an(Array)
+       response.should have(1).item
+       response.first.should be_a(Hash)
      end
+   end

-     describe '#scraper_search' do
-       it 'should return a non-empty array of hashes' do
-         response = @api.scraper_search
-         response.should be_an(Array)
-         response.should have_at_least(1).item
-         response.first.should be_a(Hash)
-       end
+   describe '#scraper_search' do
+     it 'should return a non-empty array of hashes' do
+       response = @api.scraper_search
+       response.should be_an(Array)
+       response.should have_at_least(1).item
+       response.first.should be_a(Hash)
+     end

-       it 'should respect the :searchquery argument' do
-         @api.scraper_search(searchquery: EXAMPLE_SHORTNAME).find{|result|
-           result['short_name'] == EXAMPLE_SHORTNAME
-         }.should_not be_nil
-       end
+     it 'should respect the :searchquery argument' do
+       @api.scraper_search(:searchquery => EXAMPLE_SHORTNAME).find{|result|
+         result['short_name'] == EXAMPLE_SHORTNAME
+       }.should_not be_nil
+     end

-       it 'should respect the :maxrows argument' do
-         @api.scraper_search(maxrows: 1).should have(1).item
-       end
+     it 'should respect the :maxrows argument' do
+       @api.scraper_search(:maxrows => 1).should have(1).item
      end
+   end

-     describe '#scraper_usersearch' do
-       it 'should return a non-empty array of hashes' do
-         response = @api.scraper_usersearch
-         response.should be_an(Array)
-         response.should have_at_least(1).item
-         response.first.should be_a(Hash)
-       end
+   describe '#scraper_usersearch' do
+     it 'should return a non-empty array of hashes' do
+       response = @api.scraper_usersearch
+       response.should be_an(Array)
+       response.should have_at_least(1).item
+       response.first.should be_a(Hash)
+     end

-       it 'should respect the :searchquery argument' do
-         @api.scraper_usersearch(searchquery: EXAMPLE_USERNAME).find{|result|
-           result['username'] == EXAMPLE_USERNAME
-         }.should_not be_nil
-       end
+     it 'should respect the :searchquery argument' do
+       @api.scraper_usersearch(:searchquery => EXAMPLE_USERNAME).find{|result|
+         result['username'] == EXAMPLE_USERNAME
+       }.should_not be_nil
+     end

-       it 'should respect the :maxrows argument' do
-         @api.scraper_usersearch(maxrows: 1).should have(1).item
-       end
+     it 'should respect the :maxrows argument' do
+       @api.scraper_usersearch(:maxrows => 1).should have(1).item
+     end

-       it 'should respect the :nolist argument (as an array)' do
-         usernames = @api.scraper_usersearch.map{|result| result['username']}
-         @api.scraper_usersearch(nolist: usernames).find{|result|
-           usernames.include? result['username']
-         }.should be_nil
-       end
+     it 'should respect the :nolist argument (as an array)' do
+       usernames = @api.scraper_usersearch.map{|result| result['username']}
+       @api.scraper_usersearch(:nolist => usernames).find{|result|
+         usernames.include? result['username']
+       }.should be_nil
+     end

-       it 'should respect the :nolist argument (as an string)' do
-         usernames = @api.scraper_usersearch.map{|result| result['username']}
-         @api.scraper_usersearch(nolist: usernames.join(' ')).find{|result|
-           usernames.include? result['username']
-         }.should be_nil
-       end
+     it 'should respect the :nolist argument (as an string)' do
+       usernames = @api.scraper_usersearch.map{|result| result['username']}
+       @api.scraper_usersearch(:nolist => usernames.join(' ')).find{|result|
+         usernames.include? result['username']
+       }.should be_nil
      end
    end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: scraperwiki-api
  version: !ruby/object:Gem::Version
-   version: 0.0.6
+   version: 0.0.7
    prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-09-21 00:00:00.000000000 Z
+ date: 2013-03-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: yajl-ruby
@@ -34,7 +34,7 @@ dependencies:
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
-         version: 0.8.0
+         version: 0.10.0
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
@@ -42,7 +42,7 @@ dependencies:
      requirements:
      - - ~>
        - !ruby/object:Gem::Version
-         version: 0.8.0
+         version: 0.10.0
  - !ruby/object:Gem::Dependency
    name: rspec
    requirement: !ruby/object:Gem::Requirement
@@ -84,6 +84,7 @@ extra_rdoc_files: []
  files:
  - .gitignore
  - .travis.yml
+ - .yardopts
  - Gemfile
  - LICENSE
  - README.md
@@ -115,7 +116,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
        version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 1.8.24
+ rubygems_version: 1.8.25
  signing_key:
  specification_version: 3
  summary: The ScraperWiki API Ruby Gem