TOSwimScraper 0.1.1 → 0.1.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/scraper.rb +9 -7
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 8f558aee6fa59fd495364ee0fff61f2a62bda173
|
4
|
+
data.tar.gz: 199647dd4bb4b78eaf29e343e3dd59ebde3f1ff7
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 2bba02a899fd0579880a33c45720f83b48fe121690f7f0fb545d74e7a23004d68ac598035f2c83dd4c074d0582f6db78c990a32ecae9bd3cdd3ad0cb83a07ee0
|
7
|
+
data.tar.gz: d0b48204133a4fb41b643c79f1d858a6f086d65de20fe2a1092db3f25edce6f04ad33cd0a5e0f5ddb0c3f397cc86c8835ec2412c482f6f6bf1f8e70d788b3f2c
|
data/lib/scraper.rb
CHANGED
@@ -39,7 +39,7 @@ module Scraper
|
|
39
39
|
pool_coordinates = pool_addresses.map { |address| gather_pool_coordinates(address) }
|
40
40
|
|
41
41
|
# Convert Pool Data to Hash
|
42
|
-
pool_names.each_with_index do |
|
42
|
+
pool_names.each_with_index do |_, index|
|
43
43
|
current_pool = {}
|
44
44
|
current_pool[:name] = pool_names[index]
|
45
45
|
current_pool[:url] = pool_links[index]
|
@@ -52,10 +52,8 @@ module Scraper
|
|
52
52
|
File.open("pool_urls.json","w") do |f|
|
53
53
|
f.write(@pool_urls.to_json)
|
54
54
|
end
|
55
|
-
end
|
56
55
|
|
57
|
-
|
58
|
-
2 * num
|
56
|
+
@pool_urls
|
59
57
|
end
|
60
58
|
|
61
59
|
def swim_time_finder(week, lane_swim_row_index)
|
@@ -86,7 +84,7 @@ module Scraper
|
|
86
84
|
end
|
87
85
|
|
88
86
|
# remove days with no swim times
|
89
|
-
weeks.delete_if { |
|
87
|
+
weeks.delete_if { |_, time| time == [" "] || time == [] }
|
90
88
|
end
|
91
89
|
|
92
90
|
def gather_pool_addresses(pools)
|
@@ -140,7 +138,7 @@ module Scraper
|
|
140
138
|
def gather_pool_swim_times
|
141
139
|
begin
|
142
140
|
@pool_urls ||= JSON.parse(File.read('pool_urls.json'), symbolize_names: true)
|
143
|
-
rescue
|
141
|
+
rescue
|
144
142
|
puts "Couldn't open pool_info, run scrape -f or run in path with pool_urls.json file"
|
145
143
|
exit
|
146
144
|
end
|
@@ -163,6 +161,8 @@ module Scraper
|
|
163
161
|
f.write(@pool_urls.to_json)
|
164
162
|
puts "\nWriting pools_data.json complete"
|
165
163
|
end
|
164
|
+
|
165
|
+
@pool_urls
|
166
166
|
end
|
167
167
|
|
168
168
|
def gather_pool_program_cost_status
|
@@ -181,12 +181,14 @@ module Scraper
|
|
181
181
|
pool_url_regex = pool[:url].match(/\/parks\/prd\/facilities\/complex\/\d*/).to_s
|
182
182
|
match = free_facility_urls_regexed.find{ |e| pool_url_regex == e }
|
183
183
|
pool[:free_swim] = match ? true : false
|
184
|
-
|
184
|
+
end
|
185
185
|
|
186
186
|
File.open("pools_data.json","w") do |f|
|
187
187
|
f.write(@pools.to_json)
|
188
188
|
puts "Writing program cost status to pools_data.json complete"
|
189
189
|
end
|
190
|
+
|
191
|
+
@pools
|
190
192
|
end
|
191
193
|
|
192
194
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: TOSwimScraper
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.1
|
4
|
+
version: 0.1.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Erich Welz
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2016-04-
|
11
|
+
date: 2016-04-22 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|