gooby 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README +200 -35
- data/bin/code_scan.rb +1 -3
- data/bin/gooby_been_there.rb +12 -14
- data/bin/gooby_config.rb +11 -3
- data/bin/gooby_csv_validation.rb +50 -0
- data/bin/gooby_first_trackpoints_as_poi.rb +31 -0
- data/bin/gooby_gen_gmap.rb +7 -3
- data/bin/gooby_parser.rb +7 -5
- data/bin/gooby_splitter.rb +7 -4
- data/bin/gooby_version.rb +7 -3
- data/bin/run_all.sh +12 -2
- data/bin/run_been_there.sh +4 -1
- data/bin/run_config.sh +12 -0
- data/bin/run_csv_validation.sh +15 -0
- data/bin/run_db_gen.sh +1 -1
- data/bin/run_db_load.sh +1 -1
- data/bin/run_first_trackpoints_as_poi.sh +16 -0
- data/bin/run_gen_gmaps.sh +7 -6
- data/bin/run_parse_full.sh +45 -0
- data/bin/run_parse_samples.sh +21 -0
- data/bin/run_split.sh +5 -4
- data/bin/run_version.sh +12 -0
- data/config/gooby_config.yaml +130 -131
- data/data/20050305_corporate_cup_hm.csv +251 -251
- data/data/20050430_nashville_marathon_km.csv +1208 -0
- data/data/20060115_phoenix_marathon.csv +1280 -1280
- data/data/20070101_davidson_11m.csv +251 -0
- data/data/{davidson_11m_20070101.xml → 20070101_davidson_11m.xml} +0 -0
- data/data/{davidson_5K_20070505.xml → 20070505_davidson_5k.xml} +0 -0
- data/data/20070505_davidson_5k_km.csv +286 -0
- data/data/hrm1.csv +5 -0
- data/lib/gooby.rb +27 -3144
- data/lib/gooby_code_scanner.rb +288 -0
- data/lib/gooby_command.rb +210 -0
- data/lib/gooby_configuration.rb +123 -0
- data/lib/gooby_counter_hash.rb +95 -0
- data/lib/gooby_course.rb +117 -0
- data/lib/gooby_csv_point.rb +71 -0
- data/lib/gooby_csv_reader.rb +71 -0
- data/lib/gooby_csv_run.rb +28 -0
- data/lib/gooby_delim_line.rb +42 -0
- data/lib/gooby_dttm.rb +87 -0
- data/lib/gooby_duration.rb +86 -0
- data/lib/gooby_forerunner_xml_parser.rb +191 -0
- data/lib/gooby_forerunner_xml_splitter.rb +115 -0
- data/lib/gooby_google_map_generator.rb +385 -0
- data/lib/gooby_history.rb +41 -0
- data/lib/gooby_kernel.rb +163 -0
- data/lib/gooby_lap.rb +30 -0
- data/lib/gooby_line.rb +80 -0
- data/lib/gooby_object.rb +22 -0
- data/lib/gooby_point.rb +172 -0
- data/lib/gooby_run.rb +213 -0
- data/lib/gooby_simple_xml_parser.rb +50 -0
- data/lib/gooby_test_helper.rb +23 -0
- data/lib/gooby_track.rb +47 -0
- data/lib/gooby_track_point.rb +229 -0
- data/lib/gooby_training_center_xml_parser.rb +224 -0
- data/lib/gooby_training_center_xml_splitter.rb +116 -0
- data/lib/split_code.sh +29 -0
- data/samples/20050305_corporate_cup_hm.html +269 -269
- data/samples/20050430_nashville_marathon.html +1410 -1266
- data/samples/20060115_phoenix_marathon.html +1311 -1311
- data/samples/{davidson_11m_20070101.html → 20070101_davidson_11m.html} +267 -267
- data/samples/20070505_davidson_5k.html +413 -0
- data/samples/been_there.txt +52 -704
- data/samples/hrm1.html +87 -0
- data/sql/gooby.ddl +20 -16
- data/sql/gooby_load.dml +36 -9
- metadata +48 -14
- data/bin/example_usage.txt +0 -55
- data/bin/run_parse.sh +0 -43
- data/bin/run_parse_named.sh +0 -19
- data/data/20050430_nashville_marathon.csv +0 -1208
- data/data/davidson_11m_20070101.csv +0 -251
- data/data/davidson_5K_20070505.csv +0 -286
- data/data/test1.txt +0 -4
- data/samples/davidson_5K_20070505.html +0 -395
data/data/hrm1.csv
ADDED
@@ -0,0 +1,5 @@
|
|
1
|
+
#cols: primary_key|run_id|date|time|tkpt_num|latitude|longitude|altitude|heartbeat|run_distance|uom|run_elapsed|lap_tkpt_number|lap_distance|lap_elapsed
|
2
|
+
2007-07-16T23:42:44Z.1|2007-07-16T23:42:44Z|2007-07-16|23:42:44|1|35.495497|-80.832159|241.908936|75|0.0|mi|00:00:00|1|0|00:00:00
|
3
|
+
2007-07-16T23:42:44Z.2|2007-07-16T23:42:44Z|2007-07-16|23:42:46|2|35.495499|-80.832152|241.428223|75|0.000417102641276755|mi|00:00:02|2|0.000417102641276755|00:00:02
|
4
|
+
2007-07-16T23:42:44Z.5|2007-07-16T23:42:44Z|2007-07-16|23:42:54|5|35.495522|-80.832229|242.870239|74|0.00503094417417941|mi|00:00:10|3|0.00503094417417941|00:00:10
|
5
|
+
2007-07-16T23:42:44Z.6|2007-07-16T23:42:44Z|2007-07-16|23:47:46|6|35.495465|-80.832151|242.389648|87|0.0109261260004515|mi|00:05:02|4|0.0109261260004515|00:05:02
|
data/lib/gooby.rb
CHANGED
@@ -1,43 +1,6 @@
|
|
1
1
|
=begin
|
2
2
|
|
3
3
|
Gooby = Google APIs + Ruby
|
4
|
-
This file contains the classes and modules for the Gooby project.
|
5
|
-
|
6
|
-
See file 'tests/ts_gooby.rb' for the regression test suite.
|
7
|
-
|
8
|
-
Index of Modules and Classes in this file:
|
9
|
-
line type Module or Class name
|
10
|
-
---- ------ ---------------------------
|
11
|
-
57 module Gooby
|
12
|
-
64 module GoobyKernel
|
13
|
-
191 module TestHelper
|
14
|
-
209 class GoobyObject
|
15
|
-
222 class CounterHash
|
16
|
-
305 class DelimLine
|
17
|
-
340 class DtTm
|
18
|
-
417 class Duration
|
19
|
-
496 class ForerunnerXmlParser
|
20
|
-
665 class ForerunnerXmlSplitter
|
21
|
-
775 class TrainingCenterXmlParser
|
22
|
-
962 class TrainingCenterXmlSplitter
|
23
|
-
1067 class GeoData
|
24
|
-
1250 class GoogleMapGenerator
|
25
|
-
1625 class History
|
26
|
-
1656 class Lap
|
27
|
-
1671 class Line
|
28
|
-
1749 class Configuration
|
29
|
-
1848 class Point
|
30
|
-
1987 class CsvPoint
|
31
|
-
2042 class CvsRun
|
32
|
-
2060 class CsvReader
|
33
|
-
2121 class Trackpoint
|
34
|
-
2319 class Run
|
35
|
-
2521 class SimpleXmlParser
|
36
|
-
2561 class Track
|
37
|
-
2593 class Course
|
38
|
-
2713 class CodeScanner
|
39
|
-
3003 class GoobyCommand
|
40
|
-
|
41
4
|
Gooby - Copyright 2007 by Chris Joakim.
|
42
5
|
Gooby is available under GNU General Public License (GPL) license.
|
43
6
|
|
@@ -52,3110 +15,30 @@ require 'singleton'
|
|
52
15
|
require 'time'
|
53
16
|
require 'yaml'
|
54
17
|
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
def project_author
|
83
|
-
'Chris Joakim'
|
84
|
-
end
|
85
|
-
|
86
|
-
# Return a String year, like '2007'.
|
87
|
-
def project_year
|
88
|
-
project_date[0...4] # start, length
|
89
|
-
end
|
90
|
-
|
91
|
-
# Return a String containing copyright, year, and author.
|
92
|
-
def project_copyright
|
93
|
-
"Copyright (C) #{project_year} #{project_author}"
|
94
|
-
end
|
95
|
-
|
96
|
-
def project_embedded_comment
|
97
|
-
"#{project_name} #{project_version_number}"
|
98
|
-
end
|
99
|
-
|
100
|
-
# Return a String containing GNU/GPL, and the gpl.html URL.
|
101
|
-
def project_license
|
102
|
-
'GNU General Public License (GPL). See http://www.gnu.org/copyleft/gpl.html'
|
103
|
-
end
|
104
|
-
|
105
|
-
# Return an Array of lines in file, optionally stripped.
|
106
|
-
def read_lines(filename, strip=false)
|
107
|
-
|
108
|
-
array = IO.readlines(filename)
|
109
|
-
if strip
|
110
|
-
array = strip_lines(array)
|
111
|
-
end
|
112
|
-
return array
|
113
|
-
end
|
114
|
-
|
115
|
-
# Return an Array of lines in file per the given delimeter, optionally stripped.
|
116
|
-
def read_as_ascii_lines(filename, delim=10, strip=false)
|
117
|
-
|
118
|
-
array = Array.new
|
119
|
-
file = File.new(filename)
|
120
|
-
currLine = ''
|
121
|
-
bytesRead = 0
|
122
|
-
linesRead = 0
|
123
|
-
|
124
|
-
file.each_byte { |b|
|
125
|
-
bytesRead = bytesRead + 1
|
126
|
-
if (b == delim) # delim is 13 for quicken, 10 for address book xml
|
127
|
-
array << currLine
|
128
|
-
currLine = ''
|
129
|
-
linesRead = linesRead + 1
|
130
|
-
else
|
131
|
-
if (b < 127)
|
132
|
-
currLine << "#{b.chr}"
|
133
|
-
end
|
134
|
-
end
|
135
|
-
}
|
136
|
-
|
137
|
-
if currLine.size > 0
|
138
|
-
array << currLine
|
139
|
-
end
|
140
|
-
if strip
|
141
|
-
array = strip_lines(array)
|
142
|
-
end
|
143
|
-
return array
|
144
|
-
end
|
145
|
-
|
146
|
-
# Strip the lines/Strings; return a new Array.
|
147
|
-
def strip_lines(array)
|
148
|
-
|
149
|
-
newArray = Array.new
|
150
|
-
if (array != nil)
|
151
|
-
array.each { |line| line.strip! ; newArray << line }
|
152
|
-
end
|
153
|
-
return newArray
|
154
|
-
end
|
155
|
-
|
156
|
-
def tokenize(string, delim=nil, strip=false)
|
157
|
-
if string
|
158
|
-
tokens = string.split(delim)
|
159
|
-
if strip
|
160
|
-
tokens.each { |tok| tok.strip! }
|
161
|
-
end
|
162
|
-
tokens
|
163
|
-
else
|
164
|
-
Array.new
|
165
|
-
end
|
166
|
-
end
|
167
|
-
|
168
|
-
def default_delimiter
|
169
|
-
return '|'
|
170
|
-
end
|
171
|
-
|
172
|
-
def invalid_time
|
173
|
-
return -99999999
|
174
|
-
end
|
175
|
-
|
176
|
-
def invalid_latitude
|
177
|
-
return -1
|
178
|
-
end
|
179
|
-
|
180
|
-
def invalid_longitude
|
181
|
-
return -1
|
182
|
-
end
|
183
|
-
|
184
|
-
def invalid_altitude
|
185
|
-
return -1
|
186
|
-
end
|
187
|
-
end
|
188
|
-
|
189
|
-
# =============================================================================
|
190
|
-
|
191
|
-
module TestHelper
|
192
|
-
|
193
|
-
def setup
|
194
|
-
puts "test: #{name}"
|
195
|
-
end
|
196
|
-
|
197
|
-
def teardown
|
198
|
-
@debug = false
|
199
|
-
end
|
200
|
-
end
|
201
|
-
|
202
|
-
# =============================================================================
|
203
|
-
|
204
|
-
=begin rdoc
|
205
|
-
This is the abstract superclass of several Gooby classes.
|
206
|
-
Includes modules GoobyIO, Introspect, and GoobyProjectInfo.
|
207
|
-
=end
|
208
|
-
|
209
|
-
class GoobyObject
|
210
|
-
|
211
|
-
include Gooby::GoobyKernel
|
212
|
-
end
|
213
|
-
|
214
|
-
# =============================================================================
|
215
|
-
|
216
|
-
=begin rdoc
|
217
|
-
This class wrappers a Hash object and provides increment/decrement functionality
|
218
|
-
for a given key. It is used to sum the number of things (i.e. - xml tags) in a
|
219
|
-
collection.
|
220
|
-
=end
|
221
|
-
|
222
|
-
class CounterHash < GoobyObject
|
223
|
-
|
224
|
-
def initialize
|
225
|
-
@hash = Hash.new(0)
|
226
|
-
end
|
227
|
-
|
228
|
-
def size
|
229
|
-
@hash.size
|
230
|
-
end
|
231
|
-
|
232
|
-
# Return the Integer count for the given key; zero default.
|
233
|
-
def value(key)
|
234
|
-
(@hash.has_key?(key)) ? @hash[key] : 0
|
235
|
-
end
|
236
|
-
|
237
|
-
# Increment the count for the given key.
|
238
|
-
def increment(key)
|
239
|
-
if key == nil
|
240
|
-
return
|
241
|
-
end
|
242
|
-
if (@hash.has_key?(key))
|
243
|
-
val = @hash[key]
|
244
|
-
@hash[key] = val + 1
|
245
|
-
else
|
246
|
-
@hash[key] = 1
|
247
|
-
end
|
248
|
-
end
|
249
|
-
|
250
|
-
def increment_tokens(text)
|
251
|
-
tokens = tokenize(text)
|
252
|
-
tokens.each { |token| increment(token) }
|
253
|
-
end
|
254
|
-
|
255
|
-
# Decrement the count for the given key.
|
256
|
-
def decrement(key)
|
257
|
-
if key == nil
|
258
|
-
return
|
259
|
-
end
|
260
|
-
if (@hash.has_key?(key))
|
261
|
-
val = @hash[key]
|
262
|
-
@hash[key] = val - 1
|
263
|
-
else
|
264
|
-
@hash[key] = -1
|
265
|
-
end
|
266
|
-
end
|
267
|
-
|
268
|
-
# Return an Array of the sorted keys.
|
269
|
-
def sorted_keys
|
270
|
-
@hash.keys.sort
|
271
|
-
end
|
272
|
-
|
273
|
-
# Return a String containing all key=val pairs.
|
274
|
-
def to_s
|
275
|
-
s = "CHash:"
|
276
|
-
sorted_keys.each { |key|
|
277
|
-
val = @hash[key]
|
278
|
-
s << " key: [#{key}] val: [#{val}]\n"
|
279
|
-
}
|
280
|
-
s
|
281
|
-
end
|
282
|
-
|
283
|
-
# Return an XML String containing all key=val pairs, optionally aligned.
|
284
|
-
def to_xml(aligned=false)
|
285
|
-
s = "<CHash>"
|
286
|
-
sorted_keys.each { |key|
|
287
|
-
val = @hash[key]
|
288
|
-
(aligned) ? s << "\n " : s << ''
|
289
|
-
s << " <entry key='#{key}' value='#{val}'/>"
|
290
|
-
}
|
291
|
-
if aligned
|
292
|
-
s << "\n "
|
293
|
-
end
|
294
|
-
s << " </CHash>"
|
295
|
-
s
|
296
|
-
end
|
297
|
-
end
|
298
|
-
|
299
|
-
# =============================================================================
|
300
|
-
|
301
|
-
=begin rdoc
|
302
|
-
Instances of this class represent a delimited line of text, such as csv.
|
303
|
-
=end
|
304
|
-
|
305
|
-
class DelimLine < GoobyObject
|
306
|
-
|
307
|
-
attr_reader :line, :trim, :delim, :tokens
|
308
|
-
|
309
|
-
def initialize(line, trim=true, delim=default_delimiter)
|
310
|
-
@line = line
|
311
|
-
@trim = trim
|
312
|
-
@delim = delim
|
313
|
-
@tokens = @line.split(@delim)
|
314
|
-
if trim
|
315
|
-
@tokens.each { | token | token.strip! }
|
316
|
-
end
|
317
|
-
end
|
318
|
-
|
319
|
-
def as_trackpoint(num_idx, lat_idx, lng_idx, alt_idx, dttm_idx)
|
320
|
-
Trackpoint.new(@tokens[num_idx], @tokens[lat_idx], @tokens[lng_idx], @tokens[alt_idx], @tokens[dttm_idx])
|
321
|
-
end
|
322
|
-
|
323
|
-
def is_comment?
|
324
|
-
@line.strip.match('^#') ? true : false
|
325
|
-
end
|
326
|
-
|
327
|
-
def to_s
|
328
|
-
"DelimLine: length: #{@line.size} trim: #{@trim} delim: #{@delim} tokens: #{@tokens.size}"
|
329
|
-
end
|
330
|
-
end
|
331
|
-
|
332
|
-
# =============================================================================
|
333
|
-
|
334
|
-
=begin rdoc
|
335
|
-
Instances of this class represent a Date and Time as parsed from a value
|
336
|
-
such as '2006-01-15T13:41:40Z' in an XML file produced by a GPS device.
|
337
|
-
It wrappers both a DateTime and Time object.
|
338
|
-
=end
|
339
|
-
|
340
|
-
class DtTm < GoobyObject
|
341
|
-
|
342
|
-
attr_accessor :rawdata, :dateTime, :time, :valid
|
343
|
-
|
344
|
-
# Constructor; arg is a String like '2006-01-15T13:41:40Z'.
|
345
|
-
def initialize(raw)
|
346
|
-
if raw
|
347
|
-
@rawdata = raw.strip
|
348
|
-
if @rawdata.size > 18
|
349
|
-
@date_time = DateTime.parse(@rawdata[0..18])
|
350
|
-
@time = Time.parse(@date_time.to_s)
|
351
|
-
@valid = true
|
352
|
-
else
|
353
|
-
@valid = false
|
354
|
-
end
|
355
|
-
else
|
356
|
-
@rawdata = ''
|
357
|
-
@valid = false
|
358
|
-
end
|
359
|
-
end
|
360
|
-
|
361
|
-
public
|
362
|
-
|
363
|
-
# Return @time.to_i
|
364
|
-
def to_i()
|
365
|
-
(@time) ? @time.to_i : invalid_time
|
366
|
-
end
|
367
|
-
|
368
|
-
# Calculates and returns diff between another instance.
|
369
|
-
def seconds_diff(anotherDtTm)
|
370
|
-
if anotherDtTm
|
371
|
-
to_i - anotherDtTm.to_i
|
372
|
-
else
|
373
|
-
invalid_time
|
374
|
-
end
|
375
|
-
end
|
376
|
-
|
377
|
-
def yyyy_mm_dd
|
378
|
-
@time.strftime("%Y-%m-%d")
|
379
|
-
end
|
380
|
-
|
381
|
-
def yyyy_mm_dd_hh_mm_ss(delim=' ')
|
382
|
-
@time.strftime("%Y-%m-%d#{delim}%H:%M:%S")
|
383
|
-
end
|
384
|
-
|
385
|
-
def hh_mm_ss
|
386
|
-
@time.strftime("%H:%M:%S")
|
387
|
-
end
|
388
|
-
|
389
|
-
# Calculate and return time diff in 'hh:mm:ss' format.
|
390
|
-
def hhmmss_diff(anotherDtTm)
|
391
|
-
if anotherDtTm
|
392
|
-
t = @time - (anotherDtTm.to_i)
|
393
|
-
t.strftime("%H:%M:%S")
|
394
|
-
else
|
395
|
-
'00:00:00'
|
396
|
-
end
|
397
|
-
end
|
398
|
-
|
399
|
-
def to_s
|
400
|
-
"#{@rawdata}"
|
401
|
-
end
|
402
|
-
|
403
|
-
# Return a String with state values for debugging.
|
404
|
-
def print_string
|
405
|
-
"DtTm: #{yyyy_mm_dd_hh_mm_ss} #{to_i} #{@rawdata}"
|
406
|
-
end
|
407
|
-
end
|
408
|
-
|
409
|
-
# =============================================================================
|
410
|
-
|
411
|
-
=begin rdoc
|
412
|
-
Instances of this class represent the contents of a Forerunner extract
|
413
|
-
<Duration> tag, such as:
|
414
|
-
<Duration>PT507.870S</Duration>
|
415
|
-
=end
|
416
|
-
|
417
|
-
class Duration < GoobyObject
|
418
|
-
|
419
|
-
attr_accessor :rawdata, :seconds, :minutes, :mmss
|
420
|
-
|
421
|
-
# Constructor; arg is a String like 'PT507.870S'.
|
422
|
-
def initialize(raw)
|
423
|
-
if raw
|
424
|
-
@rawdata = scrub("#{raw}")
|
425
|
-
@seconds = @rawdata.to_f
|
426
|
-
@minutes = @seconds / 60
|
427
|
-
@base_min = @minutes.floor
|
428
|
-
@frac_min = @minutes - @base_min
|
429
|
-
@frac_sec = @frac_min * 60
|
430
|
-
|
431
|
-
@mmss = ''
|
432
|
-
if (@base_min < 10)
|
433
|
-
@mmss << "0#{@base_min}:"
|
434
|
-
else
|
435
|
-
@mmss << "#{@base_min}:"
|
436
|
-
end
|
437
|
-
if (@frac_sec < 10)
|
438
|
-
@mmss << "0#{@frac_sec}"
|
439
|
-
else
|
440
|
-
@mmss << "#{@frac_sec}"
|
441
|
-
end
|
442
|
-
if (@mmss.size > 8)
|
443
|
-
@mmss = @mmss[0..8]
|
444
|
-
end
|
445
|
-
else
|
446
|
-
@rawdata = ''
|
447
|
-
@seconds = invalidDistance
|
448
|
-
@minutes = invalidMinutes
|
449
|
-
@mmss = '??:??.?'
|
450
|
-
end
|
451
|
-
end
|
452
|
-
|
453
|
-
private
|
454
|
-
|
455
|
-
def scrub(raw)
|
456
|
-
if (raw)
|
457
|
-
raw.strip!
|
458
|
-
newStr = ''
|
459
|
-
raw.each_byte { | b |
|
460
|
-
if ((b >= 48) && (b <= 57))
|
461
|
-
newStr << b
|
462
|
-
end
|
463
|
-
if (b == 46)
|
464
|
-
newStr << b
|
465
|
-
end
|
466
|
-
}
|
467
|
-
return newStr
|
468
|
-
else
|
469
|
-
''
|
470
|
-
end
|
471
|
-
end
|
472
|
-
|
473
|
-
public
|
474
|
-
|
475
|
-
def to_s
|
476
|
-
"#{@mmss}"
|
477
|
-
end
|
478
|
-
|
479
|
-
# Return a String with state values for debugging.
|
480
|
-
def print_string
|
481
|
-
"Duration: #{@rawdata} sec: #{@seconds} min: #{@minutes} mmss: #{@mmss} bm: #{@base_min} fm: #{@frac_min} fs: #{@frac_sec}"
|
482
|
-
end
|
483
|
-
end
|
484
|
-
|
485
|
-
# =============================================================================
|
486
|
-
|
487
|
-
=begin rdoc
|
488
|
-
Instances of this class are used to parse a Forerunner XML file in a SAX-like
|
489
|
-
manner. Instances of the model classes - History, Run, Track, Trackpoint,
|
490
|
-
etc. are created in this parsing process.
|
491
|
-
|
492
|
-
See http://www.garmin.com/xmlschemas/ForerunnerLogbookv1.xsd for the XML Schema
|
493
|
-
Definition for the Garmin Forerunner XML. The Gooby object model mirrors this XSD.
|
494
|
-
=end
|
495
|
-
|
496
|
-
class ForerunnerXmlParser
|
497
|
-
|
498
|
-
DETAIL_TAGS = %w( Notes StartTime Duration Length Latitude Longitude Altitude Time BeginPosition EndPosition )
|
499
|
-
|
500
|
-
include REXML::StreamListener
|
501
|
-
|
502
|
-
attr_reader :history, :cvHash, :tagCount
|
503
|
-
|
504
|
-
def initialize
|
505
|
-
@cv_hash = Hash.new("")
|
506
|
-
@tag_count = 0
|
507
|
-
@run_count = 0
|
508
|
-
@lap_count = 0
|
509
|
-
@track_count = 0
|
510
|
-
@trackpoint_count = 0
|
511
|
-
@curr_text = "";
|
512
|
-
@history = History.new
|
513
|
-
@curr_run = nil
|
514
|
-
@curr_lap = nil
|
515
|
-
@curr_track = nil
|
516
|
-
@curr_begin_position = nil
|
517
|
-
@@curr_end_position = nil
|
518
|
-
end
|
519
|
-
|
520
|
-
public
|
521
|
-
|
522
|
-
# SAX API method; handles 'Run', 'Lap', 'Track'.
|
523
|
-
def tag_start(tagname, attrs)
|
524
|
-
@tag_count += 1
|
525
|
-
@currTag = tagname
|
526
|
-
@cv_hash[tagname] = ''
|
527
|
-
|
528
|
-
if detail_tag?(tagname)
|
529
|
-
@inDetail = true
|
530
|
-
end
|
531
|
-
|
532
|
-
if is_tag?('Run', tagname)
|
533
|
-
@run_count = @run_count + 1
|
534
|
-
@lap_count = 0
|
535
|
-
@track_count = 0
|
536
|
-
@trackpoint_count = 0
|
537
|
-
@curr_run = Run.new(@run_count)
|
538
|
-
@history.add_run(@curr_run)
|
539
|
-
@cv_hash['Notes'] = ''
|
540
|
-
return
|
541
|
-
end
|
542
|
-
|
543
|
-
if is_tag?('Lap', tagname)
|
544
|
-
@lap_count = @lap_count + 1
|
545
|
-
@curr_lap = Lap.new(@lap_count)
|
546
|
-
return
|
547
|
-
end
|
548
|
-
|
549
|
-
if is_tag?('Track', tagname)
|
550
|
-
@track_count = @track_count + 1
|
551
|
-
@curr_track = Track.new(@track_count)
|
552
|
-
return
|
553
|
-
end
|
554
|
-
end
|
555
|
-
|
556
|
-
# SAX API method; handles 'Position', 'Trackpoint', 'Track', 'Lap', 'Run'.
|
557
|
-
def tag_end(tagname)
|
558
|
-
if @inDetail
|
559
|
-
@cv_hash[tagname] = @curr_text
|
560
|
-
else
|
561
|
-
if is_tag?('Position', tagname)
|
562
|
-
lat = @cv_hash['Latitude']
|
563
|
-
long = @cv_hash['Longitude']
|
564
|
-
@curr_begin_position = Point.new(lat.strip, long.strip)
|
565
|
-
@@curr_end_position = Point.new(lat.strip, long.strip)
|
566
|
-
end
|
567
|
-
|
568
|
-
if is_tag?('BeginPosition', tagname)
|
569
|
-
lat = @cv_hash['Latitude']
|
570
|
-
long = @cv_hash['Longitude']
|
571
|
-
@curr_begin_position = Point.new(lat.strip, long.strip)
|
572
|
-
end
|
573
|
-
|
574
|
-
if is_tag?('EndPosition', tagname)
|
575
|
-
lat = @cv_hash['Latitude']
|
576
|
-
long = @cv_hash['Longitude']
|
577
|
-
@@curr_end_position = Point.new(lat.strip, long.strip)
|
578
|
-
end
|
579
|
-
|
580
|
-
if is_tag?('Trackpoint', tagname)
|
581
|
-
@trackpoint_count = @trackpoint_count + 1
|
582
|
-
lat = @cv_hash['Latitude']
|
583
|
-
long = @cv_hash['Longitude']
|
584
|
-
alt = @cv_hash['Altitude']
|
585
|
-
time = @cv_hash['Time']
|
586
|
-
tp = Trackpoint.new(@trackpoint_count, lat, long, alt, time)
|
587
|
-
@curr_track.add_trackpoint(tp)
|
588
|
-
end
|
589
|
-
|
590
|
-
if is_tag?('Track', tagname)
|
591
|
-
if @curr_run != nil
|
592
|
-
@curr_run.add_track(@curr_track)
|
593
|
-
end
|
594
|
-
end
|
595
|
-
|
596
|
-
if is_tag?('Lap', tagname)
|
597
|
-
@curr_lap.startTime = @cv_hash['StartTime']
|
598
|
-
@curr_lap.duration = Duration.new(@cv_hash['Duration'])
|
599
|
-
@curr_lap.length = @cv_hash['Length']
|
600
|
-
@curr_lap.begin_position = @curr_begin_position
|
601
|
-
@curr_lap.end_position = @@curr_end_position
|
602
|
-
@curr_run.add_lap(@curr_lap)
|
603
|
-
end
|
604
|
-
|
605
|
-
if is_tag?('Run', tagname)
|
606
|
-
@curr_run.notes = @cv_hash['Notes']
|
607
|
-
end
|
608
|
-
end
|
609
|
-
|
610
|
-
@inDetail = false
|
611
|
-
@curr_text = ""
|
612
|
-
@currTag = ""
|
613
|
-
end
|
614
|
-
|
615
|
-
# SAX API method.
|
616
|
-
def text(txt)
|
617
|
-
if @inDetail
|
618
|
-
@curr_text = @curr_text + txt
|
619
|
-
end
|
620
|
-
end
|
621
|
-
|
622
|
-
# Iterate all parsed Run objects and print each with to_s.
|
623
|
-
def gdump()
|
624
|
-
@history.runs().each { |run| puts run.to_s }
|
625
|
-
end
|
626
|
-
|
627
|
-
# Iterate all parsed Run objects and print each with to_s.
|
628
|
-
def dump()
|
629
|
-
@history.runs().each { |run| puts run.to_s }
|
630
|
-
end
|
631
|
-
|
632
|
-
# Iterate all parsed Run objects and print each with put_csv.
|
633
|
-
def put_run_csv()
|
634
|
-
@history.runs().each { |run| run.put_csv() }
|
635
|
-
end
|
636
|
-
|
637
|
-
# Iterate all parsed Run objects and print each with put_tkpt_csv.
|
638
|
-
def put_all_run_tkpt_csv()
|
639
|
-
@history.runs.each { |run| run.put_tkpt_csv() }
|
640
|
-
end
|
641
|
-
|
642
|
-
private
|
643
|
-
|
644
|
-
def is_tag?(tagname, value)
|
645
|
-
tagname == value
|
646
|
-
end
|
647
|
-
|
648
|
-
def detail_tag?(tagname)
|
649
|
-
DETAIL_TAGS.each { |typ|
|
650
|
-
if typ == tagname
|
651
|
-
return true
|
652
|
-
end
|
653
|
-
}
|
654
|
-
return false
|
655
|
-
end
|
656
|
-
end
|
657
|
-
|
658
|
-
# =============================================================================
|
659
|
-
|
660
|
-
=begin rdoc
|
661
|
-
Instances of this class are used to split a large ForerunnerLogbook
|
662
|
-
XML file into individual 'run_' files.
|
663
|
-
=end
|
664
|
-
|
665
|
-
class ForerunnerXmlSplitter < GoobyObject
|
666
|
-
|
667
|
-
attr_reader :out_dir, :forerunner_files, :out_files_hash
|
668
|
-
|
669
|
-
def initialize(xml_file, out_dir)
|
670
|
-
@out_dir = out_dir
|
671
|
-
@forerunner_files = Array.new
|
672
|
-
@forerunner_files << xml_file
|
673
|
-
@out_files_hash = Hash.new
|
674
|
-
end
|
675
|
-
|
676
|
-
def split
|
677
|
-
@forerunner_files.each { |f| process_file(f) }
|
678
|
-
write_files
|
679
|
-
end
|
680
|
-
|
681
|
-
private
|
682
|
-
|
683
|
-
def process_file(forerunnerXmlFile)
|
684
|
-
@file_name = forerunnerXmlFile
|
685
|
-
@xml_lines = read_lines(@file_name, false)
|
686
|
-
@line_num = 0
|
687
|
-
@run_num = 0
|
688
|
-
@curr_run_lines = Array.new
|
689
|
-
@curr_run_tkpts = 0
|
690
|
-
@start_line_num = 0
|
691
|
-
@end_line_num = 0
|
692
|
-
@first_start_time = nil
|
693
|
-
|
694
|
-
@xml_lines.each { |line|
|
695
|
-
@line_num = @line_num + 1
|
696
|
-
if (line.match(/<Run>/))
|
697
|
-
@run_num = @run_num + 1
|
698
|
-
@start_line_num = @line_num
|
699
|
-
@curr_run_lines = Array.new
|
700
|
-
@curr_run_lines << line
|
701
|
-
elsif (line.match(/<StartTime>/)) # <StartTime>2007-01-13T15:37:06Z</StartTime>
|
702
|
-
@curr_run_lines << line
|
703
|
-
if @first_start_time == nil
|
704
|
-
clone = String.new(line)
|
705
|
-
clone.gsub!(/[<>]/, ' ')
|
706
|
-
clone.gsub!(/[-:T]/, '_')
|
707
|
-
clone.gsub!(/[Z]/, '')
|
708
|
-
tokens = clone.split
|
709
|
-
@first_start_time = tokens[1]
|
710
|
-
end
|
711
|
-
elsif (line.match(/<Trackpoint>/))
|
712
|
-
@curr_run_tkpts = @curr_run_tkpts + 1
|
713
|
-
@curr_run_lines << line
|
714
|
-
elsif (line.match(/<\/Run>/))
|
715
|
-
@end_line_num = @line_num
|
716
|
-
@curr_run_lines << line
|
717
|
-
end_run
|
718
|
-
elsif (@curr_run_lines.size > 0)
|
719
|
-
@curr_run_lines << line
|
720
|
-
end
|
721
|
-
}
|
722
|
-
end
|
723
|
-
|
724
|
-
def end_run
|
725
|
-
out_file = "#{@out_dir}/run_#{@first_start_time}.xml"
|
726
|
-
comment = "<!-- file: #{out_file} lines: #{@curr_run_lines.size} (#{@start_line_num} to #{@end_line_num}) tkpts: #{@curr_run_tkpts} --> \n"
|
727
|
-
@curr_run_lines.insert(0, comment)
|
728
|
-
|
729
|
-
prev_entry = @out_files_hash[out_file]
|
730
|
-
if prev_entry
|
731
|
-
if (@curr_run_lines.size >= prev_entry.size)
|
732
|
-
puts "previous entry overlaid for #{out_file}. curr=#{@curr_run_lines.size} prev=#{prev_entry.size}"
|
733
|
-
@out_files_hash[out_file] = @curr_run_lines
|
734
|
-
else
|
735
|
-
puts "previous entry retained for #{out_file}. curr=#{@curr_run_lines.size} prev=#{prev_entry.size}"
|
736
|
-
end
|
737
|
-
else
|
738
|
-
puts "new entry for #{out_file}. curr=#{@curr_run_lines.size}"
|
739
|
-
@out_files_hash[out_file] = @curr_run_lines
|
740
|
-
end
|
741
|
-
|
742
|
-
@curr_run_lines = Array.new
|
743
|
-
@curr_run_tkpts = 0
|
744
|
-
@start_line_num = 0
|
745
|
-
@end_line_num = 0
|
746
|
-
@first_start_time = nil
|
747
|
-
end
|
748
|
-
|
749
|
-
def write_files
|
750
|
-
out_names = @out_files_hash.keys.sort
|
751
|
-
puts "Writing #{out_names.size} extract files..."
|
752
|
-
out_names.each { |out_name|
|
753
|
-
lines = @out_files_hash[out_name]
|
754
|
-
out = File.new out_name, "w+"
|
755
|
-
lines.each { |line| out.write line }
|
756
|
-
out.flush
|
757
|
-
out.close
|
758
|
-
puts "File written: #{out_name}"
|
759
|
-
}
|
760
|
-
puts "output files written."
|
761
|
-
end
|
762
|
-
end
|
763
|
-
|
764
|
-
# =============================================================================
|
765
|
-
|
766
|
-
=begin rdoc
|
767
|
-
Instances of this class are used to parse a Garmin TrainingCenter XML(TCX) file
|
768
|
-
in a SAX-like manner. Instances of the model classes - History, Run, Track,
|
769
|
-
Trackpoint, etc. are created in this parsing process.
|
770
|
-
|
771
|
-
See http://www.garmin.com/xmlschemas/TrainingCenterDatabasev2.xsd for the XML
|
772
|
-
Schema Definition for Garmin TrainingCenter XML.
|
773
|
-
=end
|
774
|
-
|
775
|
-
class TrainingCenterXmlParser
|
776
|
-
|
777
|
-
DETAIL_TAGS = %w( Notes StartTime Duration Length Time
|
778
|
-
TotalTimeSeconds DistanceMeters
|
779
|
-
LatitudeDegrees LongitudeDegrees AltitudeMeters BeginPosition EndPosition )
|
780
|
-
|
781
|
-
include REXML::StreamListener
|
782
|
-
|
783
|
-
attr_reader :history, :cvHash, :tagCount
|
784
|
-
|
785
|
-
def initialize
|
786
|
-
@cv_hash = Hash.new("")
|
787
|
-
@tag_count = 0
|
788
|
-
@run_count = 0
|
789
|
-
@lap_count = 0
|
790
|
-
@track_count = 0
|
791
|
-
@trackpoint_count = 0
|
792
|
-
@curr_text = "";
|
793
|
-
@history = History.new
|
794
|
-
@curr_run = nil
|
795
|
-
@curr_lap = nil
|
796
|
-
@curr_track = nil
|
797
|
-
@curr_begin_position = nil
|
798
|
-
@@curr_end_position = nil
|
799
|
-
@first_lap_start_time = nil
|
800
|
-
@curr_lap_start_time = ''
|
801
|
-
end
|
802
|
-
|
803
|
-
public
|
804
|
-
|
805
|
-
# SAX API method; handles 'Activity', 'Lap', 'Track'.
|
806
|
-
def tag_start(tagname, attrs)
|
807
|
-
@tag_count += 1
|
808
|
-
@currTag = tagname
|
809
|
-
@cv_hash[tagname] = ''
|
810
|
-
|
811
|
-
if detail_tag?(tagname)
|
812
|
-
@inDetail = true
|
813
|
-
end
|
814
|
-
|
815
|
-
if is_tag?('Activity', tagname)
|
816
|
-
@run_count = @run_count + 1
|
817
|
-
@lap_count = 0
|
818
|
-
@track_count = 0
|
819
|
-
@trackpoint_count = 0
|
820
|
-
@curr_run = Run.new(@run_count)
|
821
|
-
@history.add_run(@curr_run)
|
822
|
-
@cv_hash['Notes'] = ''
|
823
|
-
return
|
824
|
-
end
|
825
|
-
|
826
|
-
if is_tag?('Lap', tagname)
|
827
|
-
@lap_count = @lap_count + 1
|
828
|
-
@curr_lap = Lap.new(@lap_count)
|
829
|
-
|
830
|
-
attrs.each { |attr|
|
831
|
-
name = attr[0]
|
832
|
-
val = attr[1]
|
833
|
-
if (name && (name == 'StartTime'))
|
834
|
-
if (@first_lap_start_time == nil)
|
835
|
-
@first_lap_start_time = "#{val}"
|
836
|
-
end
|
837
|
-
@curr_lap_start_time = "#{val}"
|
838
|
-
end
|
839
|
-
}
|
840
|
-
# TODO - capture value of 'StartTime' attribute.
|
841
|
-
return
|
842
|
-
end
|
843
|
-
|
844
|
-
if is_tag?('Track', tagname)
|
845
|
-
@track_count = @track_count + 1
|
846
|
-
@curr_track = Track.new(@track_count)
|
847
|
-
return
|
848
|
-
end
|
849
|
-
|
850
|
-
end
|
851
|
-
|
852
|
-
# SAX API method; handles 'Position', 'Trackpoint', 'Track', 'Lap', 'Run'.
|
853
|
-
def tag_end(tagname)
|
854
|
-
if @inDetail
|
855
|
-
@cv_hash[tagname] = @curr_text
|
856
|
-
else
|
857
|
-
if is_tag?('Position', tagname)
|
858
|
-
lat = @cv_hash['LatitudeDegrees']
|
859
|
-
long = @cv_hash['LongitudeDegrees']
|
860
|
-
@curr_begin_position = Point.new(lat.strip, long.strip)
|
861
|
-
@@curr_end_position = Point.new(lat.strip, long.strip)
|
862
|
-
end
|
863
|
-
|
864
|
-
if is_tag?('BeginPosition', tagname)
|
865
|
-
lat = @cv_hash['LatitudeDegrees']
|
866
|
-
long = @cv_hash['LongitudeDegrees']
|
867
|
-
@curr_begin_position = Point.new(lat.strip, long.strip)
|
868
|
-
end
|
869
|
-
|
870
|
-
if is_tag?('EndPosition', tagname)
|
871
|
-
lat = @cv_hash['LatitudeDegrees']
|
872
|
-
long = @cv_hash['LongitudeDegrees']
|
873
|
-
@@curr_end_position = Point.new(lat.strip, long.strip)
|
874
|
-
end
|
875
|
-
|
876
|
-
if is_tag?('Trackpoint', tagname)
|
877
|
-
@trackpoint_count = @trackpoint_count + 1
|
878
|
-
lat = @cv_hash['LatitudeDegrees']
|
879
|
-
long = @cv_hash['LongitudeDegrees']
|
880
|
-
alt = @cv_hash['AltitudeMeters']
|
881
|
-
time = @cv_hash['Time']
|
882
|
-
|
883
|
-
hash = Hash.new('')
|
884
|
-
hash['lap_number'] = "#{@lap_count}"
|
885
|
-
hash['first_lap_start_time'] = "#{@first_lap_start_time}"
|
886
|
-
hash['curr_lap_start_time'] = "#{@curr_lap_start_time}"
|
887
|
-
|
888
|
-
tp = Trackpoint.new(@trackpoint_count, lat, long, alt, time, hash)
|
889
|
-
@curr_track.add_trackpoint(tp)
|
890
|
-
end
|
891
|
-
|
892
|
-
if is_tag?('Track', tagname)
|
893
|
-
if @curr_run != nil
|
894
|
-
@curr_run.add_track(@curr_track)
|
895
|
-
end
|
896
|
-
end
|
897
|
-
|
898
|
-
if is_tag?('Lap', tagname)
|
899
|
-
@curr_run.add_lap(@curr_lap)
|
900
|
-
end
|
901
|
-
|
902
|
-
if is_tag?('Activity', tagname)
|
903
|
-
@curr_run.notes = @cv_hash['Notes']
|
904
|
-
end
|
905
|
-
end
|
906
|
-
|
907
|
-
@inDetail = false
|
908
|
-
@curr_text = ""
|
909
|
-
@currTag = ""
|
910
|
-
end
|
911
|
-
|
912
|
-
# SAX API method.
|
913
|
-
def text(txt)
|
914
|
-
if @inDetail
|
915
|
-
@curr_text = @curr_text + txt
|
916
|
-
end
|
917
|
-
end
|
918
|
-
|
919
|
-
# Iterate all parsed Run objects and print each with to_s.
|
920
|
-
def gdump()
|
921
|
-
@history.runs().each { |run| puts run.to_s }
|
922
|
-
end
|
923
|
-
|
924
|
-
# Iterate all parsed Run objects and print each with to_s.
|
925
|
-
def dump()
|
926
|
-
@history.runs().each { |run| puts run.to_s }
|
927
|
-
end
|
928
|
-
|
929
|
-
# Iterate all parsed Run objects and print each with put_csv.
|
930
|
-
def put_run_csv()
|
931
|
-
@history.runs().each { |run| run.put_csv() }
|
932
|
-
end
|
933
|
-
|
934
|
-
# Iterate all parsed Run objects and print each with put_tkpt_csv.
|
935
|
-
def put_all_run_tkpt_csv()
|
936
|
-
@history.runs.each { |run| run.put_tkpt_csv() }
|
937
|
-
end
|
938
|
-
|
939
|
-
private
|
940
|
-
|
941
|
-
def is_tag?(tagname, value)
|
942
|
-
tagname == value
|
943
|
-
end
|
944
|
-
|
945
|
-
def detail_tag?(tagname)
|
946
|
-
DETAIL_TAGS.each { |typ|
|
947
|
-
if typ == tagname
|
948
|
-
return true
|
949
|
-
end
|
950
|
-
}
|
951
|
-
return false
|
952
|
-
end
|
953
|
-
end
|
954
|
-
|
955
|
-
# =============================================================================
|
956
|
-
|
957
|
-
=begin rdoc
|
958
|
-
Instances of this class are used to split a large Garmin TrainingCenter
|
959
|
-
*.tcx file into individual 'activity_' files.
|
960
|
-
=end
|
961
|
-
|
962
|
-
class TrainingCenterXmlSplitter < GoobyObject
|
963
|
-
|
964
|
-
attr_reader :out_dir, :training_center_files, :out_files_hash
|
965
|
-
|
966
|
-
def initialize(tcx_file, out_dir)
|
967
|
-
@out_dir = out_dir
|
968
|
-
@training_center_files = Array.new
|
969
|
-
@training_center_files << tcx_file
|
970
|
-
@out_files_hash = Hash.new
|
971
|
-
end
|
972
|
-
|
973
|
-
def split
|
974
|
-
@training_center_files.each { |f| process_file(f) }
|
975
|
-
write_files
|
976
|
-
end
|
977
|
-
|
978
|
-
private
|
979
|
-
|
980
|
-
def process_file(training_center_tcx_file)
|
981
|
-
@file_name = training_center_tcx_file
|
982
|
-
@tcx_lines = read_lines(@file_name, false)
|
983
|
-
@line_num = 0
|
984
|
-
@activity_num = 0
|
985
|
-
@curr_activity_lines = Array.new
|
986
|
-
@curr_activity_tkpts = 0
|
987
|
-
@start_line_num = 0
|
988
|
-
@end_line_num = 0
|
989
|
-
@activity_start_time = nil
|
990
|
-
|
991
|
-
@tcx_lines.each { |line|
|
992
|
-
@line_num = @line_num + 1
|
993
|
-
if (line.match(/<Activity /))
|
994
|
-
@activity_num = @activity_num + 1
|
995
|
-
@start_line_num = @line_num
|
996
|
-
@curr_activity_lines = Array.new
|
997
|
-
@curr_activity_lines << line
|
998
|
-
elsif (line.match(/<Id>/)) # <Id>2007-03-03T15:58:57Z</Id> <StartTime>2007-01-13T15:37:06Z</StartTime>
|
999
|
-
@curr_activity_lines << line
|
1000
|
-
if @activity_start_time == nil
|
1001
|
-
clone = String.new(line)
|
1002
|
-
clone.gsub!(/[<>]/, ' ')
|
1003
|
-
clone.gsub!(/[-:T]/, '_')
|
1004
|
-
clone.gsub!(/[Z]/, '')
|
1005
|
-
tokens = clone.split
|
1006
|
-
@activity_start_time = tokens[1]
|
1007
|
-
end
|
1008
|
-
elsif (line.match(/<Trackpoint>/))
|
1009
|
-
@curr_activity_tkpts = @curr_activity_tkpts + 1
|
1010
|
-
@curr_activity_lines << line
|
1011
|
-
elsif (line.match(/<\/Activity/))
|
1012
|
-
@end_line_num = @line_num
|
1013
|
-
@curr_activity_lines << line
|
1014
|
-
end_run
|
1015
|
-
elsif (@curr_activity_lines.size > 0)
|
1016
|
-
@curr_activity_lines << line
|
1017
|
-
end
|
1018
|
-
}
|
1019
|
-
end
|
1020
|
-
|
1021
|
-
def end_run
|
1022
|
-
out_file = "#{@out_dir}/activity_#{@activity_start_time}.xml"
|
1023
|
-
comment = "<!-- file: #{out_file} lines: #{@curr_activity_lines.size} (#{@start_line_num} to #{@end_line_num}) tkpts: #{@curr_activity_tkpts} --> \n"
|
1024
|
-
@curr_activity_lines.insert(0, comment)
|
1025
|
-
|
1026
|
-
prev_entry = @out_files_hash[out_file]
|
1027
|
-
if prev_entry
|
1028
|
-
if (@curr_activity_lines.size >= prev_entry.size)
|
1029
|
-
puts "previous entry overlaid for #{out_file}. curr=#{@curr_activity_lines.size} prev=#{prev_entry.size}"
|
1030
|
-
@out_files_hash[out_file] = @curr_activity_lines
|
1031
|
-
else
|
1032
|
-
puts "previous entry retained for #{out_file}. curr=#{@curr_activity_lines.size} prev=#{prev_entry.size}"
|
1033
|
-
end
|
1034
|
-
else
|
1035
|
-
puts "new entry for #{out_file}. curr=#{@curr_activity_lines.size}"
|
1036
|
-
@out_files_hash[out_file] = @curr_activity_lines
|
1037
|
-
end
|
1038
|
-
|
1039
|
-
@curr_activity_lines = Array.new
|
1040
|
-
@curr_activity_tkpts = 0
|
1041
|
-
@start_line_num = 0
|
1042
|
-
@end_line_num = 0
|
1043
|
-
@activity_start_time = nil
|
1044
|
-
end
|
1045
|
-
|
1046
|
-
def write_files
|
1047
|
-
out_names = @out_files_hash.keys.sort
|
1048
|
-
puts "Writing #{out_names.size} extract files..."
|
1049
|
-
out_names.each { |out_name|
|
1050
|
-
lines = @out_files_hash[out_name]
|
1051
|
-
out = File.new out_name, "w+"
|
1052
|
-
lines.each { |line| out.write line }
|
1053
|
-
out.flush
|
1054
|
-
out.close
|
1055
|
-
puts "File written: #{out_name}"
|
1056
|
-
}
|
1057
|
-
puts "output files written."
|
1058
|
-
end
|
1059
|
-
end
|
1060
|
-
|
1061
|
-
# =============================================================================
|
1062
|
-
|
1063
|
-
=begin rdoc
|
1064
|
-
Instances of this class represent a the set of Geographic data defined in file geo.txt
|
1065
|
-
=end
|
1066
|
-
|
1067
|
-
class GeoData < GoobyObject
|
1068
|
-
|
1069
|
-
attr_reader :filename, :lines, :poi_hash, :poi_array, :track_hash, :track_array, :route_hash, :route_array
|
1070
|
-
|
1071
|
-
def initialize(filename)
|
1072
|
-
@filename = filename
|
1073
|
-
@filename = 'data/geo_data.txt' if @filename == nil
|
1074
|
-
@lines = read_lines(@filename, true)
|
1075
|
-
@poi_hash = Hash.new
|
1076
|
-
@poi_array = Array.new
|
1077
|
-
@track_hash = Hash.new
|
1078
|
-
@track_array = Array.new
|
1079
|
-
@route_hash = Hash.new
|
1080
|
-
@route_array = Array.new
|
1081
|
-
parse_poi
|
1082
|
-
parse_tracks
|
1083
|
-
parse_routes
|
1084
|
-
end
|
1085
|
-
|
1086
|
-
private
|
1087
|
-
|
1088
|
-
def parse_poi
|
1089
|
-
in_poi, poi_number = false, 0
|
1090
|
-
@lines.each { |line|
|
1091
|
-
line_obj = Line.new(line, nil, true)
|
1092
|
-
tok_count = line_obj.token_count
|
1093
|
-
is_point = line_obj.token_idx_equals(0, '.')
|
1094
|
-
|
1095
|
-
if line_obj.is_populated_non_comment
|
1096
|
-
if line_obj.match('points_of_interest_start')
|
1097
|
-
in_poi = true
|
1098
|
-
elsif line_obj.match('points_of_interest_end')
|
1099
|
-
in_poi = false
|
1100
|
-
elsif in_poi && tok_count > 2 && is_point
|
1101
|
-
poi_number = poi_number + 1
|
1102
|
-
tkpt = Trackpoint.new(
|
1103
|
-
poi_number, line_obj.tokens[1], line_obj.tokens[2],
|
1104
|
-
'0', '', line_obj.concatinate_tokens(3))
|
1105
|
-
add_poi(tkpt)
|
1106
|
-
end
|
1107
|
-
end
|
1108
|
-
}
|
1109
|
-
end
|
1110
|
-
|
1111
|
-
def parse_tracks
|
1112
|
-
in_track, trk_number, tkpt_number = false, 0, 0
|
1113
|
-
curr_trk, curr_run = nil, nil
|
1114
|
-
@lines.each { |line|
|
1115
|
-
line_obj = Line.new(line, nil, true)
|
1116
|
-
tok_count = line_obj.token_count
|
1117
|
-
is_point = line_obj.token_idx_equals(0, '.')
|
1118
|
-
|
1119
|
-
if line_obj.is_populated_non_comment
|
1120
|
-
if line_obj.match('track_start')
|
1121
|
-
in_track = true
|
1122
|
-
trk_number = trk_number + 1
|
1123
|
-
tkpt_number = 0
|
1124
|
-
curr_trk = Track.new(0, line_obj.concatinate_tokens(1))
|
1125
|
-
curr_run = Run.new(trk_number, line_obj.concatinate_tokens(1))
|
1126
|
-
curr_run.add_track(curr_trk)
|
1127
|
-
elsif line_obj.match('track_end')
|
1128
|
-
in_track = false
|
1129
|
-
curr_run.finish
|
1130
|
-
add_track(curr_trk)
|
1131
|
-
add_route(curr_run)
|
1132
|
-
elsif in_track && tok_count > 2 && is_point
|
1133
|
-
tkpt_number = tkpt_number + 1
|
1134
|
-
tkpt = Trackpoint.new(
|
1135
|
-
tkpt_number, line_obj.tokens[1], line_obj.tokens[2],
|
1136
|
-
'0', '', line_obj.concatinate_tokens(3))
|
1137
|
-
curr_trk.add_trackpoint(tkpt)
|
1138
|
-
end
|
1139
|
-
end
|
1140
|
-
}
|
1141
|
-
end
|
1142
|
-
|
1143
|
-
def parse_routes
|
1144
|
-
in_route, route_number, trk_number, tkpt_number = false, 0, 0, 0
|
1145
|
-
curr_trk, curr_run = nil, nil
|
1146
|
-
@lines.each { |line|
|
1147
|
-
line_obj = Line.new(line, nil, true)
|
1148
|
-
tok_count = line_obj.token_count
|
1149
|
-
is_point = line_obj.token_idx_equals(0, '.')
|
1150
|
-
|
1151
|
-
if line_obj.is_populated_non_comment
|
1152
|
-
if line_obj.match('route_start')
|
1153
|
-
in_route = true
|
1154
|
-
trk_number = trk_number + 1
|
1155
|
-
tkpt_number = 0
|
1156
|
-
curr_trk = Track.new(0, line_obj.concatinate_tokens(1))
|
1157
|
-
curr_run = Run.new(trk_number, line_obj.concatinate_tokens(1))
|
1158
|
-
curr_run.add_track(curr_trk)
|
1159
|
-
elsif line_obj.match('route_end')
|
1160
|
-
in_route = false
|
1161
|
-
curr_run.finish
|
1162
|
-
add_route(curr_run)
|
1163
|
-
elsif in_route && tok_count > 2 && is_point
|
1164
|
-
tkpt_number = tkpt_number + 1
|
1165
|
-
tkpt = Trackpoint.new(
|
1166
|
-
tkpt_number, line_obj.tokens[1], line_obj.tokens[2],
|
1167
|
-
'0', '', line_obj.concatinate_tokens(3))
|
1168
|
-
curr_trk.add_trackpoint(tkpt)
|
1169
|
-
elsif in_route && line_obj.token_idx_equals(0, 'track') && tok_count > 1
|
1170
|
-
trk_desc = line_obj.concatinate_tokens(1)
|
1171
|
-
trk = @track_hash[trk_desc]
|
1172
|
-
if trk
|
1173
|
-
trk.trackpoints.each { |tkpt| curr_trk.add_trackpoint(tkpt) }
|
1174
|
-
end
|
1175
|
-
elsif in_route && line_obj.token_idx_equals(0, 'track_rev') && tok_count > 1
|
1176
|
-
trk_desc = line_obj.concatinate_tokens(1)
|
1177
|
-
trk = @track_hash[trk_desc]
|
1178
|
-
if trk
|
1179
|
-
array = trk.trackpoints
|
1180
|
-
trk.trackpoints.each { |tkpt| curr_trk.add_trackpoint(tkpt) }
|
1181
|
-
end
|
1182
|
-
end
|
1183
|
-
end
|
1184
|
-
}
|
1185
|
-
end
|
1186
|
-
|
1187
|
-
def add_poi(tkpt)
|
1188
|
-
if tkpt
|
1189
|
-
descr = tkpt.descr
|
1190
|
-
if @poi_hash.has_key? descr
|
1191
|
-
puts "Duplicate POI key ignored - '#{descr}'"
|
1192
|
-
else
|
1193
|
-
#puts "Adding POI: #{tkpt.to_poi_csv}"
|
1194
|
-
@poi_hash[descr] = tkpt
|
1195
|
-
@poi_array << tkpt
|
1196
|
-
end
|
1197
|
-
end
|
1198
|
-
end
|
1199
|
-
|
1200
|
-
def add_track(trk)
|
1201
|
-
if trk
|
1202
|
-
descr = trk.descr
|
1203
|
-
if @track_hash.has_key? descr
|
1204
|
-
puts "Duplicate Track key ignored - '#{descr}'"
|
1205
|
-
else
|
1206
|
-
@track_hash[descr] = trk
|
1207
|
-
@track_array << trk
|
1208
|
-
end
|
1209
|
-
end
|
1210
|
-
end
|
1211
|
-
|
1212
|
-
def add_route(run)
|
1213
|
-
if run
|
1214
|
-
descr = run.descr
|
1215
|
-
if @route_hash.has_key? descr
|
1216
|
-
puts "Duplicate Route key ignored - '#{descr}'"
|
1217
|
-
else
|
1218
|
-
@route_hash[descr] = run
|
1219
|
-
@route_array << run
|
1220
|
-
end
|
1221
|
-
end
|
1222
|
-
end
|
1223
|
-
|
1224
|
-
public
|
1225
|
-
|
1226
|
-
def to_s
|
1227
|
-
return "GeoData lines: #{lines.size} poi: #{@poi_hash.size} tracks: #{@track_hash.size} routes: #{@route_hash.size} "
|
1228
|
-
end
|
1229
|
-
|
1230
|
-
def dump
|
1231
|
-
puts "#{self.class} dump:"
|
1232
|
-
@poi_array.each { |tkpt| puts "POI: #{tkpt.to_geo_s}" }
|
1233
|
-
@track_array.each { |trk| trk.dump }
|
1234
|
-
@route_hash.keys.sort.each { |key|
|
1235
|
-
puts "Route: '#{key}'"
|
1236
|
-
}
|
1237
|
-
end
|
1238
|
-
end
|
1239
|
-
|
1240
|
-
# =============================================================================
|
1241
|
-
|
1242
|
-
=begin rdoc
|
1243
|
-
Instances of this class represent a <Run> aggregate object from a
|
1244
|
-
Forerunner XML file.
|
1245
|
-
|
1246
|
-
Additionally, there is distance, pace, and Google Map generation logic
|
1247
|
-
in this class.
|
1248
|
-
=end
|
1249
|
-
|
1250
|
-
class GoogleMapGenerator < GoobyObject
|
1251
|
-
|
1252
|
-
attr_reader :csv_file, :csv_lines, :dttm_idx, :num_idx, :lat_idx, :lng_idx, :alt_idx, :cdist_idx
|
1253
|
-
attr_reader :run, :tkpts, :content_hash, :center_longitude, :center_latitude, :gpoint_array, :overlay_points, :notes
|
1254
|
-
attr_reader :center_longitude, :center_latitude
|
1255
|
-
|
1256
|
-
# The default csv input file format is as follows:
|
1257
|
-
# 1 | 2006-01-15T18:31:10Z | 1279 | 33.42601 | -111.92927 | 347.654 | 26.3514930151813
|
1258
|
-
# 1 | 2004-11-13T13:05:20Z | 2 | 37.54318 | -77.43636 | -58.022 | 0.00297286231747969
|
1259
|
-
|
1260
|
-
# primary_key|run_id|date|time|tkpt_num|latitude|longitude|altitude_ft|run_distance|run_elapsed|lap_tkpt_number|lap_distance|lap_elapsed
|
1261
|
-
# 2005-03-05T13:00:29Z.2|2005-03-05T13:00:29Z|2005-03-05|13:00:49|2|35.22054|-80.84506|738.4161312|0.046918021941152|00:00:20|2|0.046918021941152|00:00:20
|
1262
|
-
|
1263
|
-
def initialize(csv_file, dttm_idx=1, num_idx=4, lat_idx=5, lng_idx=6, alt_idx=7, cdist_idx=8)
|
1264
|
-
@csv_file = csv_file
|
1265
|
-
@dttm_idx = dttm_idx
|
1266
|
-
@num_idx = num_idx
|
1267
|
-
@lat_idx = lat_idx
|
1268
|
-
@lng_idx = lng_idx
|
1269
|
-
@alt_idx = alt_idx
|
1270
|
-
@cdist_idx = cdist_idx
|
1271
|
-
|
1272
|
-
# Override default csv value indices if specified in the configuration yaml file.
|
1273
|
-
@configuration = Gooby::Configuration.get_config
|
1274
|
-
@dttm_idx = @configuration.get('csv_dttm_idx') if @configuration.get('csv_dttm_idx')
|
1275
|
-
@num_idx = @configuration.get('csv_num_idx') if @configuration.get('csv_num_idx')
|
1276
|
-
@lat_idx = @configuration.get('csv_lat_idx') if @configuration.get('csv_lat_idx')
|
1277
|
-
@lng_idx = @configuration.get('csv_lng_idx') if @configuration.get('csv_lng_idx')
|
1278
|
-
@alt_idx = @configuration.get('csv_alt_idx') if @configuration.get('csv_alt_idx')
|
1279
|
-
@title = @configuration.get("#{@csv_file}")
|
1280
|
-
|
1281
|
-
@content_hash = Hash.new('')
|
1282
|
-
@run = Gooby::Run.new(1)
|
1283
|
-
@track = Gooby::Track.new(1)
|
1284
|
-
@run.add_track(@track)
|
1285
|
-
@tkpts = Array.new
|
1286
|
-
@icon_url_base = @configuration.get('gmap_icon_url_base')
|
1287
|
-
|
1288
|
-
list = Array.new
|
1289
|
-
list << @csv_file
|
1290
|
-
@cvs_reader = Gooby::CsvReader.new(list)
|
1291
|
-
@cvs_points = @cvs_reader.read
|
1292
|
-
@cvs_points.each { |cvs_point|
|
1293
|
-
tkpt = cvs_point.as_trackpoint
|
1294
|
-
if tkpt
|
1295
|
-
@track.add_trackpoint(tkpt)
|
1296
|
-
end
|
1297
|
-
}
|
1298
|
-
@run.finish
|
1299
|
-
end
|
1300
|
-
|
1301
|
-
=begin
|
1302
|
-
Returns a Hash with specific generated content at the following keys:
|
1303
|
-
=end
|
1304
|
-
def generate(configuration)
|
1305
|
-
if (configuration == nil)
|
1306
|
-
@configuration = Gooby::Configuration.get_config
|
1307
|
-
else
|
1308
|
-
@configuration = configuration
|
1309
|
-
end
|
1310
|
-
@content_hash['when_generated'] = Time.now
|
1311
|
-
@content_hash['title'] = @title
|
1312
|
-
@icon_url_base = @configuration.get('gmap_icon_url_base')
|
1313
|
-
filter_trackpoints
|
1314
|
-
compute_center_point
|
1315
|
-
generate_key_js
|
1316
|
-
generate_map_div
|
1317
|
-
generate_messages_div
|
1318
|
-
generate_main_js_start
|
1319
|
-
generate_main_js_route_overlay
|
1320
|
-
generate_main_js_checkpoint_overlays
|
1321
|
-
generate_main_js_map_clicked_listeners
|
1322
|
-
generate_main_js_end
|
1323
|
-
@content_hash
|
1324
|
-
end
|
1325
|
-
|
1326
|
-
def filter_trackpoints
|
1327
|
-
count, @tkpts = 0, Array.new
|
1328
|
-
firstTkpt = @configuration.get('gmap_first_tkpt_number')
|
1329
|
-
lastTkpt = @configuration.get('gmap_last_tkpt_number')
|
1330
|
-
@run.tracks.each { |trk|
|
1331
|
-
trk.trackpoints.each { |tkpt|
|
1332
|
-
count = count + 1
|
1333
|
-
if ((count >= firstTkpt) && (count <= lastTkpt))
|
1334
|
-
@tkpts.push(tkpt)
|
1335
|
-
end
|
1336
|
-
}
|
1337
|
-
}
|
1338
|
-
end
|
1339
|
-
|
1340
|
-
=begin
|
1341
|
-
Returns a Hash with specific generated content at the following keys:
|
1342
|
-
=end
|
1343
|
-
def generate_page(configuration)
|
1344
|
-
|
1345
|
-
# puts "generate_page #{@csv_file} #{@csv_lines.size}"
|
1346
|
-
content_hash = generate(nil)
|
1347
|
-
s = String.new(@csv_file)
|
1348
|
-
s.gsub("/", " ")
|
1349
|
-
tokens = tokenize(s, nil)
|
1350
|
-
out_file = "#{tokens[-2]}.html"
|
1351
|
-
#content_hash.keys.sort.each { | key | puts key }
|
1352
|
-
|
1353
|
-
s = <<HERE
|
1354
|
-
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
|
1355
|
-
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
1356
|
-
<html xmlns="http://www.w3.org/1999/xhtml">
|
1357
|
-
<head>
|
1358
|
-
<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
|
1359
|
-
<meta name="description" content="Google Map generated with #{project_embedded_comment}">
|
1360
|
-
<meta name="keywords" content="Google Map #{project_embedded_comment} GPS">
|
1361
|
-
<title> Google Map by Gooby </title>
|
1362
|
-
#{content_hash['key_js']}
|
1363
|
-
#{content_hash['main_js_start']}
|
1364
|
-
#{content_hash['main_js_route_overlay']}
|
1365
|
-
#{content_hash['main_js_checkpoint_overlays']}
|
1366
|
-
#{content_hash['main_js_map_clicked_listeners']}
|
1367
|
-
#{content_hash['main_js_end']}
|
1368
|
-
</head>
|
1369
|
-
<body onload="load()" onunload="GUnload()">
|
1370
|
-
<center>
|
1371
|
-
<h3> #{@title} </h3>
|
1372
|
-
<h5> Generated by Gooby #{content_hash['when_generated']} <br> Gooby = Google APIs + Ruby </h5>
|
1373
|
-
#{content_hash['map_div']}
|
1374
|
-
#{content_hash['messages_div']}
|
1375
|
-
</center>
|
1376
|
-
</body>
|
1377
|
-
</html>
|
1378
|
-
HERE
|
1379
|
-
puts s # Redirect output via shell.
|
1380
|
-
end
|
1381
|
-
|
1382
|
-
private
|
1383
|
-
|
1384
|
-
def compute_center_point
|
1385
|
-
highLat = -999.0
|
1386
|
-
highLong = -999.0
|
1387
|
-
lowLat = 999.0
|
1388
|
-
lowLong = 999.0
|
1389
|
-
@tkpts.each { |tkpt|
|
1390
|
-
highLat = tkpt.latitude_as_float if tkpt.latitude_as_float > highLat
|
1391
|
-
lowLat = tkpt.latitude_as_float if tkpt.latitude_as_float < lowLat
|
1392
|
-
highLong = tkpt.longitude_as_float if tkpt.longitude_as_float > highLong
|
1393
|
-
lowLong = tkpt.longitude_as_float if tkpt.longitude_as_float < lowLong
|
1394
|
-
}
|
1395
|
-
@center_longitude = (highLong + lowLong) / 2
|
1396
|
-
@center_latitude = (highLat + lowLat) / 2
|
1397
|
-
@content_hash['center_longitude'] = @center_longitude
|
1398
|
-
@content_hash['center_latitude'] = @center_latitude
|
1399
|
-
end
|
1400
|
-
|
1401
|
-
def generate_key_js
|
1402
|
-
key = @configuration.get('gmap_key')
|
1403
|
-
key.strip!
|
1404
|
-
# <script src="http://maps.google.com/maps?file=api&v=2&key=<%= @gmap_key -%>" type="text/javascript"></script>
|
1405
|
-
s = "<script src='http://maps.google.com/maps?file=api&v=2&key="
|
1406
|
-
s << key
|
1407
|
-
s << "' type='text/javascript'></script>"
|
1408
|
-
@content_hash['key_js'] = s
|
1409
|
-
end
|
1410
|
-
|
1411
|
-
def generate_map_div
|
1412
|
-
width = @configuration.get('gmap_width')
|
1413
|
-
height = @configuration.get('gmap_height')
|
1414
|
-
id = @configuration.get('gmap_map_element_id')
|
1415
|
-
s = '<div id="'
|
1416
|
-
s << id
|
1417
|
-
s << '" style="width: '
|
1418
|
-
s << width
|
1419
|
-
s << '; height: '
|
1420
|
-
s << height
|
1421
|
-
s << '"></div>'
|
1422
|
-
@content_hash['map_width'] = width
|
1423
|
-
@content_hash['map_height'] = height
|
1424
|
-
@content_hash['map_div'] = s
|
1425
|
-
end
|
1426
|
-
|
1427
|
-
def generate_messages_div
|
1428
|
-
s = "<div id=\"messages\"></div>"
|
1429
|
-
@content_hash['messages_div'] = s
|
1430
|
-
end
|
1431
|
-
|
1432
|
-
def generate_main_js_start
|
1433
|
-
id = @configuration.get('gmap_map_element_id')
|
1434
|
-
size = @configuration.get('gmap_size_control')
|
1435
|
-
type = @configuration.get('gmap_type')
|
1436
|
-
zoom = @configuration.get('gmap_zoom_level')
|
1437
|
-
title = @configuration.get("#{@csv_file}")
|
1438
|
-
title = '' if title == nil
|
1439
|
-
zoom_tab = @configuration.get('gmap_zoom_tab')
|
1440
|
-
if size
|
1441
|
-
if size == 'smallmap'
|
1442
|
-
size = 'GSmallMapControl'
|
1443
|
-
elsif size == 'smallzoom'
|
1444
|
-
size = 'GSmallMapControl'
|
1445
|
-
else
|
1446
|
-
size = 'GLargeMapControl'
|
1447
|
-
end
|
1448
|
-
end
|
1449
|
-
|
1450
|
-
if type
|
1451
|
-
if type == 'satellite'
|
1452
|
-
type = 'G_SATELLITE_MAP'
|
1453
|
-
elsif type == 'hybrid'
|
1454
|
-
type = 'G_HYBRID_MAP'
|
1455
|
-
else
|
1456
|
-
type = 'G_NORMAL_MAP'
|
1457
|
-
end
|
1458
|
-
else
|
1459
|
-
type = 'G_NORMAL_MAP'
|
1460
|
-
end
|
1461
|
-
|
1462
|
-
s = '<script type="text/javascript">'
|
1463
|
-
s << "\n"
|
1464
|
-
s << "//<![CDATA[ \n"
|
1465
|
-
s << " function load() { \n"
|
1466
|
-
s << " if (GBrowserIsCompatible()) { \n"
|
1467
|
-
s << ' var map = new GMap2(document.getElementById("'
|
1468
|
-
s << id
|
1469
|
-
s << '")); '
|
1470
|
-
s << "\n"
|
1471
|
-
|
1472
|
-
if size
|
1473
|
-
s << ' map.addControl(new '
|
1474
|
-
s << size
|
1475
|
-
s << '());'
|
1476
|
-
s << "\n"
|
1477
|
-
end
|
1478
|
-
|
1479
|
-
if type
|
1480
|
-
s << ' map.addControl(new GMapTypeControl());'
|
1481
|
-
s << "\n"
|
1482
|
-
# s << ' map.setMapType('
|
1483
|
-
# s << type
|
1484
|
-
# s << ');'
|
1485
|
-
s << "\n"
|
1486
|
-
end
|
1487
|
-
s << " var centerPoint = new GLatLng(#{@center_latitude}, #{@center_longitude}); // #{project_embedded_comment} \n"
|
1488
|
-
s << " map.setCenter(centerPoint, #{zoom}); \n"
|
1489
|
-
s << "\n"
|
1490
|
-
@content_hash['main_js_start'] = s
|
1491
|
-
@content_hash['title'] = title
|
1492
|
-
end
|
1493
|
-
|
1494
|
-
def generate_main_js_route_overlay
|
1495
|
-
tkpt_count = @tkpts.size.to_f
|
1496
|
-
app_max = @configuration.get('gmap_approx_max_points').to_f
|
1497
|
-
gen_comments = @configuration.get('gmap_gen_comments')
|
1498
|
-
ratio = tkpt_count / app_max
|
1499
|
-
@start_dttm = nil
|
1500
|
-
if ratio > 1.0
|
1501
|
-
increment = (tkpt_count / app_max).to_i
|
1502
|
-
else
|
1503
|
-
increment = 1
|
1504
|
-
end
|
1505
|
-
curr_idx, next_idx, gpoint_count, last_idx = -1, 0, 0, @tkpts.size - 1
|
1506
|
-
s = " var points = new Array(); "
|
1507
|
-
@tkpts.each { |tkpt|
|
1508
|
-
curr_idx = curr_idx + 1
|
1509
|
-
if curr_idx == 0
|
1510
|
-
@start_dttm = tkpt.dttm
|
1511
|
-
@start_pos = tkpt.point
|
1512
|
-
|
1513
|
-
time = Time.parse(@start_dttm.dateTime().to_s)
|
1514
|
-
end
|
1515
|
-
if ((curr_idx == next_idx) || (curr_idx == last_idx) || (tkpt.is_split()))
|
1516
|
-
gpoint_count = gpoint_count + 1
|
1517
|
-
s << tkpt.as_glatlng(false, gen_comments, tkpt_count, curr_idx, @start_dttm)
|
1518
|
-
next_idx = curr_idx + increment
|
1519
|
-
else
|
1520
|
-
s << tkpt.as_glatlng(true, gen_comments, tkpt_count, curr_idx, @start_dttm)
|
1521
|
-
end
|
1522
|
-
}
|
1523
|
-
s << "\n"
|
1524
|
-
s << "\n var routePolyline = new GPolyline(points); "
|
1525
|
-
s << "\n map.addOverlay(routePolyline); "
|
1526
|
-
@content_hash['main_js_route_overlay'] = s
|
1527
|
-
@content_hash['main_js_route_overlay_increment'] = increment
|
1528
|
-
end
|
1529
|
-
|
1530
|
-
def generate_main_js_checkpoint_overlays
|
1531
|
-
s = "\n // Create a base icon for all of our markers that specifies the "
|
1532
|
-
s << "\n // shadow, icon dimensions, etc."
|
1533
|
-
s << "\n var baseIcon = new GIcon();"
|
1534
|
-
s << "\n baseIcon.shadow = \"#{@icon_url_base}shadow50.png\";"
|
1535
|
-
s << "\n baseIcon.iconSize = new GSize(20, 34);"
|
1536
|
-
s << "\n baseIcon.shadowSize = new GSize(37, 34);"
|
1537
|
-
s << "\n baseIcon.iconAnchor = new GPoint(9, 34);"
|
1538
|
-
s << "\n baseIcon.infoWindowAnchor = new GPoint(9, 2);"
|
1539
|
-
s << "\n baseIcon.infoShadowAnchor = new GPoint(18, 25);"
|
1540
|
-
s << "\n"
|
1541
|
-
|
1542
|
-
curr_idx = -1
|
1543
|
-
last_idx = @tkpts.size - 1
|
1544
|
-
next_checkpoint = 0.0
|
1545
|
-
@start_dttm = nil
|
1546
|
-
@tkpts.each { | tkpt |
|
1547
|
-
curr_idx = curr_idx + 1
|
1548
|
-
if curr_idx == 0
|
1549
|
-
@start_dttm = tkpt.dttm
|
1550
|
-
info_window_html = tkpt.as_info_window_html('Start', @start_dttm)
|
1551
|
-
s << "\n var iconStart = new GIcon(baseIcon); "
|
1552
|
-
s << "\n iconStart.image = \"#{@icon_url_base}dd-start.png\";"
|
1553
|
-
s << "\n var pStart = new GPoint(#{tkpt.longitude_as_float}, #{tkpt.latitude_as_float});"
|
1554
|
-
s << "\n var mStart = new GMarker(pStart, iconStart);"
|
1555
|
-
s << "\n GEvent.addListener(mStart, \"click\", function() { "
|
1556
|
-
s << "\n mStart.openInfoWindowHtml(#{info_window_html});"
|
1557
|
-
s << "\n }); "
|
1558
|
-
s << "\n map.addOverlay(mStart);"
|
1559
|
-
s << "\n "
|
1560
|
-
next_checkpoint = 1.0
|
1561
|
-
elsif curr_idx == last_idx
|
1562
|
-
info_window_html = tkpt.as_info_window_html('Finish', @start_dttm)
|
1563
|
-
s << "\n var iconFinish = new GIcon(baseIcon); "
|
1564
|
-
s << "\n iconFinish.image = \"#{@icon_url_base}dd-end.png\";"
|
1565
|
-
s << "\n var pFinish = new GPoint(#{tkpt.longitude_as_float}, #{tkpt.latitude_as_float});"
|
1566
|
-
s << "\n var mFinish = new GMarker(pFinish, iconFinish);"
|
1567
|
-
s << "\n GEvent.addListener(mFinish, \"click\", function() { "
|
1568
|
-
s << "\n mFinish.openInfoWindowHtml(#{info_window_html});"
|
1569
|
-
s << "\n }); "
|
1570
|
-
s << "\n map.addOverlay(mFinish);"
|
1571
|
-
s << "\n "
|
1572
|
-
next_checkpoint = 999999
|
1573
|
-
else
|
1574
|
-
if (tkpt.cumulative_distance >= next_checkpoint)
|
1575
|
-
integer = next_checkpoint.to_i
|
1576
|
-
info_window_html = tkpt.as_info_window_html("#{integer}", @start_dttm)
|
1577
|
-
s << "\n var icon#{integer} = new GIcon(baseIcon); "
|
1578
|
-
s << "\n icon#{integer}.image = \"#{@icon_url_base}marker#{integer}.png\";"
|
1579
|
-
s << "\n var p#{integer} = new GPoint(#{tkpt.longitude_as_float}, #{tkpt.latitude_as_float});"
|
1580
|
-
s << "\n var m#{integer} = new GMarker(p#{integer}, icon#{integer});"
|
1581
|
-
s << "\n GEvent.addListener(m#{integer}, \"click\", function() { "
|
1582
|
-
s << "\n m#{integer}.openInfoWindowHtml(#{info_window_html});"
|
1583
|
-
s << "\n }); "
|
1584
|
-
s << "\n map.addOverlay(m#{integer});"
|
1585
|
-
s << "\n "
|
1586
|
-
next_checkpoint = next_checkpoint + 1.0
|
1587
|
-
end
|
1588
|
-
end
|
1589
|
-
}
|
1590
|
-
s << "\n"
|
1591
|
-
@content_hash['main_js_checkpoint_overlays'] = s
|
1592
|
-
|
1593
|
-
end
|
1594
|
-
|
1595
|
-
def generate_main_js_map_clicked_listeners
|
1596
|
-
s = "\n"
|
1597
|
-
s << "\n GEvent.addListener(map, \"click\", function() { "
|
1598
|
-
s << "\n var center = map.getCenter(); \n"
|
1599
|
-
s << "\n document.getElementById(\"messages\").innerHTML = 'click: ' + center.toString(); "
|
1600
|
-
s << "\n });"
|
1601
|
-
s << "\n GEvent.addListener(map, \"moveend\", function() { "
|
1602
|
-
s << "\n var center = map.getCenter(); \n"
|
1603
|
-
s << "\n document.getElementById(\"messages\").innerHTML = 'moveend: ' + center.toString(); "
|
1604
|
-
s << "\n });"
|
1605
|
-
@content_hash['main_js_map_clicked_listeners'] = s
|
1606
|
-
end
|
1607
|
-
|
1608
|
-
def generate_main_js_end
|
1609
|
-
s = "\n } "
|
1610
|
-
s << "\n } "
|
1611
|
-
s << "\n//]]> \n"
|
1612
|
-
s << "\n</script>"
|
1613
|
-
|
1614
|
-
@content_hash['main_js_end'] = s
|
1615
|
-
end
|
1616
|
-
end
|
1617
|
-
|
1618
|
-
# =============================================================================
|
1619
|
-
|
1620
|
-
=begin rdoc
|
1621
|
-
Instances of this class represent a <History> aggregate object from a
|
1622
|
-
Forerunner XML file.
|
1623
|
-
=end
|
1624
|
-
|
1625
|
-
class History < GoobyObject
|
1626
|
-
|
1627
|
-
attr_reader :runs
|
1628
|
-
|
1629
|
-
def initialize
|
1630
|
-
@runs = Array.new
|
1631
|
-
end
|
1632
|
-
|
1633
|
-
# Adds a Run during XML parsing.
|
1634
|
-
def add_run(run)
|
1635
|
-
@runs.push(run)
|
1636
|
-
end
|
1637
|
-
|
1638
|
-
def to_s
|
1639
|
-
return "Hist: runs: #{@runs.size}"
|
1640
|
-
end
|
1641
|
-
|
1642
|
-
def print_string
|
1643
|
-
s = "History: run count=#{@runs.size} \n"
|
1644
|
-
runs.each { | run | s << run.print_string }
|
1645
|
-
s
|
1646
|
-
end
|
1647
|
-
end
|
1648
|
-
|
1649
|
-
# =============================================================================
|
1650
|
-
|
1651
|
-
=begin rdoc
|
1652
|
-
Instances of this class represent a <Lap> aggregate object from a
|
1653
|
-
Forerunner XML file.
|
1654
|
-
=end
|
1655
|
-
|
1656
|
-
class Lap < GoobyObject
|
1657
|
-
|
1658
|
-
attr_accessor :number, :startTime, :duration, :length, :begin_position, :end_position
|
1659
|
-
|
1660
|
-
def initialize(num)
|
1661
|
-
@number = num
|
1662
|
-
end
|
1663
|
-
|
1664
|
-
def to_s
|
1665
|
-
return "Lap: num: #{@number} start: #{@startTime} dur: #{@duration} len: #{@length} begin: #{@begin_position.to_s} end: #{@end_position.to_s}"
|
1666
|
-
end
|
1667
|
-
end
|
1668
|
-
|
1669
|
-
# =============================================================================
|
1670
|
-
|
1671
|
-
class Line < GoobyObject
|
1672
|
-
|
1673
|
-
attr_accessor :raw_data, :tokens
|
1674
|
-
|
1675
|
-
def initialize(raw='', delim=nil, strip=false)
|
1676
|
-
if strip
|
1677
|
-
@raw_data = raw.strip
|
1678
|
-
else
|
1679
|
-
@raw_data = raw
|
1680
|
-
end
|
1681
|
-
|
1682
|
-
@tokens = tokenize(@raw_data, delim, strip=false)
|
1683
|
-
end
|
1684
|
-
|
1685
|
-
public
|
1686
|
-
|
1687
|
-
def token(idx)
|
1688
|
-
@tokens[idx]
|
1689
|
-
end
|
1690
|
-
|
1691
|
-
def token_count
|
1692
|
-
@tokens.size
|
1693
|
-
end
|
1694
|
-
|
1695
|
-
def token_idx_equals(idx, value)
|
1696
|
-
if idx < token_count
|
1697
|
-
if @tokens[idx] == value
|
1698
|
-
return true
|
1699
|
-
end
|
1700
|
-
end
|
1701
|
-
false
|
1702
|
-
end
|
1703
|
-
|
1704
|
-
def match(pattern)
|
1705
|
-
@raw_data.match(pattern)
|
1706
|
-
end
|
1707
|
-
|
1708
|
-
def is_comment
|
1709
|
-
s = @raw_data.strip
|
1710
|
-
(s.match('^#')) ? true : false
|
1711
|
-
end
|
1712
|
-
|
1713
|
-
def is_populated_non_comment
|
1714
|
-
s = @raw_data.strip
|
1715
|
-
if s.size == 0
|
1716
|
-
return false
|
1717
|
-
end
|
1718
|
-
if is_comment
|
1719
|
-
return false
|
1720
|
-
end
|
1721
|
-
return true
|
1722
|
-
end
|
1723
|
-
|
1724
|
-
def concatinate_tokens(start_idx = 0)
|
1725
|
-
s = ''
|
1726
|
-
idx = -1
|
1727
|
-
@tokens.each { |tok|
|
1728
|
-
idx = idx + 1
|
1729
|
-
if idx >= start_idx
|
1730
|
-
s << tok
|
1731
|
-
s << ' '
|
1732
|
-
end
|
1733
|
-
}
|
1734
|
-
s.strip!
|
1735
|
-
s
|
1736
|
-
end
|
1737
|
-
end
|
1738
|
-
|
1739
|
-
# =============================================================================
|
1740
|
-
=begin
|
1741
|
-
|
1742
|
-
This is a singleton class whose values are loaded from a YAML file when your
|
1743
|
-
GoobyCommand class is created. The default filename is 'gooby_config.yaml"'.
|
1744
|
-
|
1745
|
-
The YAML file contains configuration parameters, such as your Google Map key,
|
1746
|
-
map HTML options, points of interest, and courses.
|
1747
|
-
|
1748
|
-
=end
|
1749
|
-
class Configuration < GoobyObject

  # Singleton wrapper around the Gooby YAML configuration file.
  # Call Configuration.init(filename) once, then Configuration.get_config
  # to obtain the shared instance.

  # Fallback values returned by #get when a key is absent from the YAML
  # file ("sensible defaults").  Note: 'gmap_size_control' deliberately
  # defaults to nil (not ''), matching the original behavior.
  DEFAULTS = {
    'gmap_first_tkpt_number' => 1,
    'gmap_last_tkpt_number'  => 5000,
    'gmap_map_element_id'    => 'map',
    'gmap_height'            => '600',
    'gmap_icon_url_base'     => 'http://www.your-web-site.com/gicons/',
    'gmap_key'               => 'enter your Google Map Key here',
    'gmap_type_control'      => true,
    'gmap_approx_max_points' => '200',
    'gmap_gen_comments'      => true,
    'gmap_size_control'      => nil,
    'gmap_type'              => 'G_NORMAL_MAP',
    'gmap_zoom_level'        => 5
  }.freeze

  @@singleton_instance = nil

  attr_reader :yaml_filename, :configuration

  private_class_method :new

  # Creates the singleton on first call (loading the given YAML file);
  # subsequent calls return the already-created instance.
  def Configuration.init(yaml_filename='gooby_config.yaml')
    return @@singleton_instance if @@singleton_instance
    @@singleton_instance = new(yaml_filename)
  end

  # Returns the singleton instance, or nil if init has not been called.
  def self.get_config
    @@singleton_instance
  end

  # Loads the YAML configuration file into the @configuration Hash.
  def initialize(yaml_filename)
    @yaml_filename = yaml_filename
    File.open("#{@yaml_filename}") { |fn| @configuration = YAML::load(fn) }
  end

  # Returns the configured value for +name+, a default from DEFAULTS when
  # the key is absent, or '' when there is no default.  A nil name yields ''.
  def get(name)
    return '' if name.nil?
    key = "#{name}"
    value = @configuration[key]
    # Only a missing value (nil) falls back to the defaults; false is a
    # legitimate configured value and is returned as-is.
    return value unless value.nil?
    DEFAULTS.fetch(key, '')
  end

  # Prints every configuration entry, sorted by key.
  def print_all
    @configuration.keys.sort.each { |key| puts "#{key}: #{@configuration["#{key}"]}" }
  end

  # Prints each point-of-interest entry (keys matching 'poi.') as a Point.
  def print_all_poi
    @configuration.keys.sort.each { |key|
      if (key.match(/poi[\.]/))
        val = @configuration["#{key}"]
        poi = Point.new(val)
        puts poi.to_s
      end
    }
  end

  # Returns the Point configured under "poi.<number>", or nil if absent.
  def get_poi(number)
    val = @configuration["poi.#{number}"]
    (val) ? Point.new(val) : nil
  end

  # Returns the Course configured under "course.<number>", or nil if absent.
  def get_course(number)
    val = @configuration["course.#{number}"]
    (val) ? Course.new(val) : nil
  end

  # Number of entries loaded from the YAML file.
  def size
    @configuration.size
  end

  # Return a String containing yaml filename and entry count.
  def to_s
    return "# Configuration: filename: #{@yaml_filename} entries: #{@configuration.size}"
  end
end
|
1845
|
-
|
1846
|
-
# =============================================================================
|
1847
|
-
|
1848
|
-
class Point < GoobyObject

  # A geographic point.  latitude/longitude/altitude are held as Strings
  # (converted on demand by the *_as_float readers); number identifies a
  # configured poi; note is free-form text.
  attr_accessor :number, :latitude, :longitude, :altitude, :note

  # Constructs a Point either from one YAML value string ("lat lng note...")
  # or from positional values [lat, lng, alt, note].
  def initialize(*args)
    @number, @latitude, @longitude, @altitude, @note = '', '', '', '', ''
    if args
      if args.size == 1
        initialize_from_string(args[0]) # yaml
      else
        initialize_from_array(args)
      end
    end
  end

  # Field delimiter used by to_csv.
  def csv_delim
    '|'
  end

  # Populates fields positionally: lat, lng, alt, note (missing trailing
  # values keep their '' defaults).
  def initialize_from_array(args)
    @latitude = args[0] if args.size > 0
    @longitude = args[1] if args.size > 1
    @altitude = args[2] if args.size > 2
    @note = args[3] if args.size > 3
  end

  # Parses a whitespace-delimited YAML value: "lat lng note-words...".
  # NOTE(review): a value with exactly two tokens ("lat lng", no note) is
  # ignored because of the 'tokens.size > 2' guard - confirm intended.
  def initialize_from_string(yaml_value_string)
    tokens = yaml_value_string.split
    if (tokens.size > 2)
      @latitude = tokens[0]
      @longitude = tokens[1]
      @note = ''
      count = 0
      # Tokens 3..n are accumulated into the note, space-separated
      # (leaves a trailing space).
      tokens.each { |tok|
        count = count + 1
        if (count > 2)
          @note << tok
          @note << ' '
        end
      }
    end
  end

  public

  def to_s
    return "lat: #{@latitude} lng: #{@longitude} alt: #{@altitude} note: #{@note}"
  end

  # Column-aligned, human-readable rendering.
  def to_formatted_string
    s = "lat: #{@latitude.to_s.ljust(20)}"
    s << " lng: #{@longitude.to_s.ljust(20)}"
    s << " poi.#{@number.to_s.ljust(12)}" if @number
    s << " #{@note}" if @note
    s
  end

  # lat|lng|alt, joined with csv_delim.
  def to_csv
    return "#{@latitude}#{csv_delim}#{@longitude}#{csv_delim}#{@altitude}"
  end

  # invalid_latitude / invalid_longitude / invalid_altitude below are
  # presumably sentinel helpers inherited from GoobyObject - TODO confirm.
  def latitude_as_float
    @latitude ? @latitude.to_f : invalid_latitude
  end

  def longitude_as_float
    @longitude ? @longitude.to_f : invalid_longitude
  end

  def altitude_as_float
    @altitude ? @altitude.to_f : invalid_altitude
  end

  # Sum of the absolute latitude and longitude differences in degrees;
  # returns 360 (an impossible-to-match value) when the other point is nil.
  def degrees_diff(another_point)
    if (another_point)
      # 'if false' debug traces, intentionally disabled.
      puts "this: #{to_s}" if false
      puts "other: #{another_point.to_s}" if false
      puts "lats: #{latitude_as_float} #{another_point.latitude_as_float}" if false
      puts "lngs: #{longitude_as_float} #{another_point.longitude_as_float}" if false
      lat_diff = latitude_as_float - another_point.latitude_as_float
      lng_diff = longitude_as_float - another_point.longitude_as_float
      diff = lat_diff.abs + lng_diff.abs
      puts "diff: #{diff} #{lat_diff} #{lng_diff}" if false
      diff
    else
      360
    end
  end

  # Great-circle distance to another_point via the spherical law of
  # cosines: miles by default, kilometers for units == "K", nautical miles
  # for units == "N".  NaN at any intermediate step, or a nil argument,
  # yields 0.
  def proximity(another_point, units)
    if (another_point)
      arg1 = latitude_as_float
      arg2 = another_point.latitude_as_float
      arg3 = latitude_as_float
      arg4 = another_point.latitude_as_float
      theta = longitude_as_float - another_point.longitude_as_float
      res1 = Math.sin(deg2rad(arg1))
      res2 = Math.sin(deg2rad(arg2))
      res3 = Math.cos(deg2rad(arg3))
      res4 = Math.cos(deg2rad(arg4))
      res5 = Math.cos(deg2rad(theta.to_f))
      dist = ((res1 * res2) + (res3 * res4 * res5)).to_f

      if (!dist.nan?)
        dist = Math.acos(dist.to_f)
        if (!dist.nan?)
          dist = rad2deg(dist)
          if (!dist.nan?)
            # 60 nautical miles per degree * 1.1515 statute miles per
            # nautical mile.
            dist = dist * 60 * 1.1515;
            if (!dist.nan?)
              if units == "K"
                dist = dist * 1.609344;
              end
              if units == "N"
                dist = dist * 0.8684;
              end
            end
          end
        end
        return dist.to_f
      else
        return 0
      end
    else
      return 0
    end
  end

  # Degrees to radians.
  def deg2rad(degrees)
    (((0 + degrees) * Math::PI) / 180)
  end

  # Radians to degrees.
  def rad2deg(radians)
    (((0 + radians) * 180) / Math::PI)
  end
end
|
1984
|
-
|
1985
|
-
# =============================================================================
|
1986
|
-
|
1987
|
-
class CsvPoint < Point

  # One trackpoint read back from a Gooby-generated CSV line
  # (see Trackpoint.csv_header for the column layout).
  attr_reader :rawdata, :tokens
  attr_reader :id, :run_id, :date, :time, :tkpt_num, :distance, :elapsed
  attr_reader :lap_number, :lap_distance, :lap_elapsed
  attr_accessor :course_distance, :course_elapsed, :degrees_diff

  # Parses one pipe-delimited CSV line; lines with fewer than 13 fields
  # are silently left unparsed.
  # NOTE(review): super is not called, so Point's '' field defaults are
  # not applied on the short-line path - confirm intended.
  def initialize(csv_line)
    @rawdata = "#{csv_line}"
    @tokens = @rawdata.split(csv_delim)
    if (tokens.size > 12)
      @id = tokens[0] # <-- consists of @run_id.@tkpt_num for uniqueness and use as a DB table primary key.
      @run_id = tokens[1]
      @date = tokens[2]
      @time = tokens[3]
      @tkpt_num = tokens[4].strip.to_i
      @latitude = tokens[5].to_f
      @longitude = tokens[6].to_f
      @altitude = tokens[7].strip.to_f
      @distance = tokens[8].strip.to_f
      @elapsed = tokens[9]
      @lap_number = tokens[10].strip.to_i
      @lap_distance = tokens[11].strip.to_f
      @lap_elapsed = tokens[12]
    end
  end

  # Converts this CSV record back into a Trackpoint, rebuilding the
  # ISO-8601-style timestamp from the date and time fields.
  def as_trackpoint
    tkpt = Trackpoint.new(@tkpt_num, @latitude, @longitude, @altitude, "#{date}T#{time}Z")
    tkpt.lap_number = @lap_number
    tkpt.lap_distance = @lap_distance
    tkpt.lap_elapsed = @lap_elapsed
    tkpt
  end

  def to_s
    return "lat: #{@latitude} lng: #{@longitude} alt: #{@altitude} note: #{@note}"
  end

  # Multi-line, column-aligned rendering including course-relative values
  # (course_elapsed / course_distance are set externally, e.g. by Course).
  def to_formatted_string
    s = "lat: #{@latitude.to_s.ljust(20)}"
    s << " lng: #{@longitude.to_s.ljust(20)}"
    s << " time: #{@time}"
    pad = ''.ljust(70)
    s << "\n#{pad} course elapsed: #{@course_elapsed}"
    s << "\n#{pad} distance: #{@distance}"
    s << "\n#{pad} course distance: #{@course_distance}"
    s << "\n#{pad} altitude: #{@altitude}"
    s << "\n#{pad} degrees diff: #{@degrees_diff}"
    s
  end
end
|
2039
|
-
|
2040
|
-
# =============================================================================
|
2041
|
-
|
2042
|
-
# A run reconstructed from CSV data: an identifier plus the ordered list
# of its points.  (The historical 'CvsRun' spelling is kept because
# callers reference the class by this name.)
class CvsRun < GoobyObject

  attr_reader :id, :points

  # The id is coerced to a String; the point collection starts empty.
  def initialize(id)
    @id = "#{id}"
    @points = []
  end

  # Appends a point to the run; nil points are silently ignored.
  def add_point(point)
    @points << point if point
  end
end
|
2057
|
-
|
2058
|
-
# =============================================================================
|
2059
|
-
|
2060
|
-
class CsvReader < GoobyObject

  # Reads one or more Gooby CSV files into an array of CsvPoint objects.
  attr_reader :files, :cvs_points

  # Registers each given filename (existing files only) for later #read.
  def initialize(array_of_filenames=nil)
    @files = Array.new
    @cvs_points = Array.new
    if array_of_filenames
      array_of_filenames.each { |filename| add_file(filename) }
    end
  end

  # Adds a file to the read list; nil or nonexistent paths are ignored.
  def add_file(filename)
    if (filename)
      if (File.exist?(filename))
        @files << filename
      end
    end
  end

  # Reads every registered file and returns the accumulated CsvPoint array.
  # '#cols: ...' header lines set @col_names; other '#' lines are skipped;
  # data lines shorter than 51 characters are ignored as incomplete.
  def read
    @files.each { |filename|
      # read_lines is presumably inherited from GoobyObject - TODO confirm
      # the meaning of its second (true) argument.
      lines = read_lines(filename, true)
      lines.each { |line|
        if (line.match('^#'))
          if (line.match('^#cols: '))
            # Strip the leading '#cols: ' prefix (7 chars) before splitting.
            col_names_header = line[7, line.size]
            @col_names = col_names_header.split('|')
          end
        else
          if (line.size > 50)
            @cvs_points << Gooby::CsvPoint.new(line)
          end
        end
      }
    }
    @cvs_points
  end

  # Debug helper: prints one record's values aligned against the column
  # names captured from the '#cols:' header.
  def display_formatted_record(record_index=2)
    tokens = @cvs_points[record_index].rawdata.split('|')
    puts "\nCsvReader.display_formatted_record hdr_cols=#{@col_names.size} data_cols=#{tokens.size}"
    size = 0
    @col_names.each { |col_name|
      size = size + 1
      if size <= tokens.size
        value = tokens[size - 1]
        puts "#{col_name.strip.ljust(20)} #{(size - 1).to_s.ljust(3)} #{value}"
      end
    }
  end

  # Summary line plus one line per registered file.
  def to_s
    s = "CsvReader - file count: #{files.size} total points: #{cvs_points.size}"
    @files.each { |file| s << "\n file: #{file} "}
    s
  end
end
|
2118
|
-
|
2119
|
-
# =============================================================================
|
2120
|
-
|
2121
|
-
class Trackpoint < Point

  # A single GPS trackpoint within a run, with cumulative distance/pace
  # state filled in during Run#finish.
  # NOTE(review): :first, :last and :run_number are declared twice below -
  # harmless but redundant.
  attr_accessor :first, :last, :number, :run_number, :dttm, :prev_tkpt, :lap_number, :lap_seq, :lap_distance, :lap_elapsed
  attr_accessor :cumulative_distance, :cumulative_pace, :incremental_distance, :split, :prev_split
  attr_accessor :first, :last, :run_id, :run_number
  attr_accessor :run_start_dttm

  # num: trackpoint sequence number; lat/lng in degrees; alt in meters
  # (converted to feet); time_string: ISO-8601-style timestamp for DtTm;
  # auxInfoHash: optional extras ('lap_number', lap start times) whose
  # default Hash returns '' for missing keys.
  def initialize(num, lat, lng, alt, time_string, auxInfoHash=Hash.new(''))
    @number = num
    @run_number = 0
    @auxInfoHash = auxInfoHash
    @lap_seq = 1
    @lap_distance = 0
    lap_num = @auxInfoHash['lap_number']
    if (lap_num && lap_num.size > 0)
      @lap_number = lap_num.to_i
    else
      @lap_number = 0
    end
    # initialize superclass variables:
    @latitude = lat.to_s
    @longitude = lng.to_s
    feet = alt.to_f * 3.2736 # Convert from meters (in the Garmin xml) to feet.
    # NOTE(review): 3.2736 differs from the usual 3.28084 m->ft factor -
    # confirm whether intentional.
    @altitude = feet.to_s
    # NOTE(review): 'note' here is the (unset) attr reader, so this yields
    # '' via nil.to_s - confirm intended.
    @note = note.to_s
    @dttm = DtTm.new(time_string)
    @first = false
    @last = false
    @prev_tkpt = nil
    @cumulative_distance, @incremental_distance, @split = 0.0, 0.0, 0.0
    @cumulative_pace = ""
  end

  public

  # Plain Point copy of this trackpoint's position.
  def point
    Point.new(@latitude, @longitude, @altitude, @note)
  end

  # Alias-like accessor; same value as #point.
  def position
    Point.new(@latitude, @longitude, @altitude, @note)
  end

  def to_s
    "Tkpt: #{@number} #{super.to_s} date: #{@dttm.to_s} cdist: #{@cumulative_distance}"
  end

  # Serializes this trackpoint as one pipe-delimited CSV line (layout in
  # Trackpoint.csv_header).  Lap-relative elapsed times come from the aux
  # hash when present (Garmin 205/305); otherwise from the run start
  # (Garmin 201).
  def to_csv(prev_tkpt=nil)
    first_lap_start_time_s = @auxInfoHash['first_lap_start_time']
    curr_lap_start_time_s = @auxInfoHash['curr_lap_start_time']
    lap_elapsed = ''
    total_elapsed = ''

    if ((first_lap_start_time_s.size > 0) && (curr_lap_start_time_s.size > 0)) # garmin205 & 305
      first_lap_start_time = DtTm.new(first_lap_start_time_s)
      # NOTE(review): the DtTm built on the previous line is immediately
      # overwritten with @run_start_dttm - confirm which is intended.
      first_lap_start_time = @run_start_dttm
      curr_lap_start_time = DtTm.new(curr_lap_start_time_s)
      lap_elapsed = @dttm.hhmmss_diff(curr_lap_start_time)
      total_elapsed = @dttm.hhmmss_diff(first_lap_start_time)
    else # garmin 201
      total_elapsed = @dttm.hhmmss_diff(@run_start_dttm)
      lap_elapsed = total_elapsed
    end

    delim = csv_delim
    csv = "#{@run_id}.#{@number}" # <-- primary key
    csv << "#{delim}#{@run_id}"
    csv << "#{delim}#{@dttm.yyyy_mm_dd_hh_mm_ss('|')}"
    csv << "#{delim}#{@number}"
    csv << "#{delim}#{position.to_csv}"
    csv << "#{delim}#{@cumulative_distance}"
    csv << "#{delim}#{total_elapsed}"
    csv << "#{delim}#{@lap_seq}"
    csv << "#{delim}#{@lap_distance}"
    csv << "#{delim}#{lap_elapsed}"
    csv
  end

  # Column-name header matching the to_csv layout.
  def self.csv_header
    "#cols: primary_key|run_id|date|time|tkpt_num|latitude|longitude|altitude_ft|run_distance|run_elapsed|lap_tkpt_number|lap_distance|lap_elapsed"
  end

  # Short position string for geo debugging.
  def to_geo_s
    ss = position.to_csv
    "Tkpt: #{@number} | #{ss} | #{@descr}"
  end

  # Updates this trackpoint's cumulative distance/pace and lap state from
  # the previous trackpoint.  Returns the new cumulative distance, or 0
  # when there is no previous trackpoint.  units is passed to proximity
  # ('M'/'K'/'N').
  def compute_distance_and_pace(curr_index, start_dttm, prev_cumulative_dist, prev_trackpoint, units)
    @prev_tkpt = prev_trackpoint
    @cumulative_distance = prev_cumulative_dist.to_f
    @run_start_dttm = start_dttm

    if @prev_tkpt
      @incremental_distance = proximity(@prev_tkpt, units)
      if (!@incremental_distance.nan?)
        @cumulative_distance = @cumulative_distance + @incremental_distance.to_f
        # Same lap: extend it; new lap: restart the lap sequence/distance.
        if (@lap_number == prev_trackpoint.lap_number)
          @lap_seq = prev_trackpoint.lap_seq + 1
          @lap_distance = prev_trackpoint.lap_distance + @incremental_distance
        else
          @lap_seq = 1
          @lap_distance = @incremental_distance
        end
      end
      compute_cumulative_pace(start_dttm)
      @cumulative_distance
    else
      @lap_seq = 1
      0
    end
  end

  # Computes average pace ("m:ss.s" per distance unit) since start_dttm;
  # blank when no distance has been covered yet.
  def compute_cumulative_pace(start_dttm)
    if @cumulative_distance > 0
      secsDiff = @dttm.seconds_diff(start_dttm)
      secsMile = ((secsDiff.to_f) / (@cumulative_distance.to_f))
      minsMile = (secsMile / 60)
      wholeMins = minsMile.floor
      secsBal = secsMile - (wholeMins * 60)
      # s1 is an unused debug string retained from the original.
      s1 = "#{secsDiff} #{secsMile} #{minsMile} #{wholeMins} #{secsBal} #{@cumulative_distance} | "
      s2 = sprintf("%d:%2.1f", minsMile, secsBal)
      @cumulative_pace = "#{s2}"
    else
      @cumulative_pace = ""
    end
  end

  # Marks this trackpoint as split number n, remembering the previous
  # split trackpoint.
  def set_split(n, tkpt)
    @split, @prev_split = n, tkpt
  end

  # True when this trackpoint has been designated a split marker.
  def is_split()
    (@split >= 1)
  end

  # Returns "<split> <elapsed>" for a split trackpoint (elapsed since the
  # previous split, or since dtTm for the first), else ''.
  def split_info(dtTm)
    if is_split
      hhmmss = ''
      if @prev_split
        return "#{@split} #{@dttm.hhmmss_diff(@prev_split.dttm())}"
      else
        return "#{@split} #{@dttm.hhmmss_diff(dtTm)}"
      end
    else
      ""
    end
  end

  public

  # Emits one Google Maps JavaScript 'points.push(new GLatLng(...))' line,
  # optionally commented out and optionally annotated with timing data.
  def as_glatlng(comment_out, gen_comments, tkpt_count, curr_idx, start_dttm)
    comment_out ? comment = '// ' : comment = ''
    if gen_comments
      secs_diff = @dttm.seconds_diff(start_dttm)
      fmt_time = @dttm.hhmmss_diff(start_dttm)
      "\n  #{comment}points.push(new GLatLng(#{latitude_as_float},#{longitude_as_float})); " +
      "// (#{curr_idx + 1} of #{tkpt_count}) #{@dttm.to_s} #{secs_diff} #{fmt_time} #{@cumulative_distance} #{split_info(start_dttm)} #{project_embedded_comment} "
    else
      "\n  #{comment}points.push(new GLatLng(#{latitude_as_float},#{longitude_as_float})); // #{project_embedded_comment} "
    end
  end

  # Builds a quoted HTML table (as a JS string literal) for a Google Maps
  # info window.  checkpoint is 'Start', 'Finish', a checkpoint label, or
  # nil/false for a bare (empty-body) table.
  def as_info_window_html(checkpoint, start_dttm)
    s = "\"<table align='left'>"
    if checkpoint
      secs_diff = @dttm.seconds_diff(start_dttm)
      fmt_time = @dttm.hhmmss_diff(start_dttm)

      if checkpoint == 'Start'
        s << "<tr><td colspan='2'><b>Start!</b></td></tr>"
      elsif checkpoint == 'Finish'
        s << "<tr><td colspan='2'><b>Finish!</b></td></tr>"
      else
        s << "<tr><td colspan='2'><b>Checkpoint #{checkpoint}</b></td></tr>"
      end
      s << "<tr><td>Distance: </td><td>#{@cumulative_distance}</td></tr>"
      s << "<tr><td>Time of Day: </td><td>#{@dttm.to_s} </td></tr>"
      s << "<tr><td>Elapsed Time: </td><td>#{fmt_time} </td></tr>"
      s << "<tr><td>Average Pace: </td><td>#{@cumulative_pace} </td></tr>"
      s << "<tr><td>Lat/Lng: </td><td>#{latitude_as_float} , #{longitude_as_float} </td></tr>"
      #s << "<tr><td>Altitude: </td><td>#{altitude_as_float}m </td></tr>"
      # NOTE(review): this bare 's' is a no-op expression (not a return).
      s
    end
    s << "</table>\""
    s
  end
end
|
2308
|
-
|
2309
|
-
# =============================================================================
|
2310
|
-
|
2311
|
-
=begin rdoc
|
2312
|
-
Instances of this class represent a <Run> aggregate object from a
|
2313
|
-
Forerunner XML file.
|
2314
|
-
|
2315
|
-
Additionally, there is distance, pace, and Google Map generation logic
|
2316
|
-
in this class.
|
2317
|
-
=end
|
2318
|
-
|
2319
|
-
class Run < GoobyObject

  attr_accessor :number, :run_id, :descr, :notes, :tracks, :tkpts, :laps, :distance

  # A run starts empty; tracks/trackpoints/laps are appended during
  # parsing and consolidated by #finish.
  def initialize(number=0, descr='')
    @number = number
    @run_id = nil
    @descr = descr
    @notes = ''
    @tracks = Array.new
    @tkpts = Array.new
    @laps = Array.new
    @distance = 0
    @configuration = Hash.new
    @logProgress = true
    @finished = false
  end

  public

  # This method is invoked at end-of-parsing.
  # Flattens all tracks' trackpoints into @tkpts, then computes distance,
  # pace, splits and run ids.  Idempotent via the @finished flag.
  def finish()
    @logProgress = false
    unless @finished
      @tracks.each { |trk|
        trk.trackpoints().each { |tkpt|
          tkpt.run_number = @number
          @tkpts.push(tkpt)
        }
      }
      compute_distance_and_pace
      compute_splits
      set_run_ids
      @finished = true
    end
  end

  public

  # Appends a track; nil is ignored.
  def add_track(trk)
    if trk != nil
      @tracks.push(trk)
    end
  end

  # Number of consolidated trackpoints (populated by #finish).
  def trackpoint_count()
    @tkpts.size()
  end

  def add_lap(lap)
    @laps.push(lap)
  end

  def lap_count()
    @laps.size
  end

  # DtTm of the first trackpoint of the first track, or nil when empty.
  def start_dttm()
    count = 0
    @tracks.each { |trk|
      trk.trackpoints().each { |tkpt|
        # Returns immediately from the very first trackpoint seen.
        return tkpt.dttm()
      }
    }
    return nil
  end

  # DtTm of the last trackpoint across all tracks, or nil when empty.
  def end_dttm()
    lastOne = nil
    @tracks.each { |trk|
      trk.trackpoints().each { |tkpt|
        lastOne = tkpt.dttm()
      }
    }
    lastOne
  end

  # Elapsed hh:mm:ss between first and last trackpoints; "00:00:00" when
  # either endpoint is missing.
  def duration()
    first = start_dttm()
    last = end_dttm()
    if first
      if last
        return last.hhmmss_diff(first)
      end
    end
    return "00:00:00"
  end

  # Run start date as yyyy-mm-dd, or '' when there are no trackpoints.
  def start_yyyy_mm_dd
    if start_dttm()
      start_dttm().yyyy_mm_dd()
    else
      ""
    end
  end

  # Run start time-of-day, or '' when there are no trackpoints.
  def start_hh_mm_ss
    if start_dttm()
      start_dttm().hh_mm_ss()
    else
      ""
    end
  end

  # Run end time-of-day, or '' when there are no trackpoints.
  def end_hh_mm_ss
    if end_dttm()
      end_dttm().hh_mm_ss()
    else
      ""
    end
  end

  def to_s
    finish() unless @finished
    s = "Run: #{@number} date: #{start_yyyy_mm_dd} distance: #{distance} duration: #{duration} "
    s << " tracks: #{@tracks.size} tkpts: #{trackpoint_count} laps: #{lap_count} "
    s << " notes: #{@notes} "
    s
  end

  def print_string
    finish() unless @finished
    "Run number=#{@number} tracks=#{@tracks.size} tkpts=#{@tkpts.size} laps=#{@laps.size} distance=#{@distance} "
  end

  # Prints the run-level summary as one pipe-delimited CSV line.
  # (The second field is intentionally empty: "#{}".)
  def put_csv()
    finish() unless @finished
    puts "#{@number}|#{}|#{start_yyyy_mm_dd()}|#{start_hh_mm_ss()}|#{end_hh_mm_ss}|#{duration()}|#{@distance}|#{@tracks.size}|#{trackpoint_count()}|#{lap_count}|#{@notes.strip}"
  end

  # Prints one CSV line per trackpoint (Trackpoint#to_csv).
  def put_tkpt_csv()
    finish() unless @finished
    @tkpts.each { | tkpt |
      if (@prev_tkpt == nil)
        @prev_tkpt = tkpt
      end
      puts tkpt.to_csv(@prev_tkpt)
    }
  end

  def put_laps
    @laps.each { | lap | puts lap.to_s }
  end

  private

  # Walks the consolidated trackpoints in order, letting each one extend
  # the cumulative distance from its predecessor ('M' = miles).
  def compute_distance_and_pace
    cumulative_dist = 0.to_f;
    curr_index = -1
    prev_tkpt = nil
    start_dttm = nil
    @tkpts.each { | tkpt |
      curr_index = curr_index + 1
      if curr_index == 0
        start_dttm = tkpt.dttm()
        prev_tkpt = tkpt
      else
        cumulative_dist = tkpt.compute_distance_and_pace(curr_index, start_dttm, cumulative_dist, prev_tkpt, 'M')
        prev_tkpt = tkpt
      end
    }
    @distance = cumulative_dist
  end

  # Marks the first trackpoint at or past each whole-unit distance as a
  # split, and sets the first/last flags.
  def compute_splits
    nextSplitDist = 1.00
    prev_splitTkpt = nil
    loop1Count = 0;
    @tkpts.each { |tkpt|
      loop1Count = loop1Count + 1
      if tkpt.cumulative_distance() >= nextSplitDist
        tkpt.set_split(0 + nextSplitDist, prev_splitTkpt)
        nextSplitDist = nextSplitDist + 1.00
        prev_splitTkpt = tkpt
      end
    }
    # set first and last booleans
    count = 0
    @tkpts.each { |tkpt|
      count = count + 1
      tkpt.first = true if count == 1
      tkpt.last = true if count == loop1Count
    }
  end

  # Derives the run id from the first trackpoint's raw timestamp and
  # stamps it onto every trackpoint.
  def set_run_ids
    @tkpts.each { |tkpt|
      if (@run_id == nil)
        @run_id = tkpt.dttm.rawdata
      end
      tkpt.run_id = @run_id
    }
  end
end
|
2513
|
-
|
2514
|
-
# =============================================================================
|
2515
|
-
|
2516
|
-
=begin rdoc
|
2517
|
-
Sample implementation of a REXML::StreamListener SAX parser.
|
2518
|
-
This class isn't actually used in Gooby, but is retained for future use.
|
2519
|
-
=end
|
2520
|
-
|
2521
|
-
class SimpleXmlParser

  include REXML::StreamListener

  attr_accessor :tag_count, :watched_tags

  # Begins with zero tags seen and an empty per-tag counter.
  def initialize
    @tag_count = 0
    @counter_hash = CounterHash.new
  end

  public

  # SAX API method. Increments the tagname in the counter hash.
  def tag_start(tag_name, attrs)
    @tag_count += 1
    @counter_hash.increment(tag_name)
  end

  # SAX API method. No impl.
  def tag_end(tagname)
  end

  # SAX API method. No impl.
  def text(txt)
  end

  # Prints the state of this object (the counter hash).
  def dump
    puts @counter_hash.to_s
  end
end
|
2553
|
-
|
2554
|
-
# =============================================================================
|
2555
|
-
|
2556
|
-
=begin rdoc
|
2557
|
-
Instances of this class represent a <Track> aggregate object from a Forerunner
|
2558
|
-
XML file. Note that a <Run> may contain more than one <Track> aggregates.
|
2559
|
-
=end
|
2560
|
-
|
2561
|
-
class Track < GoobyObject

  attr_reader :number, :descr, :trackpoints

  # A track begins with no trackpoints; number and description are optional.
  def initialize(num=0, descr='')
    @number      = num
    @descr       = descr
    @trackpoints = []
  end

  public

  # Appends one trackpoint to this track.
  def add_trackpoint(tkpt)
    @trackpoints << tkpt
  end

  # Number of trackpoints currently held.
  def size
    @trackpoints.size
  end

  def to_s
    "Trk: #{@descr} tkpts: #{size}"
  end

  # Prints a summary line, then each trackpoint in CSV form.
  def dump
    puts "Track: '#{@descr}' tkpts: #{size}"
    @trackpoints.each { |pt| puts pt.to_csv }
  end
end
|
2590
|
-
|
2591
|
-
# =============================================================================
|
2592
|
-
|
2593
|
-
class Course < GoobyObject

  # A named course defined in the YAML config as
  # "name,distance,poi-number,poi-number,...".  Course points are resolved
  # through the Configuration singleton; CsvPoints matched against them
  # are collected via #matched.
  attr_accessor :name, :distance, :point_numbers, :points

  def initialize(yaml_csv)
    @name, @distance = '', 0.0
    @point_numbers, @points, @bad_points = Array.new, Array.new, Array.new
    @points_hash, @matched_points = Hash.new, Hash.new
    tokens = yaml_csv.split(',')
    @name = tokens[0] if tokens.size > 0
    @distance = tokens[1].to_f if tokens.size > 1
    if tokens.size > 2
      index = 0
      # Tokens 3..n are poi numbers; unresolvable ones go to @bad_points.
      tokens.each { |tok|
        index = index + 1
        if (index > 2)
          poi = Configuration.get_config.get_poi(tok)
          if (poi)
            poi.number = "#{tok}"
            @point_numbers << "#{tok}"
            @points << poi
            @points_hash["#{tok}"] = poi
          else
            @bad_points << tok
          end
        end
      }
    end
  end

  # True when any configured poi number failed to resolve.
  def has_errors
    (@bad_points.size > 0) ? true : false
  end

  # Records the point matched against course point 'number'; nil ignored.
  def matched(number, point)
    @matched_points["#{number}"] = point if point
  end

  # True when every course point has been matched.
  def matched?
    (@matched_points.size == @point_numbers.size) ? true : false
  end

  # Prints each course point alongside its matched point (if any), after
  # computing course-relative distance/elapsed values.
  def display_matches
    puts ''
    calculate_matches
    @point_numbers.each { |num|
      point = @points_hash["#{num}"]
      mpoint = @matched_points["#{num}"]
      puts ''
      puts " Course Point: #{point.to_formatted_string}" if point
      puts " Matched Point: #{mpoint.to_formatted_string}" if mpoint
    }
    puts ''
  end

  # Clears all recorded matches.
  def reset
    @matched_points = Hash.new
  end

  def to_s
    "#{@name} #{@distance} points: #{@points.size} errors: #{has_errors}"
  end

  def dump
    puts "Course: #{@name}"
    puts "Distance: #{@distance}"
    points.each { |pt| puts pt } #{@points.size} errors: #{has_errors}"
  end

  private

  # Computes course-relative distance and elapsed time for each matched
  # point, relative to the matched point with the lowest run distance.
  # Reverses @point_numbers if the matches run high-to-low.
  def calculate_matches
    # first, identify the high and low distance points and their indices.
    idx, low_dist, low_dist_idx, low_time, high_dist, high_dist_idx = -1, 999999.0, 0, '', -1.0, 0
    @point_numbers.each { |num|
      idx = idx + 1
      mpoint = @matched_points["#{num}"]
      if mpoint && mpoint.distance < low_dist
        low_dist = mpoint.distance
        low_dist_idx = idx
        low_time = mpoint.elapsed
      end
      if mpoint && mpoint.distance > high_dist
        high_dist = mpoint.distance
        high_dist_idx = idx
      end
    }
    # The date here is an arbitrary fixed day: only the time-of-day part
    # matters for the diffs below.
    low_dttm = DtTm.new("2007-06-09T#{low_time}Z")

    # reorder the entries in @point_numbers if necessary - 'low-to-high distance'.
    if (high_dist_idx < low_dist_idx)
      @point_numbers.reverse!
    end

    @point_numbers.each { |num|
      mpoint = @matched_points["#{num}"]
      if mpoint
        mpoint.course_distance = mpoint.distance - low_dist
        dttm = DtTm.new("2007-06-09T#{mpoint.elapsed}Z")
        mpoint.course_elapsed = dttm.hhmmss_diff(low_dttm)
      end
    }
  end
end
|
2697
|
-
|
2698
|
-
# =============================================================================
|
2699
|
-
|
2700
|
-
=begin rdoc
|
2701
|
-
This class reads and scans the Gooby code for various purposes.
|
2702
|
-
|
2703
|
-
Primarily, it is used to regenerate, on an ongoing basis, the various
|
2704
|
-
regression tests in file 'ts_gooby.rb'. Regeneration retains the current
|
2705
|
-
test methods, adds stubs for new test methods, and flags obsolete methods.
|
2706
|
-
|
2707
|
-
It is also used to create a Gooby class, module, and method "quick reference"
|
2708
|
-
- somewhat similar to the TextMate symbol list.
|
2709
|
-
|
2710
|
-
TODO: Method indexing and "where used" functionality.
|
2711
|
-
=end
|
2712
|
-
|
2713
|
-
class CodeScanner < GoobyObject
|
2714
|
-
|
2715
|
-
def initialize(argv)
|
2716
|
-
|
2717
|
-
function = 'outline'
|
2718
|
-
if (argv.size > 0)
|
2719
|
-
function = argv[0]
|
2720
|
-
end
|
2721
|
-
|
2722
|
-
@codebase_file = 'gooby.rb'
|
2723
|
-
@testbase_file = 'ts_gooby.rb'
|
2724
|
-
@code_lines = read_lines("lib/#{@codebase_file}")
|
2725
|
-
@test_lines = read_lines("tests/#{@testbase_file}")
|
2726
|
-
|
2727
|
-
puts "code lines = #{@code_lines.size}"
|
2728
|
-
puts "test lines = #{@test_lines.size}"
|
2729
|
-
|
2730
|
-
@tokens_hash = CounterHash.new
|
2731
|
-
@module_names_hash = CounterHash.new
|
2732
|
-
@class_names_hash = CounterHash.new
|
2733
|
-
@method_names_hash = CounterHash.new
|
2734
|
-
@mc_line_num_array = Array.new
|
2735
|
-
@type_names = Hash.new
|
2736
|
-
@code_hash = Hash.new
|
2737
|
-
@test_hash = Hash.new
|
2738
|
-
@api_hash = Hash.new
|
2739
|
-
@merged_hash = Hash.new
|
2740
|
-
@exclude_classes = Array.new
|
2741
|
-
|
2742
|
-
regenerate_test_suite if (function == 'regenerate_test_suite')
|
2743
|
-
regenerate_test_suite if (function == 'tests')
|
2744
|
-
|
2745
|
-
model_class_outline if (function == 'model_class_outline')
|
2746
|
-
model_class_outline if (function == 'outline')
|
2747
|
-
|
2748
|
-
quick_reference_guide if (function == 'quick_reference_guide')
|
2749
|
-
quick_reference_guide if (function == 'qrg')
|
2750
|
-
|
2751
|
-
mcm_references if (function == 'mcm_references')
|
2752
|
-
mcm_references if (function == 'mcm')
|
2753
|
-
end
|
2754
|
-
|
2755
|
-
private
|
2756
|
-
|
2757
|
-
def regenerate_test_suite
|
2758
|
-
@exclude_classes = %w( TestHelper CodeScanner )
|
2759
|
-
parse_code_lines
|
2760
|
-
parse_test_lines
|
2761
|
-
merge_keys
|
2762
|
-
regenerate(@testbase_file)
|
2763
|
-
end
|
2764
|
-
|
2765
|
-
def model_class_outline
|
2766
|
-
parse_code_lines
|
2767
|
-
@mc_line_num_array.each { |line|
|
2768
|
-
tokens = tokenize(line)
|
2769
|
-
type = tokens[0].ljust(6)
|
2770
|
-
name = tokens[1]
|
2771
|
-
count = tokens[2].rjust(6)
|
2772
|
-
puts " #{count} #{type} #{name}"
|
2773
|
-
}
|
2774
|
-
end
|
2775
|
-
|
2776
|
-
def quick_reference_guide
|
2777
|
-
parse_code_lines
|
2778
|
-
determine_longest_names
|
2779
|
-
quick_reference_guide_report('module')
|
2780
|
-
quick_reference_guide_report('class')
|
2781
|
-
end
|
2782
|
-
|
2783
|
-
# Prints one formatted line per @api_hash key whose first '|'-delimited
# token equals obj_type ('module' or 'class').
# Columns: type (6 wide), name (padded to @longest_classname + 1), signature.
def quick_reference_guide_report(obj_type)
  @api_hash.keys.sort.each do |key|
    tokens = tokenize(key, '|')
    next unless tokens[0] == obj_type
    type_col = tokens[0].ljust(6)
    name_col = tokens[1].ljust(@longest_classname + 1)
    puts "#{type_col} #{name_col} #{tokens[2]}"
  end
end
|
2796
|
-
|
2797
|
-
# Computes @longest_classname: the length of the longest class/module name
# (second '|'-delimited token) across all @api_hash values. Used by the
# quick-reference report for column padding.
def determine_longest_names
  @longest_classname = 0
  @api_hash.keys.sort.each do |key|
    name_size = tokenize(@api_hash[key], '|')[1].size
    @longest_classname = name_size if name_size > @longest_classname
  end
end
|
2804
|
-
|
2805
|
-
# Reports how often each module, class, and method name occurs as a token
# in the scanned code (counts gathered by parse_code_lines into @tokens_hash).
def mcm_references
  parse_code_lines
  determine_longest_names
  [['module', @module_names_hash],
   ['class',  @class_names_hash],
   ['method', @method_names_hash]].each do |label, names_hash|
    names_hash.sorted_keys.each do |name|
      count = @tokens_hash.value(name)
      puts "#{label}: #{name} (#{count})"
    end
  end
end
|
2821
|
-
|
2822
|
-
# Scans @code_lines, recording:
#   * module/class declarations into @module_names_hash / @class_names_hash
#     and @mc_line_num_array ("type name line_number"),
#   * method definitions into @code_hash (keys "test_<type>_<Name>" and
#     "test_<type>_<Name>_<method>") and @api_hash ("type|Name|signature"),
#   * every identifier token into @tokens_hash via increment_tokens.
# The most recent module/class seen provides the context for each def.
# Note: lines are stripped in place.
def parse_code_lines
  obj_type = ''
  obj_name = ''
  line_number = 0
  @code_lines.each do |line|
    line_number += 1
    line.strip!
    case line
    when /^module /
      obj_type = 'module'
      obj_name = line.split[1]
      @module_names_hash.increment(obj_name)
      @mc_line_num_array << "module #{obj_name} #{line_number}"
    when /^class /
      obj_type = 'class'
      obj_name = line.split[1]
      @class_names_hash.increment(obj_name)
      @mc_line_num_array << "class #{obj_name} #{line_number}"
    when /^def /
      signature = line[4...999]
      short_method = parse_meth_name("#{signature}")
      @code_hash["test_#{obj_type}_#{obj_name}"] = "#{obj_name}"
      @code_hash["test_#{obj_type}_#{obj_name}_#{short_method}"] = "#{obj_name}|#{signature}"
      @api_hash["#{obj_type}|#{obj_name}|#{signature}"] = "#{obj_type}|#{obj_name}|#{signature}"
      @method_names_hash.increment(short_method)
    end
    increment_tokens(line)
  end
end
|
2853
|
-
|
2854
|
-
# Replaces every byte of +line+ that cannot appear in a Ruby identifier
# (A-Z, a-z, 0-9, '_', '@', '?', '!') with a space, then feeds the cleaned
# string to @tokens_hash.increment_tokens so identifier-like tokens are
# counted. Byte values per http://www.asciitable.com/
def increment_tokens(line)
  cleaned = ''
  line.each_byte do |b|
    keep = (b >= 65 && b <= 90)  ||   # A-Z
           (b >= 97 && b <= 122) ||   # a-z
           (b >= 48 && b <= 57)  ||   # 0-9
           b == 95 ||                 # _
           b == 64 ||                 # @
           b == 63 ||                 # ?
           b == 33                    # !
    cleaned << (keep ? b.chr : ' ')
  end
  @tokens_hash.increment_tokens(cleaned)
end
|
2874
|
-
|
2875
|
-
# Extracts the bare method name from a 'def' signature line.
# e.g. "run(a, b)" -> "run", "run" -> "run", "valid?(x)" -> "valid?"
#
# string - the signature text following 'def ' (String).
#
# Returns the method name String, or nil when the signature is blank.
#
# Fix: the previous implementation used gsub!, which mutated the caller's
# string in place (and would raise on a frozen string). tr builds a copy,
# leaving the argument untouched.
def parse_meth_name(string)
  string.tr('()', '  ').split[0]
end
|
2881
|
-
|
2882
|
-
# Scans @test_lines and records each test method's source lines in
# @test_hash, keyed by method name. The preamble captured up to the
# 'beginning of tests' marker line is stored under the key "aaa".
# Lines are chomped in place. Indentation is expected to be either
# two spaces or a single tab before 'def'/'end'.
def parse_test_lines
  in_zone = true
  method_name = 'a_start'
  method_lines = []
  line_num = 0

  @test_lines.each do |line|
    line_num += 1
    line.chomp!
    prefix = line[0...5]            # '  def' or '  end'
    marker_prefix = line[0..42]

    # A method definition opens a new capture zone.
    if prefix == '  def' || prefix == "\tdef"
      in_zone = true
      method_name = line.split[1]
      method_lines = []
    end

    method_lines << "#{line}" if in_zone

    # Everything captured before this marker is the test-class preamble.
    if marker_prefix == '  # beginning of tests - keep this marker'
      in_zone = false
      @test_hash["aaa"] = method_lines
    end

    # 'end' closes the current method; store its captured lines.
    if prefix == '  end' || prefix == "\tend"
      in_zone = false
      @test_hash["#{method_name}"] = method_lines
    end
  end
end
|
2914
|
-
|
2915
|
-
# Rebuilds @merged_hash from the parsed sources: every @code_hash key maps
# to "code", then every @test_hash key maps to "test" (so a key present in
# both ends up marked "test").
def merge_keys
  @merged_hash = {}
  [[@code_hash, "code"], [@test_hash, "test"]].each do |source, origin|
    source.keys.sort.each { |key| @merged_hash["#{key}"] = origin }
  end
end
|
2920
|
-
|
2921
|
-
# Regenerates the test-suite source file from @merged_hash and writes it
# to tests/<test_file>.
#
# For each merged key (format "test_<type>_<Name>_<method>"):
#   * keys whose class name is in @exclude_classes are skipped;
#   * an existing test method (present in @test_hash) is kept verbatim,
#     with a warning comment prepended when the code it tested no longer
#     exists in @code_hash;
#   * a missing test method gets a generated stub; when @gen_impl_stub is
#     truthy the stub contains commented-out assertion scaffolding.
#
# test_file - output filename (String), written under the 'tests/' dir.
#
# Fixes:
#   * `if type = 'class'` was an assignment, not a comparison - the
#     condition was always truthy, so the non-class stub branch was dead
#     and every stub rendered as "obj = class.new". Now uses `==`.
#   * `comment` was never reset between loop iterations, so a stale
#     warning could leak onto a later key. Now reset per iteration.
def regenerate(test_file)
  code = ''
  @merged_hash.keys.sort.each { |key|
    tokens = key.split('_')
    type, name, meth = tokens[1], tokens[2], tokens[3]

    # Skip keys whose class is excluded from regeneration.
    next if @exclude_classes.include?(name)

    comment = nil  # reset each iteration (previously leaked across keys)

    if @test_hash.has_key?(key)
      # We already have a test method written in the test class,
      # so keep this currently existing test code!
      unless @code_hash.has_key?(key)
        if key != 'a_start'
          comment = "# Warning: possible obsolete test method - #{key}"
        end
      end

      code << "\n"
      if comment != nil
        code << "\n#{comment}"
        code << "\n"
      end
      array = @test_hash["#{key}"]
      array.each { |line| code << "\n#{line}" }
    else
      # We don't have this test method in the current test class,
      # so generate a test method stub.
      code << "\n"
      code << "\n  def #{key}"
      code << "\n"

      if @gen_impl_stub
        if type == 'class'   # was `type = 'class'` (assignment) - always truthy
          code << "\n    #obj = #{type}.new"
          code << "\n    #result = obj.#{meth}"
          code << "\n    #expected = ''"
          s = "\n"
          s << '    #assert_equal(expected, actual, "'
          s << "#{type}.#{meth} "
          s << 'values are not as expected; #{result} vs #{expected}")'
          code << s
        else
          code << "\n    #result = #{type}.#{meth}"
          code << "\n    #expected = ''"
          s = "\n"
          s << '    #assert_equal(expected, actual, "'
          s << "#{type}.#{meth} "
          s << 'values are not as expected; #{result} vs #{expected}")'
          code << s
        end
      end
      code << "\n  end"
    end
  }
  code << "\nend" # end of class
  code << "\n"
  fn = "tests/#{test_file}"
  out = File.new fn, "w+"
  out.write code
  out.flush
  out.close
  puts "file written: #{fn}"
end
|
2994
|
-
end
|
2995
|
-
|
2996
|
-
# =============================================================================
|
2997
|
-
|
2998
|
-
=begin rdoc
This class provides a "user friendly DSL" for driving Gooby.
See 'bin/example_usage.rb' for examples of using this class.
=end
|
3002
|
-
|
3003
|
-
class GoobyCommand < GoobyObject

  # The Gooby::Configuration built by the constructor from the yaml file.
  attr_reader :configuration

  # gooby_yaml_filename - path to the gooby config yaml file, or nil
  # (how nil is handled is decided by Gooby::Configuration.init - defined
  # elsewhere; TODO confirm its default-file behavior).
  def initialize(gooby_yaml_filename=nil)
    @configuration = Gooby::Configuration.init(gooby_yaml_filename)
  end

  # Prints a one-line project name/version/date/copyright banner.
  # (The project_* values come from methods not visible in this file,
  # presumably mixed in via GoobyObject - confirm.)
  def display_version
    s = "# #{project_name} #{project_version_number} #{project_date}.  #{project_copyright}."
    puts s
  end

  # Splits a Garmin export file into per-run files.
  # argv - [format, input_filename, output_directory]. Prints usage help
  # and returns when args are missing. Format 'garmin201' selects the
  # Forerunner Logbook splitter; any other format falls through to the
  # Training Center splitter.
  def split_garmin_export_file(argv)
    if (argv == nil)
      puts "ERROR: no ARGV args passed."
    elsif (argv.size < 3)
      puts ""
      puts "Invalid program args; three args required - format, input filename, output directory"
      puts "  the first arg, format, should be one of: garmin201, garmin205, garmin305, etc."
      puts "  the second arg is the input filename - which is a garmin export file"
      puts "  the third arg is the output directory where the split files are written to\n\n"
      puts "Please correct the program arguments and try again. \n\n"
    else
      format = argv[0].downcase
      filename = argv[1]
      out_dir = argv[2]

      if (format == 'garmin201')
        split_garmin_forerunner_logbook_xml(filename, out_dir)
      else
        split_garmin_training_center_xml(filename, out_dir)
      end
    end
  end

  # Splits a Garmin Forerunner Logbook xml export into per-run files
  # under out_dir.
  def split_garmin_forerunner_logbook_xml(xml_filename, out_dir)
    splitter = Gooby::ForerunnerXmlSplitter.new(xml_filename, out_dir)
    splitter.split
  end

  # Splits a Garmin Training Center (tcx) export into per-run files
  # under out_dir.
  def split_garmin_training_center_xml(tcx_filename, out_dir)
    splitter = Gooby::TrainingCenterXmlSplitter.new(tcx_filename, out_dir)
    splitter.split
  end

  # Parses a (previously split) Garmin xml file and prints trackpoints
  # as CSV (header first). argv - [format, input_xml_filename]; prints
  # usage help and returns when args are missing.
  def parse_garmin_xml_file(argv)
    if (argv == nil)
      puts "ERROR: no ARGV args passed."
    elsif (argv.size < 2)
      puts ""
      puts "Invalid program args; two args required - format, input (xml) filename, output directory"
      puts "  the first arg, format, should be one of: garmin201, garmin205, garmin305, etc."
      puts "  the second arg, input xml filename, was produced by the Gooby 'splitter.rb' program."
      puts "Please correct the program arguments and try again. \n\n"
    else
      format = argv[0].downcase
      filename = argv[1]
      puts Trackpoint.csv_header
      if (format == 'garmin201')
        parse_garmin_forerunner_logbook_xml(filename)
      else
        parse_garmin_training_center_xml(filename)
      end
    end
  end

  # Stream-parses a Forerunner Logbook xml file with the SAX-style
  # ForerunnerXmlParser and prints each run's trackpoints as CSV.
  def parse_garmin_forerunner_logbook_xml(xml_filename)
    handler = Gooby::ForerunnerXmlParser.new
    Document.parse_stream((File.new xml_filename), handler)
    handler.put_all_run_tkpt_csv()
  end

  # Stream-parses a Training Center (tcx) file with the SAX-style
  # TrainingCenterXmlParser and prints each run's trackpoints as CSV.
  def parse_garmin_training_center_xml(tcx_filename)
    handler = Gooby::TrainingCenterXmlParser.new
    Document.parse_stream((File.new tcx_filename), handler)
    handler.put_all_run_tkpt_csv()
  end

  # Reads the given parsed CSV files into @cvs_points (via Gooby::CsvReader)
  # and returns the points. When record_index > 0, also displays that
  # record formatted. (Note: 'cvs' vs 'csv' spelling is historical and
  # kept consistent with the rest of the file.)
  def read_csv_files(array_of_filenames, record_index=0)
    @cvs_reader = Gooby::CsvReader.new(array_of_filenames)
    @cvs_points = @cvs_reader.read
    puts @cvs_reader.to_s
    if (record_index > 0)
      @cvs_reader.display_formatted_record(record_index)
    end
    @cvs_points
  end

  # Checks whether any previously-read run (see read_csv_files) passed
  # through the course identified by course_id in the config yaml.
  #
  # course_id - course key looked up via configuration.get_course
  # proximity - max lat/long degree difference to count as a match
  # uom       - unit-of-measure label; not referenced in this body
  #             (NOTE(review): possibly reserved for future use - confirm)
  #
  # Requires read_csv_files to have been called first; prints a hint and
  # returns otherwise.
  def been_there(course_id, proximity=0.0070, uom='deg')
    unless @cvs_points
      puts "You must first invoke method 'read_csv_files' with a list of parsed CSV filenames."
      return
    end
    course = configuration.get_course("#{course_id}")
    unless course
      puts "Unable to find course id '#{course_id}' in the gooby config yaml file."
      return
    end
    puts ''
    # collect the cvs_points into run arrays
    # (points are grouped by consecutive run_id into Gooby::CvsRun objects)
    @curr_run_id = 'x'
    @cvs_runs = Array.new
    @cvs_points.each { |cvs_point|
      if (cvs_point.run_id == @curr_run_id)
        @curr_run.add_point(cvs_point)
      else
        @curr_run_id = cvs_point.run_id
        @curr_run = Gooby::CvsRun.new(@curr_run_id)
        @curr_run.add_point(cvs_point)
        @cvs_runs << @curr_run
      end
    }

    # iterate the runs - looking for a match vs the course
    # For each course point, find the closest run point within 'proximity'
    # degrees and register it with the course via course.matched.
    @cvs_runs.each { |cvs_run|
      puts "Scanning run id '#{cvs_run.id}', point count= #{cvs_run.points.size}" if false
      run_points = cvs_run.points
      course.reset
      course.points.each { |course_point|
        closest_diff = 999    # sentinel: larger than any plausible degree diff
        closest_point = nil
        run_points.each { |run_point|
          diff = course_point.degrees_diff(run_point)
          if ((diff < proximity) && (diff < closest_diff))
            closest_diff = diff
            closest_point = run_point
            closest_point.degrees_diff = diff
            course.matched(course_point.number, run_point)
          end
        }
      }
      if course.matched?
        puts "Course '#{course.name}' matched vs run id '#{cvs_run.id}'"
        course.display_matches
      else
        puts "course not matched" if false
      end
    }
  end

  # Generates a Google Map html page from a parsed CSV file.
  # argv - [csv_filename]; prints usage help and returns when missing.
  def generate_google_map(argv)
    if (argv == nil)
      puts "ERROR: no ARGV args passed."
    elsif (argv.size < 1)
      puts ""
      puts "Invalid program args; one required - input csv filename"
      puts "  the first arg, csv filename, was produced by the Gooby 'parser.rb' program."
      puts "Please correct the program arguments and try again. \n\n"
    else
      csv_filename = argv[0]
      configuration = Gooby::Configuration.get_config
      generator = Gooby::GoogleMapGenerator.new(csv_filename)
      generator.generate_page(configuration)
    end

  end
end
|
3161
|
-
end # end of module
|
18
|
+
require 'gooby_kernel'
|
19
|
+
require 'gooby_test_helper'
|
20
|
+
require 'gooby_object'
|
21
|
+
require 'gooby_counter_hash'
|
22
|
+
require 'gooby_delim_line'
|
23
|
+
require 'gooby_dttm'
|
24
|
+
require 'gooby_duration'
|
25
|
+
require 'gooby_forerunner_xml_parser'
|
26
|
+
require 'gooby_forerunner_xml_splitter'
|
27
|
+
require 'gooby_training_center_xml_parser'
|
28
|
+
require 'gooby_training_center_xml_splitter'
|
29
|
+
require 'gooby_google_map_generator'
|
30
|
+
require 'gooby_history'
|
31
|
+
require 'gooby_lap'
|
32
|
+
require 'gooby_line'
|
33
|
+
require 'gooby_configuration'
|
34
|
+
require 'gooby_point'
|
35
|
+
require 'gooby_csv_point'
|
36
|
+
require 'gooby_csv_run'
|
37
|
+
require 'gooby_csv_reader'
|
38
|
+
require 'gooby_track_point'
|
39
|
+
require 'gooby_run'
|
40
|
+
require 'gooby_simple_xml_parser'
|
41
|
+
require 'gooby_track'
|
42
|
+
require 'gooby_course'
|
43
|
+
require 'gooby_code_scanner'
|
44
|
+
require 'gooby_command'
|