fit_kit 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6f5d819105551844f5a7e45a8399aa60b403cbc5b8b2adb278c0fb9a8a29631c
4
- data.tar.gz: 1a55ae0af1a09860935dc3dee596a191c42133ae2a070c7c788514e4b4694248
3
+ metadata.gz: 10c537aabfcdc0d4542be231629868eca99e14a6f5ee2cf7a3f313e964f14ddc
4
+ data.tar.gz: 218b078edf26e38088d3da9565be4243ea83a1d5eb56d8f570e600795bd21a3f
5
5
  SHA512:
6
- metadata.gz: 46cd84be49270a1197f815f776cc51627ba58d7abf72d8f0f4b6d80cb9f485ceca1a73745eb62df2696fe2a0cd3b78e2311550bf3f711dfcaf133daeed3abf1e
7
- data.tar.gz: 32993953f6611a2ad85a70426be7525bb558e3cdf201e994e6848d04ab29f8ebdee3e1d722ac432173f05e799196f47a8f38e3d73c6f151c7de83497cdfa3463
6
+ metadata.gz: 648647a0900cb94c8828f48935196b3e4395206c4fb08dd122a0c4300f156a0f3a507f0ee9bf141a1456cb243adbefdb881716b42bf1793ce81e334414336e19
7
+ data.tar.gz: 217bc0fb924c5fe02b444252ac6c4547294bcde9de6ee913459e7fe742efa8d3fce092118272cd8c0559b29f0cbf7733b75b72fe247d01ddd9b0bc37a7a9c72d
data/CHANGELOG.md CHANGED
@@ -1,4 +1,10 @@
1
- ## [Unreleased]
1
+ ## [0.3.0] - 2024-10-16
2
+
3
+ - Simplify parsing to return a hash
4
+
5
+ ## [0.2.0] - 2024-10-15
6
+
7
+ - Simplify to return hash fields for records
2
8
 
3
9
  ## [0.1.0] - 2024-10-14
4
10
 
data/Cargo.lock CHANGED
@@ -1,6 +1,6 @@
1
1
  # This file is automatically @generated by Cargo.
2
2
  # It is not intended for manual editing.
3
- version = 3
3
+ version = 4
4
4
 
5
5
  [[package]]
6
6
  name = "aho-corasick"
@@ -136,9 +136,9 @@ dependencies = [
136
136
 
137
137
  [[package]]
138
138
  name = "fitparser"
139
- version = "0.7.0"
139
+ version = "0.9.0"
140
140
  source = "registry+https://github.com/rust-lang/crates.io-index"
141
- checksum = "3c57b6c1de15f92aa2846a6286eefc115637405a4298eea11a65799aa4ce4177"
141
+ checksum = "dc8dfc12dd3f7ec76da4609addc6700a3d5c9e37e8c0e924c7fbd61bc52d70a7"
142
142
  dependencies = [
143
143
  "chrono",
144
144
  "nom",
@@ -228,9 +228,9 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
228
228
 
229
229
  [[package]]
230
230
  name = "magnus"
231
- version = "0.6.4"
231
+ version = "0.7.1"
232
232
  source = "registry+https://github.com/rust-lang/crates.io-index"
233
- checksum = "b1597ef40aa8c36be098249e82c9a20cf7199278ac1c1a1a995eeead6a184479"
233
+ checksum = "3d87ae53030f3a22e83879e666cb94e58a7bdf31706878a0ba48752994146dab"
234
234
  dependencies = [
235
235
  "magnus-macros",
236
236
  "rb-sys",
data/README.md CHANGED
@@ -1,5 +1,7 @@
1
1
  # FitKit
2
2
 
3
+ > Not production ready yet.
4
+
3
5
  `fit_kit` is a small Ruby gem that wraps the Rust crate [fitparse-rs](https://github.com/stadelmanma/fitparse-rs) to provide efficient and fast parsing of Garmin FIT files.
4
6
 
5
7
  ## Installation
@@ -17,9 +19,20 @@ If bundler is not being used to manage dependencies, install the gem by executin
17
19
  ```ruby
18
20
  test_fit_file = File.join(Dir.pwd, "example.fit")
19
21
  fit_data_records = ::FitKit.parse_fit_file(test_fit_file)
20
- # [RFitDataRecord, RFitDataRecord, RFitDataRecord ...]
22
+ # { record: [{...}, {...}], session: [{...}], lap: [..], activity: [...] }
21
23
  ```
22
24
 
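For illustration, here is a hedged sketch of walking that hash: the `:record` key mirrors the comment above, `heart_rate` is an assumed field name, and the `:value`/`:units` pair reflects how the extension builds each field hash.

```ruby
# Illustrative only: actual field names depend on the FIT file.
fit_data_records[:record].each do |record|
  hr = record[:heart_rate] # assumed field; e.g. { value: 142, units: "bpm" }
  next unless hr

  puts "#{hr[:value]} #{hr[:units]}"
end
```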
25
+ ## Features
26
+ There are a couple more built-in features that can simplify consuming the data significantly (see the sketch after this list):
27
+
28
+ - `records_hash` - All records in a Ruby hash format
29
+ - `avg_for` - Average of a given field
30
+ - `elevation_gain` - Calculate the overall elevation gain; also allows passing in a specific field
31
+ - `calculate_partition_indices` - Calculate the partition indices for a given field
32
+ - `partition_stats_for_fields` - Calculate partition stats for the given fields; useful for calculating splits data
33
+ - `sample_series_for_records` - Sample series for a given field, useful for plotting
34
+ - `zone_time_for` - Time spent in each heart rate zone
35
+
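A hedged sketch of calling these helpers (the Rust extension defines them on the parse result; the field names, zone boundaries, and return shapes shown here are illustrative assumptions):

```ruby
result = ::FitKit.parse_fit_file("example.fit")

# Average of a numeric field across record messages; assumed to return [value, units].
avg_hr, hr_units = result.avg_for("heart_rate")

# Overall elevation gain for an assumed altitude field.
gain, gain_units = result.elevation_gain("enhanced_altitude")

# Time spent in each [min, max] zone for an assumed field.
zones = [[0.0, 120.0], [120.0, 150.0], [150.0, 170.0], [170.0, 200.0]]
time_in_zones = result.zone_time_for(zones, "heart_rate")
```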
23
36
  ## Performance
24
37
  For reference, parsing __4090__ fit files on my M1 Mac Mini (16G, 8 cores) took 6 seconds (in parallel):
25
38
 
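The benchmark script itself falls outside this diff; purely as an illustration (not something this package ships), a parallel run of that shape could be driven with the third-party `parallel` gem:

```ruby
require "parallel"
require "fit_kit"

fit_files = Dir.glob("activities/**/*.fit")

# Parse the files across 8 worker processes (matching the 8 cores above).
results = Parallel.map(fit_files, in_processes: 8) do |path|
  FitKit.parse_fit_file(path)
end
```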
@@ -10,5 +10,5 @@ publish = false
10
10
  crate-type = ["cdylib"]
11
11
 
12
12
  [dependencies]
13
- fitparser = "0.7.0"
14
- magnus = { version = "0.6.2" }
13
+ fitparser = "0.9.0"
14
+ magnus = { version = "0.7.1" }
@@ -1,75 +1,524 @@
1
- use fitparser::{self, FitDataRecord, Value};
1
+ use fitparser::{self, profile::MesgNum, FitDataField, FitDataRecord, Value};
2
2
  use magnus::{function, method, prelude::*, Error, IntoValue, RArray, RHash, Ruby, Symbol};
3
- use std::fs::File;
3
+ use std::{
4
+ collections::{BTreeMap, HashMap},
5
+ fs::File,
6
+ };
7
+
8
+ /// Extension methods for FitDataRecord
9
+ pub trait FitDataRecordExt {
10
+ fn timestamp(&self) -> Option<i64>;
11
+ fn field_value(&self, field_name: &str) -> Option<(Value, String)>;
12
+ }
13
+
14
+ impl FitDataRecordExt for FitDataRecord {
15
+ fn timestamp(&self) -> Option<i64> {
16
+ self.fields()
17
+ .iter()
18
+ .find(|field| field.name() == "timestamp")
19
+ .and_then(|field| match field.value() {
20
+ Value::Timestamp(v) => Some(v.timestamp()),
21
+ _ => None,
22
+ })
23
+ .map(|v| v as i64)
24
+ }
25
+
26
+ fn field_value(&self, field_name: &str) -> Option<(Value, String)> {
27
+ let field = self
28
+ .fields()
29
+ .iter()
30
+ .find(|field| field.name() == field_name)?;
31
+
32
+ Some((field.value().clone(), field.units().to_string()))
33
+ }
34
+ }
35
+
36
+ /// Extension methods for Vec<FitDataRecord>
37
+ pub trait FitDataRecordVecExt {
38
+ fn sample_series_for_records(
39
+ &self,
40
+ field_name: String,
41
+ num_of_points: u16,
42
+ ) -> Vec<(i64, f64, String)>;
43
+ fn aggregate_field_values(&self, records: Vec<&FitDataRecord>, field: &str) -> (f64, String);
44
+ }
45
+
46
+ impl FitDataRecordVecExt for Vec<FitDataRecord> {
47
+ fn sample_series_for_records(
48
+ &self,
49
+ field_name: String,
50
+ num_of_points: u16,
51
+ ) -> Vec<(i64, f64, String)> {
52
+ // if there are no records, return empty vec
53
+ if self.is_empty() {
54
+ return vec![];
55
+ }
56
+
57
+ // find the min and max timestamp
58
+ let min_timestamp = self
59
+ .iter()
60
+ .find(|r| r.kind() == MesgNum::Record && r.timestamp().is_some())
61
+ .and_then(|r| r.timestamp());
62
+
63
+ let max_timestamp = self
64
+ .iter()
65
+ .rev()
66
+ .find(|r| r.kind() == MesgNum::Record && r.timestamp().is_some())
67
+ .and_then(|r| r.timestamp());
68
+
69
+ // if both exist and min is less than max, we proceed
70
+ // otherwise return empty vec
71
+ let (min, max) = match (min_timestamp, max_timestamp) {
72
+ (Some(min), Some(max)) if min < max => (min, max),
73
+ _ => return vec![],
74
+ };
75
+
76
+ print!("min: {:?}, max: {:?}", min, max);
77
+
78
+ // calculate the interval we need to sample
79
+ let total_duration_in_seconds = max - min;
80
+ let interval = total_duration_in_seconds / num_of_points as i64;
81
+
82
+ // now we can group them into buckets
83
+ let mut sample_data = BTreeMap::new();
84
+ for record in self.into_iter().filter(|r| r.timestamp().is_some()) {
85
+ let timestamp = record.timestamp().unwrap() as i64;
86
+ let bucket = ((timestamp - min) / interval as i64) as u16;
87
+ // insert it into the bucket
88
+ sample_data.entry(bucket).or_insert(vec![]).push(record);
89
+ }
90
+
91
+ // now we can sample the data
92
+ sample_data
93
+ .into_iter()
94
+ .map(|(bucket, records)| {
95
+ let timestamp = min + (bucket as i64 * interval) as i64;
96
+ let (value, units) = self.aggregate_field_values(records, &field_name);
97
+ (timestamp, value, units)
98
+ })
99
+ .collect()
100
+ }
101
+
102
+ fn aggregate_field_values(&self, records: Vec<&FitDataRecord>, field: &str) -> (f64, String) {
103
+ // Use the new field method
104
+ let values: Vec<f64> = records
105
+ .iter()
106
+ .filter_map(|record| {
107
+ record
108
+ .field_value(field)
109
+ .and_then(|(v, _)| MyValue(v).as_f64())
110
+ })
111
+ .collect();
112
+
113
+ let units = records
114
+ .iter()
115
+ .find_map(|record| record.field_value(field).map(|(_, u)| u))
116
+ .unwrap_or_else(|| String::from(""));
117
+
118
+ // Calculate average (or use another aggregation method)
119
+ if values.is_empty() {
120
+ (0.0, String::from(""))
121
+ } else {
122
+ (values.iter().sum::<f64>() / values.len() as f64, units)
123
+ }
124
+ }
125
+ }
126
+
127
+ // wrap fitparse value
128
+ struct MyValue(Value);
129
+
130
+ impl MyValue {
131
+ // turn value into f64
132
+ fn as_f64(&self) -> Option<f64> {
133
+ match &self.0 {
134
+ Value::SInt8(i) => Some(*i as f64),
135
+ Value::UInt8(u) => Some(*u as f64),
136
+ Value::SInt16(i) => Some(*i as f64),
137
+ Value::UInt16(u) => Some(*u as f64),
138
+ Value::SInt32(i) => Some(*i as f64),
139
+ Value::UInt32(u) => Some(*u as f64),
140
+ Value::Float32(f) => Some(*f as f64),
141
+ Value::Float64(f) => Some(*f),
142
+ Value::UInt8z(u) => Some(*u as f64),
143
+ Value::UInt16z(u) => Some(*u as f64),
144
+ Value::UInt32z(u) => Some(*u as f64),
145
+ Value::SInt64(i) => Some(*i as f64),
146
+ Value::UInt64(u) => Some(*u as f64),
147
+ Value::UInt64z(u) => Some(*u as f64),
148
+ _ => None, // Handle any other variants that don't convert to f64
149
+ }
150
+ }
151
+ }
152
+
153
+ #[magnus::wrap(class = "FitParseResult")]
154
+ struct FitParseResult(Vec<FitDataRecord>);
155
+
156
+ impl FitParseResult {
157
+ /**
158
+ * Returns a Ruby hash of all the records,
159
+ * keyed by record kind
160
+ */
161
+ fn records_hash(&self) -> RHash {
162
+ let mut records_by_kind_vec: Vec<(String, Vec<&FitDataRecord>)> = {
163
+ let mut map: HashMap<String, Vec<&FitDataRecord>> = HashMap::new();
164
+
165
+ for record in self.0.iter() {
166
+ let kind = record.kind().to_string();
167
+ map.entry(kind).or_insert_with(Vec::new).push(record);
168
+ }
169
+
170
+ map.into_iter().collect()
171
+ };
172
+
173
+ // Sort the vector by the first element (kind string)
174
+ records_by_kind_vec.sort_by(|a, b| a.0.cmp(&b.0));
175
+
176
+ // now let's group by the record by kind
177
+ let result_hash = RHash::new();
178
+ for (kind, kind_records) in records_by_kind_vec {
179
+ // turn records into rarray
180
+ let array = RArray::new();
181
+ for record in kind_records {
182
+ // TODO here do not pass RFitDataRecord
183
+ // turn it into fields_hash directly
184
+ array.push(get_fields_hash(record)).unwrap();
185
+ }
186
+
187
+ result_hash.aset(Symbol::new(kind), array).unwrap();
188
+ }
189
+
190
+ result_hash
191
+ }
192
+
193
+ // summary methods
194
+ fn avg_for(&self, field_name: String) -> (f64, String) {
195
+ // passing the reference
196
+ self.avg_for_records(&self.0, field_name)
197
+ }
198
+
199
+ fn elevation_gain(&self, field_name: String) -> (f64, String) {
200
+ self.elevation_gain_for_records(&self.0, field_name)
201
+ }
202
+
203
+ // given a bunch of records, calculate the elevation gain
204
+ fn elevation_gain_for_records(
205
+ &self,
206
+ records: &Vec<FitDataRecord>,
207
+ field_name: String,
208
+ ) -> (f64, String) {
209
+ let fields: Vec<&FitDataField> = records
210
+ .iter()
211
+ .filter(|r| r.kind() == MesgNum::Record)
212
+ .flat_map(|r| r.fields().iter().filter(|field| field.name() == field_name))
213
+ .collect();
214
+
215
+ let count = fields.len();
216
+
217
+ if count == 0 {
218
+ return (0.0, String::from(""));
219
+ }
220
+
221
+ let units = fields.first().unwrap().units();
222
+
223
+ let elevation_gain_sum = fields.windows(2).fold(0.0, |acc, window| {
224
+ // find the field first
225
+ let value1 = MyValue(window[1].value().clone()).as_f64();
226
+ let value0 = MyValue(window[0].value().clone()).as_f64();
227
+
228
+ match (value1, value0) {
229
+ (Some(v1), Some(v0)) if v1 > v0 => acc + (v1 - v0),
230
+ _ => acc,
231
+ }
232
+ });
233
+
234
+ (elevation_gain_sum, String::from(units))
235
+ }
236
+
237
+ fn partition_stats_for_fields(
238
+ &self,
239
+ by_field: String,
240
+ partition_distance: f64,
241
+ fields: Vec<String>,
242
+ ) -> Vec<Vec<(String, (f64, String))>> {
243
+ // first get the partitions
244
+ let partition_indices = self.calculate_partition_indices(partition_distance, by_field);
245
+
246
+ partition_indices
247
+ .windows(2)
248
+ .map(|window| {
249
+ let start = window[0];
250
+ let end = window[1];
251
+ let partition_records = self.0[start..=end].to_vec();
252
+
253
+ // now map each requested field to its average over this partition
254
+ let results: Vec<(String, (f64, String))> = fields
255
+ .iter()
256
+ .map(|field_name| {
257
+ (
258
+ field_name.clone(),
259
+ self.avg_for_records(&partition_records, field_name.clone()),
260
+ )
261
+ })
262
+ .collect();
263
+
264
+ results
265
+ })
266
+ .collect()
267
+ }
268
+
269
+ fn zone_time_for(&self, zones: Vec<(f64, f64)>, field_name: String) -> Vec<((f64, f64), f64)> {
270
+ // filter records
271
+ let filtered_records: Vec<&FitDataRecord> = self
272
+ .0
273
+ .iter()
274
+ .filter(|r| r.kind() == MesgNum::Record)
275
+ .filter(|r| r.fields().iter().any(|f| f.name() == field_name))
276
+ .collect();
277
+ self.zone_time_for_records(filtered_records, zones, field_name)
278
+ }
279
+
280
+ /// Given a list of records, and list of zones, calculate the time spent in each zone
281
+ fn zone_time_for_records(
282
+ &self,
283
+ records: Vec<&FitDataRecord>,
284
+ zones: Vec<(f64, f64)>,
285
+ field_name: String,
286
+ ) -> Vec<((f64, f64), f64)> {
287
+ let mut zone_times: Vec<((f64, f64), f64)> =
288
+ zones.iter().map(|z| (z.clone(), 0.0)).collect();
289
+
290
+ for window in records.windows(2) {
291
+ let value1 = window[1]
292
+ .fields()
293
+ .iter()
294
+ .find(|f| f.name() == field_name)
295
+ .unwrap()
296
+ .value();
297
+
298
+ // using MyValue to extract the value
299
+ let value1 = match MyValue(value1.clone()).as_f64() {
300
+ Some(v) => v,
301
+ None => continue,
302
+ };
303
+
304
+ // we need to find the zone from start again
305
+ // because the value could drop below the current zone
306
+ // always reset the current zone index to 0
307
+ let mut current_zone_index = 0;
308
+ while current_zone_index < zones.len() - 1 && value1 > zones[current_zone_index].1 {
309
+ // moving to next zone
310
+ // as current zone index is less than the last zone
311
+ // also the value is greater than the current zone's max
312
+ current_zone_index += 1;
313
+ }
314
+
315
+ // find the timestamp value
316
+ let timestamp1 = window[1]
317
+ .fields()
318
+ .iter()
319
+ .find(|f| f.name() == "timestamp")
320
+ .unwrap()
321
+ .value();
322
+
323
+ let timestamp1_value = match timestamp1 {
324
+ Value::Timestamp(t) => t.timestamp(),
325
+ _ => continue,
326
+ };
327
+
328
+ let timestamp0 = window[0]
329
+ .fields()
330
+ .iter()
331
+ .find(|f| f.name() == "timestamp")
332
+ .unwrap()
333
+ .value();
334
+
335
+ let timestamp0_value = match timestamp0 {
336
+ Value::Timestamp(t) => t.timestamp(),
337
+ _ => continue,
338
+ };
339
+
340
+ let time_diff = timestamp1_value - timestamp0_value;
341
+ zone_times[current_zone_index].1 += time_diff as f64;
342
+ }
343
+
344
+ zone_times
345
+ }
346
+
347
+ fn sample_series_for_records(
348
+ &self,
349
+ field_name: String,
350
+ num_of_points: u16,
351
+ ) -> Vec<(i64, f64, String)> {
352
+ self.0.sample_series_for_records(field_name, num_of_points)
353
+ }
354
+
355
+ /// Calculate the average for a given field name for a list of records
356
+ fn avg_for_records(&self, records: &Vec<FitDataRecord>, field_name: String) -> (f64, String) {
357
+ // only get the record types
358
+ let fields: Vec<&FitDataField> = records
359
+ .iter()
360
+ .filter(|r| r.kind() == MesgNum::Record)
361
+ .flat_map(|r| r.fields().iter().filter(|field| field.name() == field_name))
362
+ .collect();
363
+
364
+ // filter-map so we only sum values that can actually be summed
365
+ // we only care about int, float values
366
+ let sumable_values: Vec<f64> = fields
367
+ .iter()
368
+ .filter_map(|field| MyValue(field.value().clone()).as_f64())
369
+ .collect();
370
+
371
+ let sum: f64 = sumable_values.iter().sum();
372
+ let count = sumable_values.len();
373
+
374
+ if count == 0 {
375
+ (0.0, String::from(""))
376
+ } else {
377
+ // we also need to return the unit
378
+ let units = fields.first().unwrap().units();
379
+ let avg_value = sum / count as f64;
380
+ (avg_value, String::from(units))
381
+ }
382
+ }
383
+
384
+ fn calculate_partition_indices(
385
+ &self,
386
+ partition_distance: f64,
387
+ field_name: String,
388
+ ) -> Vec<usize> {
389
+ let records: Vec<&FitDataRecord> = self
390
+ .0
391
+ .iter()
392
+ .filter(|r| r.kind() == MesgNum::Record)
393
+ .collect();
394
+ self.calculate_partition_indices_for_records(records, partition_distance, field_name)
395
+ }
396
+
397
+ /// Given a list of records, calculate the partition indices based on the field name and partition distance
398
+ fn calculate_partition_indices_for_records(
399
+ &self,
400
+ records: Vec<&FitDataRecord>,
401
+ partition_distance: f64,
402
+ field_name: String,
403
+ ) -> Vec<usize> {
404
+ let mut partition_indices = vec![0]; // always include the start index
405
+ let mut start_distance = 0.0;
406
+
407
+ // let's loop
408
+ for (index, record) in records.iter().enumerate().skip(1) {
409
+ let fields: Vec<&FitDataField> = record
410
+ .fields()
411
+ .iter()
412
+ .filter(|f| f.name() == field_name)
413
+ .collect();
414
+
415
+ let distance_field = fields
416
+ .first()
417
+ .and_then(|f| MyValue(f.value().clone()).as_f64());
418
+ match distance_field {
419
+ Some(distance_value) => {
420
+ if distance_value - start_distance >= partition_distance {
421
+ // found it
422
+ partition_indices.push(index);
423
+ start_distance = distance_value;
424
+ }
425
+ }
426
+ None => {}
427
+ }
428
+ }
429
+
430
+ // now we have the whole array
431
+ // if the last record is not there, add it
432
+ if *partition_indices.last().unwrap() != records.len() - 1 {
433
+ partition_indices.push(records.len() - 1);
434
+ }
435
+
436
+ partition_indices
437
+ }
438
+ }
4
439
 
5
440
  // recursive method to turn Fit value into magnus::Value
6
- fn value_to_rb_value(value: &Value) -> magnus::Value {
441
+ fn value_to_rb_value(value: &Value) -> Option<magnus::Value> {
7
442
  match value {
8
- Value::Timestamp(t) => t.timestamp().into_value(),
9
- Value::SInt8(i) => i.into_value(),
10
- Value::UInt8(u) => u.into_value(),
11
- Value::SInt16(i) => i.into_value(),
12
- Value::UInt16(u) => u.into_value(),
13
- Value::SInt32(i) => i.into_value(),
14
- Value::UInt32(u) => u.into_value(),
15
- Value::String(s) => s.clone().into_value(),
16
- Value::Float32(f) => f.into_value(),
17
- Value::Float64(f) => f.into_value(),
18
- Value::UInt8z(u) => u.into_value(),
19
- Value::UInt16z(u) => u.into_value(),
20
- Value::UInt32z(u) => u.into_value(),
21
- Value::Byte(b) => b.into_value(),
22
- Value::SInt64(i) => i.into_value(),
23
- Value::UInt64(u) => u.into_value(),
24
- Value::UInt64z(u) => u.into_value(),
25
- Value::Enum(e) => e.into_value(),
443
+ Value::Timestamp(t) => Some(t.timestamp().into_value()),
444
+ Value::SInt8(i) => Some(i.into_value()),
445
+ Value::UInt8(u) => Some(u.into_value()),
446
+ Value::SInt16(i) => Some(i.into_value()),
447
+ Value::UInt16(u) => Some(u.into_value()),
448
+ Value::SInt32(i) => Some(i.into_value()),
449
+ Value::UInt32(u) => Some(u.into_value()),
450
+ Value::String(s) => Some(s.clone().into_value()),
451
+ Value::Float32(f) => Some(f.into_value()),
452
+ Value::Float64(f) => Some(f.into_value()),
453
+ Value::UInt8z(u) => Some(u.into_value()),
454
+ Value::UInt16z(u) => Some(u.into_value()),
455
+ Value::UInt32z(u) => Some(u.into_value()),
456
+ Value::Byte(b) => Some(b.into_value()),
457
+ Value::SInt64(i) => Some(i.into_value()),
458
+ Value::UInt64(u) => Some(u.into_value()),
459
+ Value::UInt64z(u) => Some(u.into_value()),
460
+ Value::Enum(e) => Some(e.into_value()),
26
461
  Value::Array(arr) => {
27
462
  let rb_array = RArray::new();
28
463
  for value in arr {
29
464
  rb_array.push(value_to_rb_value(value)).unwrap();
30
465
  }
31
- rb_array.into_value()
466
+ Some(rb_array.into_value())
32
467
  }
468
+ Value::Invalid => None,
33
469
  }
34
470
  }
35
471
 
36
- ///////////////////////// RFitDataRecord ///////////////////////////
37
- #[magnus::wrap(class = "RFitDataRecord")]
38
- struct RFitDataRecord(FitDataRecord);
39
-
40
- impl RFitDataRecord {
41
- fn kind(&self) -> String {
42
- self.0.kind().to_string()
43
- }
44
-
45
- fn fields_hash(&self) -> RHash {
46
- let hash = RHash::new();
47
- for field in self.0.fields() {
48
- let value = value_to_rb_value(field.value());
49
- let pair = RHash::new();
50
- pair.aset(Symbol::new("units"), field.units()).unwrap();
51
- pair.aset(Symbol::new("value"), value).unwrap();
52
- // here we add the stuff to the hash
53
- let field_name_symbol = Symbol::new(field.name());
54
- hash.aset(field_name_symbol, pair).unwrap();
472
+ // Turning FitDataRecord into a hash
473
+ fn get_fields_hash(record: &FitDataRecord) -> RHash {
474
+ let hash = RHash::new();
475
+ for field in record.fields() {
476
+ match value_to_rb_value(field.value()) {
477
+ Some(value) => {
478
+ let pair = RHash::new();
479
+ pair.aset(Symbol::new("units"), field.units()).unwrap();
480
+ pair.aset(Symbol::new("value"), value).unwrap();
481
+ // here we add the stuff to the hash
482
+ let field_name_symbol = Symbol::new(field.name());
483
+ hash.aset(field_name_symbol, pair).unwrap();
484
+ }
485
+ None => {}
55
486
  }
56
-
57
- hash
58
487
  }
488
+
489
+ hash
59
490
  }
60
491
 
61
492
  // Here we define the Ruby class exposed by this extension
62
493
  // and register its instance methods
63
494
  fn define_ruby_classes(ruby: &Ruby) -> Result<(), magnus::Error> {
64
495
  // define the class and attach its methods
65
- let data_record_class = ruby.define_class("RFitDataRecord", ruby.class_object())?;
66
- data_record_class.define_method("kind", method!(RFitDataRecord::kind, 0))?;
67
- data_record_class.define_method("fields_hash", method!(RFitDataRecord::fields_hash, 0))?;
496
+ let data_record_class = ruby.define_class("FitParseResult", ruby.class_object())?;
497
+ data_record_class.define_method("records_hash", method!(FitParseResult::records_hash, 0))?;
498
+ data_record_class.define_method("avg_for", method!(FitParseResult::avg_for, 1))?;
499
+ data_record_class
500
+ .define_method("elevation_gain", method!(FitParseResult::elevation_gain, 1))?;
501
+ data_record_class.define_method(
502
+ "calculate_partition_indices",
503
+ method!(FitParseResult::calculate_partition_indices, 2),
504
+ )?;
505
+
506
+ data_record_class.define_method(
507
+ "partition_stats_for_fields",
508
+ method!(FitParseResult::partition_stats_for_fields, 3),
509
+ )?;
510
+
511
+ data_record_class.define_method(
512
+ "sample_series_for_records",
513
+ method!(FitParseResult::sample_series_for_records, 2),
514
+ )?;
515
+
516
+ data_record_class.define_method("zone_time_for", method!(FitParseResult::zone_time_for, 2))?;
68
517
 
69
518
  Ok(())
70
519
  }
71
520
 
72
- fn parse_fit_file(file_path: String) -> Result<RArray, magnus::Error> {
521
+ fn parse_fit_file(file_path: String) -> Result<FitParseResult, magnus::Error> {
73
522
  let mut fp = File::open(file_path)
74
523
  .map_err(|e| Error::new(Ruby::get().unwrap().exception_io_error(), e.to_string()))?;
75
524
  let data = fitparser::from_reader(&mut fp).map_err(|e| {
@@ -79,20 +528,16 @@ fn parse_fit_file(file_path: String) -> Result<RArray, magnus::Error> {
79
528
  )
80
529
  })?;
81
530
 
82
- // finally we have the result array of record
83
- let array = RArray::new();
84
- for record in data {
85
- array.push(RFitDataRecord(record)).unwrap();
86
- }
531
+ let result = FitParseResult(data);
87
532
 
88
- Ok(array)
533
+ Ok(result)
89
534
  }
90
535
 
91
536
  #[magnus::init]
92
537
  fn init(ruby: &Ruby) -> Result<(), Error> {
93
538
  let module = ruby.define_module("FitKit")?;
94
- let _ = define_ruby_classes(&ruby);
95
539
 
540
+ let _ = define_ruby_classes(ruby);
96
541
  module.define_singleton_method("parse_fit_file", function!(parse_fit_file, 1))?;
97
542
 
98
543
  Ok(())
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module FitKit
4
- VERSION = "0.2.0"
4
+ VERSION = "0.4.0"
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fit_kit
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.0
4
+ version: 0.4.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - 29decibel
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2024-10-16 00:00:00.000000000 Z
11
+ date: 2025-03-17 00:00:00.000000000 Z
12
12
  dependencies: []
13
13
  description: Garmin fit file parser wrapping Rust crate fitparse_rs.
14
14
  email: