python-fitparse 2.0.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fitparse/__init__.py ADDED
@@ -0,0 +1,10 @@
+ #!/usr/bin/env python
+
+ # Make classes available
+ from fitparse.base import FitFile, FitFileDecoder, UncachedFitFile, \
+     FitParseError, CacheMixin, DataProcessorMixin
+ from fitparse.records import DataMessage
+ from fitparse.processors import FitFileDataProcessor, StandardUnitsDataProcessor
+
+
+ __version__ = '1.2.0'
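For orientation, a minimal usage sketch of the API re-exported here, following the usual fitparse pattern (the file path is hypothetical):

    from fitparse import FitFile, FitParseError

    try:
        fitfile = FitFile('activity.fit')
        # 'record' messages carry the per-sample data (timestamp, position, heart rate, ...)
        for record in fitfile.get_messages('record'):
            for field in record:
                if field.units:
                    print(' * %s: %s (%s)' % (field.name, field.value, field.units))
    except FitParseError as e:
        print('Could not parse FIT file:', e)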
fitparse/base.py ADDED
@@ -0,0 +1,625 @@
+ #!/usr/bin/env python
+
+ import io
+ import os
+ import struct
+ import warnings
+
+ from fitparse.processors import FitFileDataProcessor
+ from fitparse.profile import FIELD_TYPE_TIMESTAMP, MESSAGE_TYPES
+ from fitparse.records import (
+     Crc, DevField, DataMessage, FieldData, FieldDefinition, DevFieldDefinition, DefinitionMessage,
+     MessageHeader, BASE_TYPES, BASE_TYPE_BYTE,
+ )
+ from fitparse.utils import fileish_open, is_iterable, FitParseError, FitEOFError, FitCRCError, FitHeaderError
+
+
+ class DeveloperDataMixin:
+     def __init__(self, *args, check_developer_data=True, **kwargs):
+         self.check_developer_data = check_developer_data
+         self.dev_types = {}
+
+         super().__init__(*args, **kwargs)
+
+     def _append_dev_data_id(self, dev_data_index, application_id=None, fields=None):
+         if fields is None:
+             fields = {}
+
+         # Note that nothing in the spec says overwriting an existing type is invalid
+         self.dev_types[dev_data_index] = {
+             'dev_data_index': dev_data_index,
+             'application_id': application_id,
+             'fields': fields
+         }
+
+     def add_dev_data_id(self, message):
+         dev_data_index = message.get_raw_value('developer_data_index')
+         application_id = message.get_raw_value('application_id')
+
+         self._append_dev_data_id(dev_data_index, application_id)
+
+     def _append_dev_field_description(self, dev_data_index, field_def_num, type=BASE_TYPE_BYTE, name=None,
+                                       units=None, native_field_num=None):
+         if dev_data_index not in self.dev_types:
+             if self.check_developer_data:
+                 raise FitParseError("No such dev_data_index=%s found" % (dev_data_index))
+
+             warnings.warn(
+                 "Dev type for dev_data_index=%s missing. Adding dummy dev type." % (dev_data_index)
+             )
+             self._append_dev_data_id(dev_data_index)
+
+         self.dev_types[dev_data_index]["fields"][field_def_num] = DevField(
+             dev_data_index=dev_data_index,
+             def_num=field_def_num,
+             type=type,
+             name=name,
+             units=units,
+             native_field_num=native_field_num
+         )
+
+     def add_dev_field_description(self, message):
+         dev_data_index = message.get_raw_value('developer_data_index')
+         field_def_num = message.get_raw_value('field_definition_number')
+         base_type_id = message.get_raw_value('fit_base_type_id')
+         field_name = message.get_raw_value('field_name') or "unnamed_dev_field_%s" % field_def_num
+         units = message.get_raw_value("units")
+         native_field_num = message.get_raw_value('native_field_num')
+
+         if dev_data_index not in self.dev_types:
+             if self.check_developer_data:
+                 raise FitParseError("No such dev_data_index=%s found" % (dev_data_index))
+
+             warnings.warn(
+                 "Dev type for dev_data_index=%s missing. Adding dummy dev type." % (dev_data_index)
+             )
+             self._append_dev_data_id(dev_data_index)
+
+         fields = self.dev_types[int(dev_data_index)]['fields']
+
+         # Note that nothing in the spec says overwriting an existing field is invalid
+         fields[field_def_num] = DevField(
+             dev_data_index=dev_data_index,
+             def_num=field_def_num,
+             type=BASE_TYPES[base_type_id],
+             name=field_name,
+             units=units,
+             native_field_num=native_field_num
+         )
+
+     def get_dev_type(self, dev_data_index, field_def_num):
+         if dev_data_index not in self.dev_types:
+             if self.check_developer_data:
+                 raise FitParseError(
+                     f"No such dev_data_index={dev_data_index} found when looking up field {field_def_num}"
+                 )
+
+             warnings.warn(
+                 "Dev type for dev_data_index=%s missing. Adding dummy dev type." % (dev_data_index)
+             )
+             self._append_dev_data_id(dev_data_index)
+
+         dev_type = self.dev_types[dev_data_index]
+
+         if field_def_num not in dev_type['fields']:
+             if self.check_developer_data:
+                 raise FitParseError(
+                     f"No such field {field_def_num} for dev_data_index {dev_data_index}"
+                 )
+
+             warnings.warn(
+                 f"Field {field_def_num} for dev_data_index {dev_data_index} missing. Adding dummy field."
+             )
+             self._append_dev_field_description(
+                 dev_data_index=dev_data_index,
+                 field_def_num=field_def_num
+             )
+
+         return dev_type['fields'][field_def_num]
+
+
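The mixin above either raises FitParseError or degrades to a warning plus a dummy type, depending on check_developer_data. A sketch of the tolerant mode (the kwarg is forwarded through FitFile's **kwargs; the file name is hypothetical):

    from fitparse import FitFile

    # Files that reference developer fields without the matching
    # developer_data_id / field_description messages get dummy byte-typed
    # fields and a warning instead of a FitParseError.
    fitfile = FitFile('missing_dev_fields.fit', check_developer_data=False)
    messages = list(fitfile.get_messages())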
+ class FitFileDecoder(DeveloperDataMixin):
+     """Basic decoder for fit files"""
+
+     def __init__(self, fileish, *args, check_crc=True, data_processor=None, **kwargs):
+         self._file = fileish_open(fileish, 'rb')
+
+         self.check_crc = check_crc
+         self._crc = None
+
+         # Get total filesize
+         self._file.seek(0, os.SEEK_END)
+         self._filesize = self._file.tell()
+         self._file.seek(0, os.SEEK_SET)
+
+         # Start off by parsing the file header (sets initial attribute values)
+         self._parse_file_header()
+
+         super().__init__(*args, **kwargs)
+
+     def __del__(self):
+         self.close()
+
+     def close(self):
+         if hasattr(self, "_file") and self._file and hasattr(self._file, "close"):
+             self._file.close()
+             self._file = None
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, *_):
+         self.close()
+
+     ##########
+     # Private low-level utility methods for reading the fit file
+
+     def _read(self, size):
+         if size <= 0:
+             return None
+         data = self._file.read(size)
+         if size != len(data):
+             raise FitEOFError("Tried to read %d bytes from .FIT file but got %d" % (size, len(data)))
+
+         if self.check_crc:
+             self._crc.update(data)
+         self._bytes_left -= len(data)
+         return data
+
+     def _read_struct(self, fmt, endian='<', data=None, always_tuple=False):
+         fmt_with_endian = endian + fmt
+         size = struct.calcsize(fmt_with_endian)
+         if size <= 0:
+             raise FitParseError("Invalid struct format: %s" % fmt_with_endian)
+
+         if data is None:
+             data = self._read(size)
+
+         unpacked = struct.unpack(fmt_with_endian, data)
+         # Flatten the tuple if it's got only one value
+         return unpacked if (len(unpacked) > 1) or always_tuple else unpacked[0]
+
+     def _read_and_assert_crc(self, allow_zero=False):
+         # CRC calculation is little endian from SDK
+         # TODO - How to handle the case of an unterminated file? Error out and have the user retry with check_crc=False?
+         crc_computed, crc_read = self._crc.value, self._read_struct(Crc.FMT)
+         if not self.check_crc:
+             return
+         if crc_computed == crc_read or (allow_zero and crc_read == 0):
+             return
+         raise FitCRCError('CRC Mismatch [computed: {}, read: {}]'.format(
+             Crc.format(crc_computed), Crc.format(crc_read)))
+
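The TODO above suggests the intended recovery path: catch FitCRCError and retry with CRC checking disabled. A sketch (the helper name is hypothetical):

    from fitparse import FitFile
    from fitparse.utils import FitCRCError

    def read_messages(path):
        try:
            return list(FitFile(path, check_crc=True).get_messages())
        except FitCRCError:
            # Accept possibly corrupt data rather than failing outright
            return list(FitFile(path, check_crc=False).get_messages())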
+     ##########
+     # Private Data Parsing Methods
+
+     def _parse_file_header(self):
+
+         # Initialize data
+         self._accumulators = {}
+         self._bytes_left = -1
+         self._complete = False
+         self._compressed_ts_accumulator = 0
+         self._crc = Crc()
+         self._local_mesgs = {}
+
+         header_data = self._read(12)
+         if header_data[8:12] != b'.FIT':
+             raise FitHeaderError("Invalid .FIT File Header")
+
+         # Larger fields are explicitly little endian from SDK
+         header_size, protocol_ver_enc, profile_ver_enc, data_size = self._read_struct('2BHI4x', data=header_data)
+
+         # Decode the same way the SDK does
+         self.protocol_version = float("%d.%d" % (protocol_ver_enc >> 4, protocol_ver_enc & ((1 << 4) - 1)))
+         self.profile_version = float("%d.%d" % (profile_ver_enc / 100, profile_ver_enc % 100))
+
+         # Consume extra header information
+         extra_header_size = header_size - 12
+         if extra_header_size > 0:
+             # Make sure the extra field in the header is at least 2 bytes, enough to hold the CRC
+             if extra_header_size < 2:
+                 raise FitHeaderError('Irregular File Header Size')
+
+             # Consume the extra two bytes of header and check the CRC
+             self._read_and_assert_crc(allow_zero=True)
+
+             # Consume any extra bytes, since header size "may be increased in
+             # future to add additional optional information" (from SDK)
+             self._read(extra_header_size - 2)
+
+         # After we've consumed the header, set the bytes left to be read
+         self._bytes_left = data_size
+
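A worked example of the version decoding above (encoded values are illustrative):

    protocol_ver_enc = 0x20  # high nibble = major, low nibble = minor
    float('%d.%d' % (protocol_ver_enc >> 4, protocol_ver_enc & 0xF))   # -> 2.0

    profile_ver_enc = 2093   # hundreds = major, remainder = minor
    float('%d.%d' % (profile_ver_enc / 100, profile_ver_enc % 100))    # -> 20.93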
+     def _parse_message(self):
+         # When done, calculate the CRC and return None
+         if self._bytes_left <= 0:
+             # Don't assert the CRC if we've been asked not to
+             if not self._complete and self.check_crc:
+                 self._read_and_assert_crc()
+
+             if self._file.tell() >= self._filesize:
+                 self._complete = True
+                 self.close()
+                 return None
+
+             # Still have data left in the file - assume chained FIT files
+             self._parse_file_header()
+             return self._parse_message()
+
+         header = self._parse_message_header()
+
+         if header.is_definition:
+             message = self._parse_definition_message(header)
+         else:
+             message = self._parse_data_message(header)
+             if message.mesg_type is not None:
+                 if message.mesg_type.name == 'developer_data_id':
+                     self.add_dev_data_id(message)
+                 elif message.mesg_type.name == 'field_description':
+                     self.add_dev_field_description(message)
+
+         return message
+
+     def _parse_message_header(self):
+         header = self._read_struct('B')
+
+         if header & 0x80:  # bit 7: Is this record a compressed timestamp?
+             return MessageHeader(
+                 is_definition=False,
+                 is_developer_data=False,
+                 local_mesg_num=(header >> 5) & 0x3,  # bits 5-6
+                 time_offset=header & 0x1F,  # bits 0-4
+             )
+         else:
+             return MessageHeader(
+                 is_definition=bool(header & 0x40),  # bit 6
+                 is_developer_data=bool(header & 0x20),  # bit 5
+                 local_mesg_num=header & 0xF,  # bits 0-3
+                 time_offset=None,
+             )
+
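Two worked examples of the record-header bit layout above (byte values are illustrative):

    # Normal header 0x40 = 0b01000000:
    #   bit 7 = 0 (normal record), bit 6 = 1 (definition message),
    #   bit 5 = 0 (no developer data), bits 0-3 = 0 (local message type 0)

    # Compressed timestamp header 0xB4 = 0b10110100:
    #   bit 7 = 1 (compressed timestamp), bits 5-6 = 0b01 (local message type 1),
    #   bits 0-4 = 0b10100 (time offset of 20 seconds)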
+     def _parse_definition_message(self, header):
+         # Read the reserved byte and the architecture byte to resolve endianness
+         endian = '>' if self._read_struct('xB') else '<'
+         # Read the rest of the header with endian awareness
+         global_mesg_num, num_fields = self._read_struct('HB', endian=endian)
+         mesg_type = MESSAGE_TYPES.get(global_mesg_num)
+         field_defs = []
+
+         for n in range(num_fields):
+             field_def_num, field_size, base_type_num = self._read_struct('3B', endian=endian)
+             # Try to get the field from the message type (None if unknown)
+             field = mesg_type.fields.get(field_def_num) if mesg_type else None
+             base_type = BASE_TYPES.get(base_type_num, BASE_TYPE_BYTE)
+
+             if (field_size % base_type.size) != 0:
+                 # Guard against field being None for unknown fields
+                 warnings.warn(
+                     "Invalid field size %d for field '%s' of type '%s' (expected a multiple of %d); falling back to byte encoding." % (
+                         field_size, field.name if field else field_def_num, base_type.name, base_type.size)
+                 )
+                 base_type = BASE_TYPE_BYTE
+
+             # If the field has components that are accumulators,
+             # start recording their accumulation at 0
+             if field and field.components:
+                 for component in field.components:
+                     if component.accumulate:
+                         accumulators = self._accumulators.setdefault(global_mesg_num, {})
+                         accumulators[component.def_num] = 0
+
+             field_defs.append(FieldDefinition(
+                 field=field,
+                 def_num=field_def_num,
+                 base_type=base_type,
+                 size=field_size,
+             ))
+
+         dev_field_defs = []
+         if header.is_developer_data:
+             num_dev_fields = self._read_struct('B', endian=endian)
+             for n in range(num_dev_fields):
+                 field_def_num, field_size, dev_data_index = self._read_struct('3B', endian=endian)
+                 field = self.get_dev_type(dev_data_index, field_def_num)
+                 dev_field_defs.append(DevFieldDefinition(
+                     field=field,
+                     dev_data_index=dev_data_index,
+                     def_num=field_def_num,
+                     size=field_size
+                 ))
+
+         def_mesg = DefinitionMessage(
+             header=header,
+             endian=endian,
+             mesg_type=mesg_type,
+             mesg_num=global_mesg_num,
+             field_defs=field_defs,
+             dev_field_defs=dev_field_defs,
+         )
+         self._local_mesgs[header.local_mesg_num] = def_mesg
+         return def_mesg
+
+     def _parse_raw_values_from_data_message(self, def_mesg):
+         # Go through the mesg's field defs and read them
+         raw_values = []
+         for field_def in def_mesg.field_defs + def_mesg.dev_field_defs:
+             base_type = field_def.base_type
+             is_byte = base_type.name == 'byte'
+             # Struct to read n base types (field def size / base type size)
+             struct_fmt = str(int(field_def.size / base_type.size)) + base_type.fmt
+
+             # Extract the raw value, asking for a tuple if it's a byte type
+             try:
+                 raw_value = self._read_struct(
+                     struct_fmt, endian=def_mesg.endian, always_tuple=is_byte,
+                 )
+             except FitEOFError:
+                 # The file was suddenly terminated
+                 warnings.warn("File was terminated unexpectedly, some data will not be loaded.")
+                 break
+
+             # If the field returns a tuple of values it's definitely an
+             # oddball, but we'll parse it on a per-value basis.
+             # If it's a byte type, treat the tuple as a single value
+             if isinstance(raw_value, tuple) and not is_byte:
+                 raw_value = tuple(base_type.parse(rv) for rv in raw_value)
+             else:
+                 # Otherwise, just scrub the singular value
+                 raw_value = base_type.parse(raw_value)
+
+             raw_values.append(raw_value)
+         return raw_values
+
+     @staticmethod
+     def _resolve_subfield(field, def_mesg, raw_values):
+         # Resolve into (field, parent), ie (subfield, field) or (field, None)
+         if field.subfields:
+             for sub_field in field.subfields:
+                 # Go through the reference fields for this sub field
+                 for ref_field in sub_field.ref_fields:
+                     # Go through the field defs AND their raw values
+                     for field_def, raw_value in zip(def_mesg.field_defs, raw_values):
+                         # If there's a definition number AND raw value match on the
+                         # reference field, then we return this subfield
+                         if (field_def.def_num == ref_field.def_num) and (ref_field.raw_value == raw_value):
+                             return sub_field, field
+         return field, None
+
+     def _apply_scale_offset(self, field, raw_value):
+         # Apply numeric transformations (scale + offset)
+         if isinstance(raw_value, tuple):
+             # Contains multiple values, apply the transformations to all of them
+             return tuple(self._apply_scale_offset(field, x) for x in raw_value)
+         elif isinstance(raw_value, (int, float)):
+             if field.scale:
+                 raw_value = float(raw_value) / field.scale
+             if field.offset:
+                 raw_value = raw_value - field.offset
+         return raw_value
+
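A worked example of the transform above, using the FIT profile's altitude encoding (scale 5, offset 500) as the illustration:

    # altitude: raw uint16 with scale=5, offset=500
    raw_value = 23456
    value = float(raw_value) / 5 - 500  # 4691.2 - 500 = 4191.2 (metres)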
+     @staticmethod
+     def _apply_compressed_accumulation(raw_value, accumulation, num_bits):
+         max_value = (1 << num_bits)
+         max_mask = max_value - 1
+         base_value = raw_value + (accumulation & ~max_mask)
+
+         if raw_value < (accumulation & max_mask):
+             base_value += max_value
+
+         return base_value
+
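A worked example of the rollover logic above with num_bits=5 (the width of a compressed time offset), so max_value=32 and max_mask=31:

    # accumulation = 1000 -> high bits = 992, low 5 bits = 8
    # raw_value = 10 (>= 8): base_value = 10 + 992      = 1002  (no rollover)
    # raw_value = 3  (<  8): base_value = 3 + 992 + 32  = 1027  (offset wrapped past 31)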
+     def _parse_data_message_components(self, header):
+         def_mesg = self._local_mesgs.get(header.local_mesg_num)
+         if not def_mesg:
+             raise FitParseError('Got data message with invalid local message type %d' % (
+                 header.local_mesg_num))
+
+         raw_values = self._parse_raw_values_from_data_message(def_mesg)
+         field_datas = []  # TODO: I don't love this name, update on DataMessage too
+
+         # TODO: Maybe refactor this and make it simpler (or at least break it
+         # up into sub-functions)
+         for field_def, raw_value in zip(def_mesg.field_defs + def_mesg.dev_field_defs, raw_values):
+             field, parent_field = field_def.field, None
+             if field:
+                 field, parent_field = self._resolve_subfield(field, def_mesg, raw_values)
+
+                 # Resolve component fields
+                 if field.components:
+                     for component in field.components:
+                         # Render its raw value
+                         try:
+                             cmp_raw_value = component.render(raw_value)
+                         except ValueError:
+                             continue
+
+                         # Apply the accumulated value
+                         if component.accumulate and cmp_raw_value is not None:
+                             accumulator = self._accumulators[def_mesg.mesg_num]
+                             cmp_raw_value = self._apply_compressed_accumulation(
+                                 cmp_raw_value, accumulator[component.def_num], component.bits,
+                             )
+                             accumulator[component.def_num] = cmp_raw_value
+
+                         # Apply scale and offset from the component, not from the
+                         # dynamic field, as they may differ
+                         cmp_raw_value = self._apply_scale_offset(component, cmp_raw_value)
+
+                         # Extract the component's dynamic field from def_mesg
+                         cmp_field = def_mesg.mesg_type.fields[component.def_num]
+
+                         # Resolve a possible subfield
+                         cmp_field, cmp_parent_field = self._resolve_subfield(cmp_field, def_mesg, raw_values)
+                         cmp_value = cmp_field.render(cmp_raw_value)
+
+                         # Plop it on field_datas
+                         field_datas.append(
+                             FieldData(
+                                 field_def=None,
+                                 field=cmp_field,
+                                 parent_field=cmp_parent_field,
+                                 value=cmp_value,
+                                 raw_value=cmp_raw_value,
+                             )
+                         )
+
+                 # TODO: Do we care about a base_type and a resolved field mismatch?
+                 # My hunch is we don't
+                 value = self._apply_scale_offset(field, field.render(raw_value))
+             else:
+                 value = raw_value
+
+             # Update the compressed timestamp accumulator
+             if (field_def.def_num == FIELD_TYPE_TIMESTAMP.def_num) and (raw_value is not None):
+                 self._compressed_ts_accumulator = raw_value
+
+             field_datas.append(
+                 FieldData(
+                     field_def=field_def,
+                     field=field,
+                     parent_field=parent_field,
+                     value=value,
+                     raw_value=raw_value,
+                 )
+             )
+
+         # Apply the timestamp field if we got a compressed timestamp header
+         if header.time_offset is not None:
+             ts_value = self._compressed_ts_accumulator = self._apply_compressed_accumulation(
+                 header.time_offset, self._compressed_ts_accumulator, 5,
+             )
+             field_datas.append(
+                 FieldData(
+                     field_def=None,
+                     field=FIELD_TYPE_TIMESTAMP,
+                     parent_field=None,
+                     value=FIELD_TYPE_TIMESTAMP.render(ts_value),
+                     raw_value=ts_value,
+                 )
+             )
+
+         return header, def_mesg, field_datas
+
+     def _parse_data_message(self, header):
+         header, def_mesg, field_datas = self._parse_data_message_components(header)
+         return DataMessage(header=header, def_mesg=def_mesg, fields=field_datas)
+
+     @staticmethod
+     def _should_yield(message, with_definitions, names):
+         if not message:
+             return False
+         if with_definitions or message.type == 'data':
+             # If the name arg is None, we return everything
+             if names is None:
+                 return True
+             elif (message.name in names) or (message.mesg_num in names):
+                 return True
+         return False
+
+     @staticmethod
+     def _make_set(obj):
+         if obj is None:
+             return None
+
+         if is_iterable(obj):
+             return set(obj)
+         else:
+             return {obj}
+
+     ##########
+     # Public API
+
+     def get_messages(self, name=None, with_definitions=False, as_dict=False):
+         if with_definitions:  # with_definitions implies as_dict=False
+             as_dict = False
+
+         names = self._make_set(name)
+
+         while not self._complete:
+             message = self._parse_message()
+             if self._should_yield(message, with_definitions, names):
+                 yield message.as_dict() if as_dict else message
+
+     def __iter__(self):
+         return self.get_messages()
+
+
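A sketch of the filtering semantics above: name may be a single message name or global message number, or any iterable of them, and with_definitions=True forces full message objects (the file path is hypothetical):

    fitfile = FitFile('activity.fit')

    laps = fitfile.get_messages(name='lap')                   # filter by name
    mixed = fitfile.get_messages(name={'record', 20})         # names and mesg numbers can mix
    everything = fitfile.get_messages(with_definitions=True)  # definition + data messages, never dicts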
+ class CacheMixin:
+     """Add message caching to the FitFileDecoder"""
+
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self._messages = []
+
+     def _parse_message(self):
+         self._messages.append(super()._parse_message())
+         return self._messages[-1]
+
+     def get_messages(self, name=None, with_definitions=False, as_dict=False):
+         if with_definitions:  # with_definitions implies as_dict=False
+             as_dict = False
+
+         names = self._make_set(name)
+
+         # Yield all parsed messages first
+         for message in self._messages:
+             if self._should_yield(message, with_definitions, names):
+                 yield message.as_dict() if as_dict else message
+
+         for message in super().get_messages(names, with_definitions, as_dict):
+             yield message
+
+     @property
+     def messages(self):
+         return list(self.get_messages())
+
+     def parse(self):
+         while self._parse_message():
+             pass
+
+
+ class DataProcessorMixin:
+     """Add data processing to the FitFileDecoder"""
+
+     def __init__(self, *args, **kwargs):
+         self._processor = kwargs.pop("data_processor", None) or FitFileDataProcessor()
+         super().__init__(*args, **kwargs)
+
+     def _parse_data_message(self, header):
+         header, def_mesg, field_datas = self._parse_data_message_components(header)
+
+         # Apply data processors
+         for field_data in field_datas:
+             # Apply the type name processor, then field and unit processors
+             self._processor.run_type_processor(field_data)
+             self._processor.run_field_processor(field_data)
+             self._processor.run_unit_processor(field_data)
+
+         data_message = DataMessage(header=header, def_mesg=def_mesg, fields=field_datas)
+         self._processor.run_message_processor(data_message)
+
+         return data_message
+
+
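How the processor hook above is typically exercised, using the StandardUnitsDataProcessor exported from fitparse.processors (the file path is hypothetical):

    from fitparse import FitFile, StandardUnitsDataProcessor

    # Runs each FieldData through the processor as it is parsed, e.g.
    # converting position semicircles to degrees and speeds to km/h
    fitfile = FitFile('activity.fit', data_processor=StandardUnitsDataProcessor())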
+ class UncachedFitFile(DataProcessorMixin, FitFileDecoder):
+     """FitFileDecoder with data processing"""
+
+     def __init__(self, fileish, *args, check_crc=True, data_processor=None, **kwargs):
+         # Ensure all optional params are passed as kwargs
+         super().__init__(
+             fileish,
+             *args,
+             check_crc=check_crc,
+             data_processor=data_processor,
+             **kwargs
+         )
+
+
+ class FitFile(CacheMixin, UncachedFitFile):
+     """FitFileDecoder with caching and data processing"""
+     pass
+
+
+ # TODO: Create subclasses like Activity and do per-value monkey patching
+ # for example local_timestamp to adjust timestamp on a per-file basis
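A closing note on the class ladder this module defines: FitFileDecoder is the bare streaming parser, UncachedFitFile adds field processing, and FitFile additionally caches every parsed message so it can be iterated more than once. A sketch of choosing between them (the file path is hypothetical):

    from fitparse import FitFile, UncachedFitFile

    # Re-iterable, but keeps every message in memory
    cached = FitFile('activity.fit')

    # Single forward pass with constant memory, e.g. for very large files
    with UncachedFitFile('activity.fit') as decoder:
        for mesg in decoder:
            pass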