pymast 1.0.1.tar.gz → 1.0.2.tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (33)
  1. {pymast-1.0.1 → pymast-1.0.2}/PKG-INFO +1 -1
  2. {pymast-1.0.1 → pymast-1.0.2}/pymast/__init__.py +1 -1
  3. {pymast-1.0.1 → pymast-1.0.2}/pymast/parsers.py +18 -18
  4. {pymast-1.0.1 → pymast-1.0.2}/pymast/radio_project.py +164 -66
  5. {pymast-1.0.1 → pymast-1.0.2}/pymast.egg-info/PKG-INFO +1 -1
  6. {pymast-1.0.1 → pymast-1.0.2}/pyproject.toml +19 -19
  7. {pymast-1.0.1 → pymast-1.0.2}/LICENSE.txt +0 -0
  8. {pymast-1.0.1 → pymast-1.0.2}/README.md +0 -0
  9. {pymast-1.0.1 → pymast-1.0.2}/pymast/fish_history.py +0 -0
  10. {pymast-1.0.1 → pymast-1.0.2}/pymast/formatter.py +0 -0
  11. {pymast-1.0.1 → pymast-1.0.2}/pymast/logger.py +0 -0
  12. {pymast-1.0.1 → pymast-1.0.2}/pymast/naive_bayes.py +0 -0
  13. {pymast-1.0.1 → pymast-1.0.2}/pymast/overlap_removal.py +0 -0
  14. {pymast-1.0.1 → pymast-1.0.2}/pymast/predictors.py +0 -0
  15. {pymast-1.0.1 → pymast-1.0.2}/pymast/validation.py +0 -0
  16. {pymast-1.0.1 → pymast-1.0.2}/pymast.egg-info/SOURCES.txt +0 -0
  17. {pymast-1.0.1 → pymast-1.0.2}/pymast.egg-info/dependency_links.txt +0 -0
  18. {pymast-1.0.1 → pymast-1.0.2}/pymast.egg-info/requires.txt +0 -0
  19. {pymast-1.0.1 → pymast-1.0.2}/pymast.egg-info/top_level.txt +0 -0
  20. {pymast-1.0.1 → pymast-1.0.2}/setup.cfg +0 -0
  21. {pymast-1.0.1 → pymast-1.0.2}/setup.py +0 -0
  22. {pymast-1.0.1 → pymast-1.0.2}/tests/test_basic.py +0 -0
  23. {pymast-1.0.1 → pymast-1.0.2}/tests/test_csv_pit.py +0 -0
  24. {pymast-1.0.1 → pymast-1.0.2}/tests/test_formatter_tte.py +0 -0
  25. {pymast-1.0.1 → pymast-1.0.2}/tests/test_initial_state_release.py +0 -0
  26. {pymast-1.0.1 → pymast-1.0.2}/tests/test_overlap_hdf5_integration.py +0 -0
  27. {pymast-1.0.1 → pymast-1.0.2}/tests/test_overlap_loading.py +0 -0
  28. {pymast-1.0.1 → pymast-1.0.2}/tests/test_overlap_small.py +0 -0
  29. {pymast-1.0.1 → pymast-1.0.2}/tests/test_overlap_unit.py +0 -0
  30. {pymast-1.0.1 → pymast-1.0.2}/tests/test_parsers_basic.py +0 -0
  31. {pymast-1.0.1 → pymast-1.0.2}/tests/test_pit_multiple_parser.py +0 -0
  32. {pymast-1.0.1 → pymast-1.0.2}/tests/test_pit_parser.py +0 -0
  33. {pymast-1.0.1 → pymast-1.0.2}/tests/test_unified_pit.py +0 -0
--- pymast-1.0.1/PKG-INFO
+++ pymast-1.0.2/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pymast
- Version: 1.0.1
+ Version: 1.0.2
  Summary: Movement Analysis Software for Telemetry (MAST) - False positive removal and movement analysis for radio telemetry data
  Author: Theodore Castro-Santos
  Author-email: "Kevin P. Nebiolo" <kevin.nebiolo@kleinschmidtgroup.com>
--- pymast-1.0.1/pymast/__init__.py
+++ pymast-1.0.2/pymast/__init__.py
@@ -28,7 +28,7 @@ from .parsers import *
  from .radio_project import *

  # Version
- __version__ = '1.0.0'
+ __version__ = '1.0.2'

  # Define what's available when using "from pymast import *"
  __all__ = [
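Worth noting: in 1.0.1 the module attribute had fallen behind the package metadata (it still read `'1.0.0'` while PKG-INFO said 1.0.1); 1.0.2 re-syncs the two. A quick check after upgrading:

```python
# Both version sources should now agree; in 1.0.1 the distribution metadata
# said 1.0.1 while the module attribute still reported '1.0.0'.
from importlib.metadata import version

import pymast

print(version("pymast"))    # from PKG-INFO: '1.0.2'
print(pymast.__version__)   # from pymast/__init__.py: '1.0.2'
```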
--- pymast-1.0.1/pymast/parsers.py
+++ pymast-1.0.2/pymast/parsers.py
(Both hunks appear indentation-only: the removed and re-added lines are textually identical, and this view does not preserve leading whitespace.)
@@ -1840,19 +1840,19 @@ def PIT(file_name,
      else:
          # try to find an antenna column in the fixed-width frame
          antenna_col = None
-         for col in telem_dat.columns:
-             col_lower = str(col).lower().strip()
-             if col_lower in (
-                 'antenna id',
-                 'antenna',
-                 'ant',
-                 'antennae',
-                 'antennae id',
-                 'reader id',
-                 'readerid',
-             ):
-                 antenna_col = col
-                 break
+         for col in telem_dat.columns:
+             col_lower = str(col).lower().strip()
+             if col_lower in (
+                 'antenna id',
+                 'antenna',
+                 'ant',
+                 'antennae',
+                 'antennae id',
+                 'reader id',
+                 'readerid',
+             ):
+                 antenna_col = col
+                 break

          if antenna_col is not None:
              # extract numeric antenna identifier and map using provided dictionary
@@ -1884,11 +1884,11 @@ def PIT(file_name,

              # drop detections that do not map to a known receiver
              telem_dat = telem_dat.dropna(subset=['rec_id'])
-         else:
-             raise ValueError(
-                 'Multi-antenna fixed-width PIT file requires an antenna/reader column '
-                 '(e.g., "Antenna ID" or "Reader ID"), but none was found'
-             )
+         else:
+             raise ValueError(
+                 'Multi-antenna fixed-width PIT file requires an antenna/reader column '
+                 '(e.g., "Antenna ID" or "Reader ID"), but none was found'
+             )

      # Data cleaning - remove invalid entries
      print(f"\nCleaning data - original records: {len(telem_dat)}")
--- pymast-1.0.1/pymast/radio_project.py
+++ pymast-1.0.2/pymast/radio_project.py
(Several hunks in this file re-add identical lines and appear whitespace-only; leading whitespace is not preserved in this view.)
@@ -95,21 +95,21 @@ import pymast.predictors as predictors
  import matplotlib.pyplot as plt
  from matplotlib import rcParams
  from scipy import interpolate
- try:
-     from tqdm import tqdm
- except ImportError:
-     def tqdm(iterable, **kwargs):
-         return iterable
+ try:
+     from tqdm import tqdm
+ except ImportError:
+     def tqdm(iterable, **kwargs):
+         return iterable
  import shutil
  import warnings
  import dask.dataframe as dd
  import dask.array as da
- try:
-     from dask_ml.cluster import KMeans
-     _KMEANS_IMPL = 'dask'
- except ImportError:
-     from sklearn.cluster import KMeans
-     _KMEANS_IMPL = 'sklearn'
+ try:
+     from dask_ml.cluster import KMeans
+     _KMEANS_IMPL = 'dask'
+ except ImportError:
+     from sklearn.cluster import KMeans
+     _KMEANS_IMPL = 'sklearn'

  # Initialize logger
  logger = logging.getLogger('pymast.radio_project')
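These blocks make two heavy optional dependencies soft: `tqdm` degrades to a pass-through iterator, and `KMeans` is taken from dask-ml when installed, otherwise from scikit-learn, with `_KMEANS_IMPL` recording the choice. The diff shows only the imports; a hedged sketch of how a call site might branch on that flag (this is an assumption, not pymast's actual call site):

```python
import numpy as np

# Same fallback pattern as the diff; _KMEANS_IMPL records which backend won.
try:
    from dask_ml.cluster import KMeans
    _KMEANS_IMPL = 'dask'
except ImportError:
    from sklearn.cluster import KMeans
    _KMEANS_IMPL = 'sklearn'

X = np.random.rand(1000, 2)
model = KMeans(n_clusters=2)

if _KMEANS_IMPL == 'dask':
    import dask.array as da
    model.fit(da.from_array(X, chunks=(250, 2)))  # dask-ml fits chunked arrays out of core
else:
    model.fit(X)  # scikit-learn fits the in-memory array directly

print(_KMEANS_IMPL, model.cluster_centers_.shape)  # e.g. 'sklearn' (2, 2)
```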
@@ -415,12 +415,12 @@ class radio_project():
          if self.non_interactive:
              logger.debug(f"Non-interactive mode: auto-answering '{prompt_text}' with '{default}'")
              return default
-         try:
-             return input(prompt_text)
-         except (EOFError, OSError) as exc:
-             raise RuntimeError(
-                 "Input prompt failed. Set project.non_interactive = True to use defaults."
-             ) from exc
+         try:
+             return input(prompt_text)
+         except (EOFError, OSError) as exc:
+             raise RuntimeError(
+                 "Input prompt failed. Set project.non_interactive = True to use defaults."
+             ) from exc

      def telem_data_import(self,
                            rec_id,
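The guarded prompt turns a failed `input()` (typical under schedulers and CI, where stdin is closed) into an actionable error rather than a bare `EOFError`. A minimal standalone sketch of the same guard, outside the radio_project class:

```python
# Minimal sketch of the prompt guard shown above; `Prompter` is a
# hypothetical stand-in, not a pymast class.
class Prompter:
    def __init__(self, non_interactive=False):
        self.non_interactive = non_interactive

    def ask(self, prompt_text, default='n'):
        if self.non_interactive:
            return default  # auto-answer for batch/CI runs
        try:
            return input(prompt_text)
        except (EOFError, OSError) as exc:
            raise RuntimeError(
                "Input prompt failed. Set non_interactive=True to use defaults."
            ) from exc

# In a scheduled job with no terminal attached:
p = Prompter(non_interactive=True)
assert p.ask("Overwrite existing data? [y/n] ") == 'n'
```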
@@ -496,9 +496,19 @@ class radio_project():

          logger.info(f" Found {len(tFiles)} file(s) to import")

+         # Track detections per file for statistics
+         detections_per_file = []
+
          # for every file call the correct text parser and import
          for i, f in enumerate(tqdm(tFiles, desc=f"Importing {rec_id}", unit="file"), 1):
              logger.debug(f" Processing file {i}/{len(tFiles)}: {f}")
+
+             # Count detections before import
+             try:
+                 pre_count = len(pd.read_hdf(self.db, key='raw_data', where=f'rec_id = "{rec_id}"'))
+             except (KeyError, FileNotFoundError):
+                 pre_count = 0
+
              # get the complete file directory
              f_dir = os.path.join(file_dir,f)

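The pre-import count uses `pd.read_hdf` with a `where` predicate. A hedged, self-contained sketch of that query pattern; note that `where=` filtering only works when the key was written with `format='table'` and the queried column is a data column:

```python
import pandas as pd

# Demo store: format='table' plus data_columns makes rec_id queryable on disk.
df = pd.DataFrame({'rec_id': ['R1', 'R1', 'R2'], 'power': [55, 60, 58]})
df.to_hdf('demo.h5', key='raw_data', mode='w', format='table',
          data_columns=['rec_id'])

# Count rows for one receiver without loading the whole table into memory.
n_r1 = len(pd.read_hdf('demo.h5', key='raw_data', where='rec_id = "R1"'))
print(n_r1)  # 2
```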
@@ -533,8 +543,91 @@ class radio_project():
              else:
                  logger.error(f"No import routine for receiver type: {rec_type}")
                  raise ValueError(f"No import routine available for receiver type: {rec_type}")
+
+             # Count detections after import
+             try:
+                 post_count = len(pd.read_hdf(self.db, key='raw_data', where=f'rec_id = "{rec_id}"'))
+                 detections_this_file = post_count - pre_count
+                 detections_per_file.append(detections_this_file)
+             except (KeyError, FileNotFoundError):
+                 detections_per_file.append(0)

          logger.info(f"✓ Import complete for receiver {rec_id}: {len(tFiles)} file(s) processed")
+
+         # Calculate and display import statistics
+         try:
+             raw_data = pd.read_hdf(self.db, key='raw_data', where=f'rec_id = "{rec_id}"')
+
+             # Total Detection Count
+             total_detections = len(raw_data)
+             logger.info(f"\n{'='*60}")
+             logger.info(f"IMPORT STATISTICS FOR {rec_id}")
+             logger.info(f"{'='*60}")
+             logger.info(f"Total Detection Count: {total_detections:,}")
+
+             if total_detections > 0:
+                 # Detection count summary statistics
+                 logger.info(f"\nDetection Summary Statistics:")
+                 logger.info(f" Mean detections per file: {total_detections / len(tFiles):.1f}")
+                 logger.info(f" Files processed: {len(tFiles)}")
+
+                 # 5-number summary for detections per file
+                 if len(detections_per_file) > 0:
+                     det_array = np.array(detections_per_file)
+                     logger.info(f"\nDetections Per File (5-number summary):")
+                     logger.info(f" Min: {np.min(det_array):,.0f}")
+                     logger.info(f" Q1: {np.percentile(det_array, 25):,.0f}")
+                     logger.info(f" Median: {np.median(det_array):,.0f}")
+                     logger.info(f" Q3: {np.percentile(det_array, 75):,.0f}")
+                     logger.info(f" Max: {np.max(det_array):,.0f}")
+
+                 # Unique Tag Count
+                 unique_tags = raw_data['freq_code'].nunique()
+                 logger.info(f"\nUnique Tag Count: {unique_tags}")
+
+                 # Duplicate Tag Count and IDs
+                 # Check for detections at the exact same timestamp (true duplicates)
+                 if 'time_stamp' in raw_data.columns:
+                     dup_mask = raw_data.duplicated(subset=['freq_code', 'time_stamp'], keep=False)
+                     duplicate_count = dup_mask.sum()
+
+                     if duplicate_count > 0:
+                         duplicate_tags = raw_data.loc[dup_mask, 'freq_code'].unique()
+                         logger.info(f"\nDuplicate Detection Count (same timestamp): {duplicate_count:,}")
+                         logger.info(f"Duplicate Tag IDs ({len(duplicate_tags)} tags):")
+                         for tag in sorted(duplicate_tags)[:10]:  # Show first 10
+                             tag_dups = dup_mask & (raw_data['freq_code'] == tag)
+                             logger.info(f" {tag}: {tag_dups.sum()} duplicate(s)")
+                         if len(duplicate_tags) > 10:
+                             logger.info(f" ... and {len(duplicate_tags) - 10} more")
+                     else:
+                         logger.info(f"\nDuplicate Detection Count: 0 (no exact timestamp duplicates)")
+
+                 # Time Coverage
+                 if 'time_stamp' in raw_data.columns:
+                     raw_data['time_stamp'] = pd.to_datetime(raw_data['time_stamp'])
+                     start_time = raw_data['time_stamp'].min()
+                     end_time = raw_data['time_stamp'].max()
+                     duration = end_time - start_time
+
+                     logger.info(f"\nTime Coverage:")
+                     logger.info(f" Start: {start_time}")
+                     logger.info(f" End: {end_time}")
+                     logger.info(f" Duration: {duration.days} days, {duration.seconds // 3600} hours")
+
+                     # Detection rate
+                     if duration.total_seconds() > 0:
+                         det_per_hour = total_detections / (duration.total_seconds() / 3600)
+                         logger.info(f" Detection rate: {det_per_hour:.1f} detections/hour")
+
+                 logger.info(f"{'='*60}\n")
+             else:
+                 logger.warning(f"No detections found for receiver {rec_id}")
+
+         except KeyError:
+             logger.warning(f"Could not retrieve statistics - raw_data table not found in database")
+         except Exception as e:
+             logger.warning(f"Error calculating import statistics: {e}")

      def get_fish(self, rec_id, train = True, reclass_iter = None):
          logger.info(f"Getting fish for receiver {rec_id}")
@@ -1576,16 +1669,16 @@ class radio_project():
              node_path = node._v_pathname
              print(f" Copying {node_path}...")

-             try:
-                 # Use recursive=True to copy entire subtree (Groups, Tables, Arrays, etc.)
-                 h5in.copy_node(
-                     where=node_path,
-                     newparent=h5out.root,
-                     recursive=True,
-                     filters=filters
-                 )
-             except (tables.NodeError, tables.HDF5ExtError, OSError, ValueError) as e:
-                 raise RuntimeError(f"Failed to copy HDF5 node {node_path}: {e}") from e
+             try:
+                 # Use recursive=True to copy entire subtree (Groups, Tables, Arrays, etc.)
+                 h5in.copy_node(
+                     where=node_path,
+                     newparent=h5out.root,
+                     recursive=True,
+                     filters=filters
+                 )
+             except (tables.NodeError, tables.HDF5ExtError, OSError, ValueError) as e:
+                 raise RuntimeError(f"Failed to copy HDF5 node {node_path}: {e}") from e

      # Get new size
      new_size = os.path.getsize(output_path)
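The repack loop copies every top-level node into a freshly created file, letting PyTables rewrite and recompress as it goes. A hedged, self-contained sketch of the same pattern; the file names and filter settings are illustrative, not pymast defaults:

```python
import numpy as np
import tables

# Create a tiny demo file so the sketch is self-contained.
with tables.open_file('project.h5', mode='w') as h5:
    h5.create_array('/', 'detections', np.arange(10), title='demo array')

filters = tables.Filters(complevel=5, complib='blosc')  # illustrative settings

with tables.open_file('project.h5', mode='r') as h5in, \
     tables.open_file('project_repacked.h5', mode='w') as h5out:
    for node in h5in.root._f_iter_nodes():
        node_path = node._v_pathname
        try:
            # recursive=True copies whole subtrees (Groups, Tables, Arrays, etc.)
            h5in.copy_node(node_path, newparent=h5out.root,
                           recursive=True, filters=filters)
        except (tables.NodeError, tables.HDF5ExtError, OSError, ValueError) as e:
            raise RuntimeError(f"Failed to copy HDF5 node {node_path}: {e}") from e
```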
@@ -1603,26 +1696,29 @@ class radio_project():
      def make_recaptures_table(self, export=True, pit_study=False):
          '''Creates a recaptures key in the HDF5 file, iterating over receivers to manage memory.'''
          logger.info("Creating recaptures table")
+         logger.info(f" PIT study mode: {pit_study}")
          logger.info(f" Processing {len(self.receivers)} receiver(s)")
          # prepare a heartbeat log so long runs can be monitored (one-line per receiver)
          heartbeat_dir = os.path.join(self.project_dir, 'build')
-         try:
-             os.makedirs(heartbeat_dir, exist_ok=True)
-         except OSError as e:
-             raise RuntimeError(
-                 f"Failed to create heartbeat directory '{heartbeat_dir}': {e}"
-             ) from e
+         try:
+             os.makedirs(heartbeat_dir, exist_ok=True)
+         except OSError as e:
+             raise RuntimeError(
+                 f"Failed to create heartbeat directory '{heartbeat_dir}': {e}"
+             ) from e
          heartbeat_path = os.path.join(heartbeat_dir, 'recaptures_heartbeat.log')
          print(f"Starting recaptures: {len(self.receivers)} receivers. Heartbeat -> {heartbeat_path}")
-         try:
-             with open(heartbeat_path, 'a') as _hb:
-                 _hb.write(f"START {datetime.datetime.now().isoformat()} receivers={len(self.receivers)}\n")
-         except OSError as e:
-             raise RuntimeError(
-                 f"Failed to write heartbeat start to '{heartbeat_path}': {e}"
-             ) from e
-
-         if pit_study==False:
+         try:
+             with open(heartbeat_path, 'a') as _hb:
+                 _hb.write(f"START {datetime.datetime.now().isoformat()} receivers={len(self.receivers)}\n")
+         except OSError as e:
+             raise RuntimeError(
+                 f"Failed to write heartbeat start to '{heartbeat_path}': {e}"
+             ) from e
+
+         if not pit_study:
+             # RADIO STUDY PATH
+             logger.info(" Using RADIO study processing path")
              # Convert release dates to datetime if not already done
              self.tags['rel_date'] = pd.to_datetime(self.tags['rel_date'])
              tags_copy = self.tags.copy()
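The heartbeat log is an append-only file with one line per receiver, so a long recaptures run can be watched from another shell (e.g., with `tail -f`) and its progress survives a crash. A minimal standalone sketch of the pattern:

```python
import datetime
import os

# Append-only heartbeat: one line per unit of work, flushed on each close,
# so external monitoring sees progress even if the run later dies.
os.makedirs('build', exist_ok=True)
heartbeat_path = os.path.join('build', 'recaptures_heartbeat.log')

receivers = ['rec_01', 'rec_02']  # illustrative receiver IDs
with open(heartbeat_path, 'a') as hb:
    hb.write(f"START {datetime.datetime.now().isoformat()} receivers={len(receivers)}\n")

for rec in receivers:
    rows = 0  # ... per-receiver work would happen here ...
    with open(heartbeat_path, 'a') as hb:
        hb.write(f"{datetime.datetime.now().isoformat()} rec={rec} rows={rows}\n")
```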
@@ -1787,15 +1883,17 @@ class radio_project():
                  logger.info(f" ✓ Recaps for {rec} compiled and written to HDF5")
                  print(f"[recaptures] ✓ {rec} written to database", flush=True)
                  # append heartbeat line
-                 try:
-                     with open(heartbeat_path, 'a') as _hb:
-                         _hb.write(f"{datetime.datetime.now().isoformat()} rec={rec} rows={len(rec_dat)}\n")
-                 except OSError as e:
-                     raise RuntimeError(
-                         f"Failed to write heartbeat for receiver {rec} to '{heartbeat_path}': {e}"
-                     ) from e
+                 try:
+                     with open(heartbeat_path, 'a') as _hb:
+                         _hb.write(f"{datetime.datetime.now().isoformat()} rec={rec} rows={len(rec_dat)}\n")
+                 except OSError as e:
+                     raise RuntimeError(
+                         f"Failed to write heartbeat for receiver {rec} to '{heartbeat_path}': {e}"
+                     ) from e

          else:
+             # PIT STUDY PATH
+             logger.info(" Using PIT study processing path")
              # Loop over each receiver in self.receivers
              for rec in tqdm(self.receivers.index, desc="Processing PIT receivers", unit="receiver"):
                  logger.info(f" Processing {rec} (PIT study)...")
@@ -1917,13 +2015,13 @@ class radio_project():

                  logger.info(f" ✓ PIT recaps for {rec} compiled and written to HDF5")
                  print(f"[recaptures] ✓ {rec} PIT data written to database", flush=True)
-                 try:
-                     with open(heartbeat_path, 'a') as _hb:
-                         _hb.write(f"{datetime.datetime.now().isoformat()} pit_rec={rec} rows={len(pit_data)}\n")
-                 except OSError as e:
-                     raise RuntimeError(
-                         f"Failed to write PIT heartbeat for receiver {rec} to '{heartbeat_path}': {e}"
-                     ) from e
+                 try:
+                     with open(heartbeat_path, 'a') as _hb:
+                         _hb.write(f"{datetime.datetime.now().isoformat()} pit_rec={rec} rows={len(pit_data)}\n")
+                 except OSError as e:
+                     raise RuntimeError(
+                         f"Failed to write PIT heartbeat for receiver {rec} to '{heartbeat_path}': {e}"
+                     ) from e


          if export:
  if export:
@@ -1933,16 +2031,16 @@ class radio_project():
1933
2031
  rec_data.to_csv(os.path.join(self.output_dir,'recaptures.csv'), index=False)
1934
2032
  logger.info(f" ✓ Export complete: {os.path.join(self.output_dir,'recaptures.csv')}")
1935
2033
  print(f"[recaptures] ✓ Export complete: {os.path.join(self.output_dir,'recaptures.csv')}", flush=True)
1936
- try:
1937
- with open(heartbeat_path, 'a') as _hb:
1938
- _hb.write(
1939
- f"DONE {datetime.datetime.now().isoformat()} export="
1940
- f"{os.path.join(self.output_dir, 'recaptures.csv')}\n"
1941
- )
1942
- except OSError as e:
1943
- raise RuntimeError(
1944
- f"Failed to write heartbeat completion to '{heartbeat_path}': {e}"
1945
- ) from e
2034
+ try:
2035
+ with open(heartbeat_path, 'a') as _hb:
2036
+ _hb.write(
2037
+ f"DONE {datetime.datetime.now().isoformat()} export="
2038
+ f"{os.path.join(self.output_dir, 'recaptures.csv')}\n"
2039
+ )
2040
+ except OSError as e:
2041
+ raise RuntimeError(
2042
+ f"Failed to write heartbeat completion to '{heartbeat_path}': {e}"
2043
+ ) from e
1946
2044
 
1947
2045
 
1948
2046
  def undo_recaptures(self):
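With both branches now announcing which path they take, the method is simplest to read from the caller's side. A hedged usage sketch; the constructor arguments are hypothetical, so consult the pymast docs for the real signature:

```python
from pymast import radio_project  # class shown in this diff

# Hypothetical setup; argument names are illustrative, not pymast's signature.
# project = radio_project(project_dir, db_name, det_count, duration,
#                         tag_data, receiver_data, nodes)

# project.make_recaptures_table(export=True, pit_study=False)  # radio-study path
# project.make_recaptures_table(export=True, pit_study=True)   # PIT-study path
# Either call appends progress lines to build/recaptures_heartbeat.log and,
# with export=True, writes recaptures.csv to the project's output directory.
```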
--- pymast-1.0.1/pymast.egg-info/PKG-INFO
+++ pymast-1.0.2/pymast.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pymast
- Version: 1.0.1
+ Version: 1.0.2
  Summary: Movement Analysis Software for Telemetry (MAST) - False positive removal and movement analysis for radio telemetry data
  Author: Theodore Castro-Santos
  Author-email: "Kevin P. Nebiolo" <kevin.nebiolo@kleinschmidtgroup.com>
--- pymast-1.0.1/pyproject.toml
+++ pymast-1.0.2/pyproject.toml
(Apart from the version bump, these changes appear whitespace- or line-ending-only; the removed and re-added lines are textually identical in this view.)
@@ -1,23 +1,23 @@
- [build-system]
- requires = ["setuptools>=61", "wheel"]
- build-backend = "setuptools.build_meta"
+ [build-system]
+ requires = ["setuptools>=61", "wheel"]
+ build-backend = "setuptools.build_meta"

  [project]
  name = "pymast"
- version = "1.0.1"
+ version = "1.0.2"
  description = "Movement Analysis Software for Telemetry (MAST) - False positive removal and movement analysis for radio telemetry data"
  readme = "README.md"
  authors = [
      {name = "Kevin P. Nebiolo", email = "kevin.nebiolo@kleinschmidtgroup.com"},
      {name = "Theodore Castro-Santos"}
  ]
- license = "MIT"
- classifiers = [
-     "Development Status :: 4 - Beta",
-     "Intended Audience :: Science/Research",
-     "Programming Language :: Python :: 3",
-     "Programming Language :: Python :: 3.8",
-     "Programming Language :: Python :: 3.9",
+ license = "MIT"
+ classifiers = [
+     "Development Status :: 4 - Beta",
+     "Intended Audience :: Science/Research",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.8",
+     "Programming Language :: Python :: 3.9",
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Topic :: Scientific/Engineering :: Bio-Informatics",
@@ -30,14 +30,14 @@ dependencies = [
      "matplotlib>=3.4.0",
      "statsmodels>=0.12.0",
      "networkx>=2.5",
-     "scipy>=1.7.1",
-     "scikit-learn>=0.24.0",
-     "h5py>=3.0.0",
-     "dask>=2021.3.0",
-     "dask-ml>=1.9.0",
-     "distributed>=2021.3.0",
-     "numba>=0.53.0",
-     "tables>=3.8.0",
+     "scipy>=1.7.1",
+     "scikit-learn>=0.24.0",
+     "h5py>=3.0.0",
+     "dask>=2021.3.0",
+     "dask-ml>=1.9.0",
+     "distributed>=2021.3.0",
+     "numba>=0.53.0",
+     "tables>=3.8.0",
      "intervaltree>=3.1.0",
  ]
