compass-lib 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
compass_lib/__init__.py CHANGED
@@ -1,7 +1,117 @@
  # -*- coding: utf-8 -*-
+ """Compass Parser Library.

+ A Python library for parsing and formatting Compass cave survey data files.
+ Supports .DAT (survey data), .MAK (project files), and .PLT (plot files).
+
+ Usage:
+     # Load a complete project (MAK + all DAT files)
+     from compass_lib import load_project
+     project = load_project(Path("cave.mak"))
+
+     for file_dir in project.file_directives:
+         print(f"File: {file_dir.file}")
+         if file_dir.data:
+             for trip in file_dir.data.trips:
+                 print(f"    Survey: {trip.header.survey_name}")
+
+     # Or load individual files
+     from compass_lib import read_dat_file, read_mak_file
+     trips = read_dat_file(Path("survey.DAT"))
+     directives = read_mak_file(Path("project.MAK"))
  """
- "A library to read Compass Survey files"
- """

- __version__ = "0.0.2"
+ __version__ = "0.0.3"
+
+ # Constants
+ from compass_lib.constants import COMPASS_ENCODING
+ from compass_lib.constants import FEET_TO_METERS
+ from compass_lib.constants import JSON_ENCODING
+ from compass_lib.constants import METERS_TO_FEET
+
+ # Enums
+ from compass_lib.enums import AzimuthUnit
+ from compass_lib.enums import CompassFileType
+ from compass_lib.enums import DrawOperation
+ from compass_lib.enums import FileFormat
+ from compass_lib.enums import InclinationUnit
+ from compass_lib.enums import LengthUnit
+ from compass_lib.enums import LrudAssociation
+ from compass_lib.enums import LrudItem
+ from compass_lib.enums import Severity
+ from compass_lib.enums import ShotItem
+ from compass_lib.errors import CompassParseError
+ from compass_lib.errors import CompassParseException
+ from compass_lib.errors import SourceLocation
+ from compass_lib.interface import CompassInterface
+ from compass_lib.io import CancellationToken
+ from compass_lib.io import load_project
+ from compass_lib.io import read_dat_file
+ from compass_lib.io import read_mak_and_dat_files
+ from compass_lib.io import read_mak_file
+ from compass_lib.io import save_project
+ from compass_lib.io import write_dat_file
+ from compass_lib.io import write_mak_file
+ from compass_lib.models import Bounds
+ from compass_lib.models import Location
+ from compass_lib.models import NEVLocation
+ from compass_lib.project.models import CompassMakFile
+ from compass_lib.project.models import FileDirective
+ from compass_lib.project.models import LinkStation
+ from compass_lib.survey.models import CompassDatFile
+ from compass_lib.survey.models import CompassShot
+ from compass_lib.survey.models import CompassTrip
+ from compass_lib.survey.models import CompassTripHeader
+ from compass_lib.validation import days_in_month
+ from compass_lib.validation import is_valid_station_name
+ from compass_lib.validation import validate_station_name
+
+ __all__ = [
+     # Constants
+     "COMPASS_ENCODING",
+     "FEET_TO_METERS",
+     "JSON_ENCODING",
+     "METERS_TO_FEET",
+     # Enums
+     "AzimuthUnit",
+     # Base Models
+     "Bounds",
+     "CancellationToken",
+     # Survey Models
+     "CompassDatFile",
+     "CompassFileType",
+     # I/O
+     "CompassInterface",
+     # Project Models
+     "CompassMakFile",
+     # Errors
+     "CompassParseError",
+     "CompassParseException",
+     "CompassShot",
+     "CompassTrip",
+     "CompassTripHeader",
+     "DrawOperation",
+     "FileDirective",
+     "FileFormat",
+     "InclinationUnit",
+     "LengthUnit",
+     "LinkStation",
+     "Location",
+     "LrudAssociation",
+     "LrudItem",
+     "NEVLocation",
+     "Severity",
+     "ShotItem",
+     "SourceLocation",
+     "days_in_month",
+     # Validation
+     "is_valid_station_name",
+     "load_project",
+     "read_dat_file",
+     "read_mak_and_dat_files",
+     "read_mak_file",
+     "save_project",
+     "validate_station_name",
+     "write_dat_file",
+     "write_mak_file",
+ ]
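
Aside from the diff itself: the usage snippet in the new module docstring is almost runnable on its own. A minimal sketch that simply completes it with the missing Path import (cave.mak and survey.DAT are placeholder file names):

    from pathlib import Path

    from compass_lib import load_project, read_dat_file, read_mak_file

    # Load a complete project (MAK plus all referenced DAT files).
    project = load_project(Path("cave.mak"))
    for file_dir in project.file_directives:
        print(f"File: {file_dir.file}")
        if file_dir.data:
            for trip in file_dir.data.trips:
                print(f"    Survey: {trip.header.survey_name}")

    # Or load individual files.
    trips = read_dat_file(Path("survey.DAT"))
    directives = read_mak_file(Path("project.MAK"))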
@@ -0,0 +1,2 @@
+ # -*- coding: utf-8 -*-
+ """Command-line interface commands for compass_lib."""
@@ -1,62 +1,249 @@
- from __future__ import annotations
+ # -*- coding: utf-8 -*-
+ """Convert command for Compass files.
+
+ Supports bidirectional conversion between Compass native formats (DAT, MAK)
+ and JSON format using Pydantic's built-in serialization.
+ """

  import argparse
+ import json
  from pathlib import Path

- from compass_lib.parser import CompassParser
+ from compass_lib.constants import COMPASS_ENCODING
+ from compass_lib.constants import JSON_ENCODING
+ from compass_lib.enums import CompassFileType
+ from compass_lib.enums import FileExtension
+ from compass_lib.enums import FileFormat
+ from compass_lib.enums import FormatIdentifier
+ from compass_lib.io import load_project
+ from compass_lib.io import read_dat_file
+ from compass_lib.project.format import format_mak_file
+ from compass_lib.project.models import CompassMakFile
+ from compass_lib.survey.format import format_dat_file
+ from compass_lib.survey.models import CompassDatFile
+
+
+ class ConversionError(Exception):
+     """Error raised for invalid conversion operations."""
+
+
+ def detect_file_format(
+     path: Path,
+ ) -> tuple[FileFormat, CompassFileType | None]:
+     """Detect the file format and type based on extension and content.
+
+     Args:
+         path: File path
+
+     Returns:
+         Tuple of (format_type, file_type) where:
+         - format_type: FileFormat.COMPASS or FileFormat.JSON
+         - file_type: CompassFileType.DAT or CompassFileType.MAK (or None)
+     """
+
+     match f_ext := path.suffix.lower():
+         case FileExtension.DAT.value:
+             return (FileFormat.COMPASS, CompassFileType.DAT)
+
+         case FileExtension.MAK.value:
+             return (FileFormat.COMPASS, CompassFileType.MAK)
+
+         case FileExtension.JSON.value:
+             # Read the file to detect format from content
+             content = path.read_text(encoding=JSON_ENCODING)
+             if (
+                 f'"format": "{FormatIdentifier.COMPASS_DAT.value}"' in content
+                 or f'"format":"{FormatIdentifier.COMPASS_DAT.value}"' in content
+             ):
+                 return (FileFormat.JSON, CompassFileType.DAT)
+
+             if (
+                 f'"format": "{FormatIdentifier.COMPASS_MAK.value}"' in content
+                 or f'"format":"{FormatIdentifier.COMPASS_MAK.value}"' in content
+             ):
+                 return (FileFormat.JSON, CompassFileType.MAK)
+
+             raise ValueError(f"Unknown file type found inside json: `{f_ext}`")
+
+         case _:
+             raise ValueError(f"Unknown file extension: `{f_ext}`")
+
+
+ def _convert(
+     input_path: Path,
+     output_path: Path | None = None,
+     target_format: FileFormat | str | None = None,
+ ) -> str | None:
+     """Convert a file between formats.
+
+     Args:
+         input_path: Input file path
+         output_path: Output file path (None = return as string)
+         target_format: Target format (FileFormat or string 'compass'/'json')
+
+     Returns:
+         Converted content as string if output_path is None,
+         otherwise None (writes to file)
+
+     Raises:
+         ConversionError: If conversion is not valid
+         FileNotFoundError: If input file doesn't exist
+     """
+     if not input_path.exists():
+         raise FileNotFoundError(f"Input file not found: {input_path}")
+
+     # Detect source format
+     source_format, file_type = detect_file_format(input_path)
+
+     if file_type is None:
+         raise ConversionError(f"Cannot determine file type for: {input_path}")
+
+     # Normalize target format to enum
+     if isinstance(target_format, str):
+         target_format = FileFormat(target_format)
+     elif target_format is None:
+         # Auto-determine: opposite of source
+         target_format = (
+             FileFormat.JSON
+             if source_format == FileFormat.COMPASS
+             else FileFormat.COMPASS
+         )
+
+     # Validate: no same-format conversion
+     if source_format == target_format:
+         raise ConversionError(
+             f"Invalid conversion: {source_format.value} => {target_format.value}. "
+             f"Source and target formats must be different."
+         )
+
+     # Perform conversion
+     if source_format == FileFormat.COMPASS and target_format == FileFormat.JSON:
+         # Compass -> JSON using Pydantic
+         if file_type == CompassFileType.DAT:
+             trips = read_dat_file(input_path)
+             dat_file = CompassDatFile(trips=trips)
+             # Wrap in format envelope for DAT files
+             envelope = {
+                 "version": "1.0",
+                 "format": FormatIdentifier.COMPASS_DAT.value,
+                 "trips": json.loads(dat_file.model_dump_json(by_alias=True))["trips"],
+             }
+             result = json.dumps(envelope, indent=2, sort_keys=True)
+         else:  # mak
+             project = load_project(input_path)
+             result = project.model_dump_json(indent=2, by_alias=True)
+
+     elif source_format == FileFormat.JSON and target_format == FileFormat.COMPASS:
+         # JSON -> Compass using Pydantic
+         json_str = input_path.read_text(encoding=JSON_ENCODING)
+         if file_type == CompassFileType.DAT:
+             data = json.loads(json_str)
+             # Handle both envelope format and raw format
+             trips_data = data.get("trips", [])
+             dat_file = CompassDatFile.model_validate({"trips": trips_data})
+             result = format_dat_file(dat_file.trips) or ""
+         else:  # mak
+             project = CompassMakFile.model_validate_json(json_str)
+             result = format_mak_file(project.directives) or ""
+
+     else:
+         raise ConversionError(
+             f"Unsupported conversion: {source_format.value} => {target_format.value}"
+         )
+
+     # Output handling
+     if output_path is None:
+         return result
+
+     # Write to file
+     if target_format == FileFormat.JSON:
+         output_path.write_text(result, encoding=JSON_ENCODING)
+     else:
+         # Compass format uses Windows-1252
+         output_path.write_text(result, encoding=COMPASS_ENCODING, errors="replace")
+
+     return None


  def convert(args: list[str]) -> int:
-     parser = argparse.ArgumentParser(prog="compass convert")
+     """Entry point for the convert command."""
+     parser = argparse.ArgumentParser(
+         prog="compass convert",
+         description="Convert Compass files between native and JSON formats",
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+     compass convert -i survey.DAT # Convert to JSON (stdout)
+     compass convert -i survey.DAT -o survey.json # Convert to JSON file
+     compass convert -i project.MAK -f json # Convert to JSON (stdout)
+     compass convert -i survey.json -o survey.DAT # Convert to Compass format
+     compass convert -i survey.json -f compass # Convert to Compass (stdout)
+
+ Supported conversions:
+     compass -> json    Survey/project data to JSON
+     json -> compass    JSON to survey/project data
+
+ Notes:
+     - File type (DAT/MAK) is auto-detected from extension or JSON content
+     - Target format is auto-detected if not specified (opposite of source)
+     - Cannot convert compass -> compass or json -> json
+ """,
+     )

      parser.add_argument(
          "-i",
-         "--input_file",
-         type=str,
-         default=None,
+         "--input-file",
+         type=Path,
          required=True,
-         help="Compass Survey Source File.",
+         help="Input file path (.DAT, .MAK, or .json)",
      )
-
      parser.add_argument(
          "-o",
-         "--output_file",
-         type=str,
+         "--output-file",
+         type=Path,
          default=None,
-         required=True,
-         help="Path to save the converted file at.",
-     )
-
-     parser.add_argument(
-         "-w",
-         "--overwrite",
-         action="store_true",
-         help="Allow overwrite an already existing file.",
-         default=False,
+         help="Output file path (prints to stdout if not specified)",
      )
-
      parser.add_argument(
          "-f",
          "--format",
-         type=str,
-         choices=["json"],
-         required=True,
-         help="Conversion format used.",
+         choices=[FileFormat.COMPASS.value, FileFormat.JSON.value],
+         default=None,
+         dest="target_format",
+         help="Target format: 'compass' or 'json' (auto-detected if not specified)",
      )

      parsed_args = parser.parse_args(args)

-     dmp_file = Path(parsed_args.input_file)
-     if not dmp_file.exists():
-         raise FileNotFoundError(f"Impossible to find: `{dmp_file}`.")
-
-     output_file = Path(parsed_args.output_file)
-     if output_file.exists() and not parsed_args.overwrite:
-         raise FileExistsError(
-             f"The file {output_file} already existing. "
-             "Please pass the flag `--overwrite` to ignore."
+     try:
+         result = _convert(
+             input_path=parsed_args.input_file,
+             output_path=parsed_args.output_file,
+             target_format=parsed_args.target_format,
          )

-     survey = CompassParser.load_dat_file(dmp_file)
-     survey.to_json(filepath=output_file)
+         if result is not None:
+             # Print to stdout
+             pass
+
+         # Print status to stderr if writing to file
+         if parsed_args.output_file is not None:
+             source_format, _file_type = detect_file_format(parsed_args.input_file)
+             (
+                 FileFormat(parsed_args.target_format)
+                 if parsed_args.target_format
+                 else (
+                     FileFormat.JSON
+                     if source_format == FileFormat.COMPASS
+                     else FileFormat.COMPASS
+                 )
+             )
+
+     except ConversionError:
+         return 1
+     except FileNotFoundError:
+         return 1
+     except Exception:  # noqa: BLE001
+         return 1
+
      return 0
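
To make the new conversion path easier to follow: the Compass-to-JSON branch of _convert boils down to parsing the DAT file and wrapping the Pydantic dump in a small envelope. A rough sketch using the public API shown in the diff; the literal format string is an assumption, since the actual value of FormatIdentifier.COMPASS_DAT is not visible here:

    import json
    from pathlib import Path

    from compass_lib import read_dat_file
    from compass_lib.survey.models import CompassDatFile

    # Parse a Compass .DAT file into trip models, then wrap them in the
    # JSON envelope that the convert command emits for DAT files.
    trips = read_dat_file(Path("survey.DAT"))
    dat_file = CompassDatFile(trips=trips)
    envelope = {
        "version": "1.0",
        # Assumption: placeholder for FormatIdentifier.COMPASS_DAT.value.
        "format": "COMPASS_DAT",
        "trips": json.loads(dat_file.model_dump_json(by_alias=True))["trips"],
    }
    print(json.dumps(envelope, indent=2, sort_keys=True))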
@@ -1,11 +1,13 @@
  from __future__ import annotations

  import argparse
+ import base64
  import logging
+ import lzma
  import os
  from pathlib import Path

- from cryptography.fernet import Fernet
+ from cryptography.hazmat.primitives.ciphers.aead import AESSIV
  from dotenv import load_dotenv

  logger = logging.getLogger(__name__)
@@ -39,7 +41,15 @@ def encrypt(args: list[str]) -> int:
          type=str,
          default=None,
          required=True,
-         help="Path of the environment file containing the Fernet key.",
+         help="Path of the environment file containing the AES-SIV key.",
+     )
+
+     parser.add_argument(
+         "-z",
+         "--compress",
+         action="store_true",
+         help="Allow to compress the file before encryption.",
+         default=False,
      )

      parser.add_argument(
@@ -68,22 +78,38 @@ def encrypt(args: list[str]) -> int:
      load_dotenv(envfile, verbose=True, override=True)
      logger.info("Loaded environment variables from: `%s`", envfile)

-     if (fernet_key := os.getenv("ARTIFACT_ENCRYPTION_KEY")) is None:
+     if (key_str := os.getenv("ARTIFACT_ENCRYPTION_KEY")) is None:
          raise ValueError(
-             "No Fernet key found in the environment file. "
+             "No AES-SIV key found in the environment file. "
              "Check if `ARTIFACT_ENCRYPTION_KEY` is set."
          )
-     fernet_key = Fernet(fernet_key)
+
+     key_bytes = base64.urlsafe_b64decode(key_str.encode("ascii"))
+     aead = AESSIV(key_bytes)

      with input_file.open("rb") as f:
          clear_data = f.read()

+     # Compress before encryption if requested
+     # LZMA with preset 9 + PRESET_EXTREME for best compression ratio
+     if parsed_args.compress:
+         lzma_filters = [{"id": lzma.FILTER_LZMA2, "preset": 9 | lzma.PRESET_EXTREME}]
+         data_to_encrypt = lzma.compress(
+             clear_data,
+             format=lzma.FORMAT_XZ,
+             filters=lzma_filters,
+         )
+     else:
+         data_to_encrypt = clear_data
+
      with output_file.open("wb") as f:
-         f.write(fernet_key.encrypt(clear_data))
+         f.write(aead.encrypt(data_to_encrypt, None))

      # Round Trip Check:
      with output_file.open("rb") as f:
-         roundtrip_data = fernet_key.decrypt(f.read())
+         roundtrip_data = aead.decrypt(f.read(), None)
+     if parsed_args.compress:
+         roundtrip_data = lzma.decompress(roundtrip_data)
      assert clear_data == roundtrip_data

      return 0
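
For orientation on the Fernet-to-AES-SIV switch: the artifact key is now read from ARTIFACT_ENCRYPTION_KEY as URL-safe base64, and the payload is optionally LZMA-compressed before encryption. A minimal round-trip sketch of that scheme; the 512-bit key length is an assumption, since the diff does not show how the key is generated:

    import base64
    import lzma

    from cryptography.hazmat.primitives.ciphers.aead import AESSIV

    # Generate a key and encode it the way the CLI expects to find it in
    # ARTIFACT_ENCRYPTION_KEY (URL-safe base64). Key length is assumed.
    key_bytes = AESSIV.generate_key(bit_length=512)
    key_str = base64.urlsafe_b64encode(key_bytes).decode("ascii")

    aead = AESSIV(base64.urlsafe_b64decode(key_str.encode("ascii")))

    clear_data = b"example artifact contents"
    compressed = lzma.compress(clear_data, format=lzma.FORMAT_XZ)

    # Encrypt with no associated data (matching the diff), then reverse both steps.
    ciphertext = aead.encrypt(compressed, None)
    assert lzma.decompress(aead.decrypt(ciphertext, None)) == clear_data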
@@ -0,0 +1,118 @@
+ # -*- coding: utf-8 -*-
+ """GeoJSON export command for Compass files.
+
+ This command converts Compass MAK+DAT files to GeoJSON format,
+ computing station coordinates from the survey data.
+ """
+
+ import argparse
+ import logging
+ from pathlib import Path
+
+ from compass_lib.enums import FileExtension
+ from compass_lib.geojson import convert_mak_to_geojson
+
+ logger = logging.getLogger(__name__)
+
+
+ def geojson(args: list[str]) -> int:
+     """Entry point for the geojson command."""
+     parser = argparse.ArgumentParser(
+         prog="compass geojson",
+         description="Convert Compass MAK+DAT files to GeoJSON format",
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+     compass geojson -i project.MAK # Output to stdout
+     compass geojson -i project.MAK -o cave.geojson # Output to file
+     compass geojson -i project.MAK --no-stations # Only survey legs
+     compass geojson -i project.MAK --passages # Include passage polygons
+
+ Output:
+     The GeoJSON FeatureCollection includes:
+     - Point features for survey stations
+     - LineString features for survey legs (shots)
+     - Polygon features for passage outlines (optional, from LRUD data)
+
+     Coordinates are in UTM meters, with the CRS specified in the output
+     if the MAK file contains UTM zone information.
+
+ Notes:
+     - Station coordinates are computed by traversing shots from fixed points
+     - Fixed points come from link stations with coordinates or project location
+     - If no fixed points exist, the first station is placed at origin (0,0,0)
+     - All coordinates are converted to meters
+ """,
+     )
+
+     parser.add_argument(
+         "-i",
+         "--input-file",
+         type=Path,
+         required=True,
+         help="Input MAK file path",
+     )
+     parser.add_argument(
+         "-o",
+         "--output-file",
+         type=Path,
+         default=None,
+         help="Output GeoJSON file path (prints to stdout if not specified)",
+     )
+     parser.add_argument(
+         "--no-stations",
+         action="store_true",
+         help="Exclude station point features",
+     )
+     parser.add_argument(
+         "--no-legs",
+         action="store_true",
+         help="Exclude survey leg features",
+     )
+     parser.add_argument(
+         "--passages",
+         action="store_true",
+         help="Include passage polygon features (from LRUD data)",
+     )
+
+     parsed_args = parser.parse_args(args)
+
+     # Validate input
+     if not parsed_args.input_file.exists():
+         logger.error("Error: Input file not found: %s", parsed_args.input_file)
+         return 1
+
+     if parsed_args.input_file.suffix.lower() != FileExtension.MAK.value:
+         logger.error(
+             "Error: Input file must be a .MAK file: %s", parsed_args.input_file
+         )
+         return 1
+
+     try:
+         result = convert_mak_to_geojson(
+             parsed_args.input_file,
+             output_path=parsed_args.output_file,
+             include_stations=not parsed_args.no_stations,
+             include_legs=not parsed_args.no_legs,
+             include_passages=parsed_args.passages,
+         )
+
+         if parsed_args.output_file is None:
+             # Print to stdout
+             print(result)  # noqa: T201
+
+         else:
+             # Print status to stderr
+             logger.info(
+                 "Converted %s -> %s", parsed_args.input_file, parsed_args.output_file
+             )
+
+     except FileNotFoundError:
+         logger.exception("FileNotFoundError")
+         return 1
+
+     except Exception:
+         logger.exception("Unknown Problem ...")
+         return 1
+
+     return 0
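
The geojson command is a thin wrapper around compass_lib.geojson.convert_mak_to_geojson. A short programmatic sketch based on the call shown above, assuming the function returns the GeoJSON string when output_path is None (as the stdout branch implies):

    from pathlib import Path

    from compass_lib.geojson import convert_mak_to_geojson

    # Stations and legs only; no passage polygons; print instead of writing a file.
    geojson_str = convert_mak_to_geojson(
        Path("project.MAK"),
        output_path=None,
        include_stations=True,
        include_legs=True,
        include_passages=False,
    )
    print(geojson_str)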
@@ -7,7 +7,7 @@ import compass_lib


  def main():
-     registered_commands = entry_points(group="compass.actions")
+     registered_commands = entry_points(group="compass_lib.actions")

      parser = argparse.ArgumentParser(prog="compass_lib")
      parser.add_argument(
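
This last hunk renames the entry-point group used for subcommand discovery from compass.actions to compass_lib.actions. A tiny sketch of the lookup main() performs after the rename; the listed command names are assumptions based on the functions added above:

    from importlib.metadata import entry_points

    # Enumerate subcommands registered under the renamed group.
    for command in entry_points(group="compass_lib.actions"):
        print(command.name)  # e.g. convert, encrypt, geojson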