sgn_drift-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,91 @@
1
+ Metadata-Version: 2.4
2
+ Name: sgn-drift
3
+ Version: 0.1.0
4
+ Summary: PSD Drift Data Quality Extensions for the SGN Framework
5
+ Author-email: James Kennington <jmk7376@psu.edu>, Zach Yarbrough <jmk7376@psu.edu>, Joshua Black <jmk7376@psu.edu>
6
+ License-Expression: MPL-2.0
7
+ Project-URL: Homepage, https://git.ligo.org/james.kennington/sgn-drift
8
+ Project-URL: Documentation, https://docs.ligo.org/james.kennington/sgn-drift
9
+ Project-URL: Issues, https://git.ligo.org/james.kennington/sgn-drift/issues
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.10
13
+ Description-Content-Type: text/markdown
14
+ Requires-Dist: gwpy
15
+ Requires-Dist: h5py
16
+ Requires-Dist: igwn-ligolw
17
+ Requires-Dist: igwn-segments
18
+ Requires-Dist: lalsuite>=7.26
19
+ Requires-Dist: scipy<1.17
20
+ Requires-Dist: sgn>=0.4
21
+ Requires-Dist: sgn-ts>=0.4
22
+ Requires-Dist: sgn-ligo
23
+ Provides-Extra: test
24
+ Requires-Dist: pytest; extra == "test"
25
+ Requires-Dist: pytest-cov; extra == "test"
26
+ Requires-Dist: pytest-freezer; extra == "test"
27
+ Requires-Dist: pytest-markdown-docs; extra == "test"
28
+ Provides-Extra: docs
29
+ Requires-Dist: mkdocs; extra == "docs"
30
+ Requires-Dist: mkdocstrings; extra == "docs"
31
+ Requires-Dist: mkdocstrings-python; extra == "docs"
32
+ Requires-Dist: mkdocs-material; extra == "docs"
33
+ Requires-Dist: pymdown-extensions; extra == "docs"
34
+ Provides-Extra: lint
35
+ Requires-Dist: black; extra == "lint"
36
+ Requires-Dist: flake8; extra == "lint"
37
+ Requires-Dist: flake8-bandit; extra == "lint"
38
+ Requires-Dist: flake8-black; extra == "lint"
39
+ Requires-Dist: flake8-bugbear; extra == "lint"
40
+ Requires-Dist: flake8-future-annotations; extra == "lint"
41
+ Requires-Dist: flake8-isort; extra == "lint"
42
+ Requires-Dist: flake8-logging-format; extra == "lint"
43
+ Requires-Dist: flake8-pyproject; extra == "lint"
44
+ Requires-Dist: isort; extra == "lint"
45
+ Requires-Dist: mypy; extra == "lint"
46
+ Requires-Dist: mypy-extensions; extra == "lint"
47
+ Provides-Extra: dev
48
+ Requires-Dist: sgn-drift[docs]; extra == "dev"
49
+ Requires-Dist: sgn-drift[lint]; extra == "dev"
50
+ Requires-Dist: sgn-drift[test]; extra == "dev"
51
+
52
+ # SGN-Drift
53
+
54
+ **Geometric Spectral Diagnostics for Gravitational Wave Detectors.**
55
+
56
+ `sgn-drift` is an extension to the [SGN](https://git.ligo.org/greg/sgn) framework designed to quantify the stability of
57
+ the detector noise floor using Information Geometry.
58
+
59
+ ## Installation
60
+
61
+ ```bash
62
+ pip install sgn-drift
63
+ ```
64
+
65
+ ## Example Usage
66
+
67
+ ### Command Line
68
+
69
+ ```bash
70
+ sgn-drift-estimate --detector L1 --start 1263085000 --end 1263089096 --output drift_L1.csv --bands "low:10:50,mid:50:300"
71
+ ```
72
+
73
+ ### Python API
74
+
75
+ ```python
76
+ from sgndrift.bin.estimate_drift import estimate_drift
77
+
78
+ bands = {
79
+ "v_low": (10, 50),
80
+ "v_mid": (50, 300)
81
+ }
82
+
83
+ estimate_drift(
84
+ start=1262600000,
85
+ end=1262604096,
86
+ detector="L1",
87
+ output_drift="analysis.csv",
88
+ bands=bands,
89
+ verbose=True
90
+ )
91
+ ```
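
Each processed stretch of data appends rows to the output CSV via `DriftCSVSink`. As a minimal sketch of inspecting the result, assuming the file contains a `time` column plus one column per configured band (the layout expected by the plotting tools further down), with the band names used above:

```python
# Sketch only: the column names 'time', 'v_low', 'v_mid' are assumptions
# based on the band names configured above and on what plot_drift expects.
import pandas as pd

df = pd.read_csv("analysis.csv")
print(df.columns.tolist())
print(df[["v_low", "v_mid"]].describe())
```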
@@ -0,0 +1,22 @@
1
+ sgndrift/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ sgndrift/_version.py,sha256=5jwwVncvCiTnhOedfkzzxmxsggwmTBORdFL_4wq0ZeY,704
3
+ sgndrift/bin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ sgndrift/bin/estimate_drift.py,sha256=qVp-PacdNRUAEWOrHlvzP-whMRCB_zhteZZDE1Ld_9g,8822
5
+ sgndrift/bin/plot_drift.py,sha256=EYIhluD0asFWoYmLfpu2vsmU5TDHtjmPjCNuY2tvYLI,4746
6
+ sgndrift/bin/plot_drift_comparison.py,sha256=0VknMCS1-szBk5trTARUYvYWEXrid-FnyOf0yM1_lnw,6355
7
+ sgndrift/bin/plot_drift_super.py,sha256=W-OgqfL8_UFDKxyNhNe3ahXteQllu3oHPyidYcJd6Kc,7934
8
+ sgndrift/bin/plot_drift_super_comp.py,sha256=lcHqw4w0xKfTW1DX5pAVOGM3mAlxfc9Xsz12fjlQlTQ,12222
9
+ sgndrift/bin/plot_drift_time.py,sha256=EdLkzUe4Exsi6OJDQYLmY8PH1wSHLoRv5q3ZwKyDS8g,5929
10
+ sgndrift/psd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ sgndrift/psd/drift.py,sha256=3v0UlLc3yQLSJwLHJXQo5Hy1wRM7wV8hkMiiSGtpY8o,2413
12
+ sgndrift/psd/estimators.py,sha256=UdI7SaUOEiQeXbBzWNfY3C0w0tS8NWMzJHzULJW30Ck,4280
13
+ sgndrift/sinks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
+ sgndrift/sinks/drift_sink.py,sha256=PHspvH9LupSmjLWjavvsk2a3TfU5jYXUMN0TV9lTdjs,5058
15
+ sgndrift/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
+ sgndrift/transforms/drift.py,sha256=bbjmrW1q8awkb-oBpeWo94M-pGxHYN88iOKrkbQHL2A,4727
17
+ sgndrift/transforms/psd.py,sha256=6LABGSUpbOE4nKzHEWybHT3ronnuUgLuV5tI46txn34,5427
18
+ sgn_drift-0.1.0.dist-info/METADATA,sha256=-5mp3zaZIktyCf2Ap6ljRQXEGXwd0QMIWaUFqQhtoRw,2828
19
+ sgn_drift-0.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
20
+ sgn_drift-0.1.0.dist-info/entry_points.txt,sha256=emRR36dQDFCkTxwuWEMnN-TeNNTrnlJ-HFeJEw_37vY,362
21
+ sgn_drift-0.1.0.dist-info/top_level.txt,sha256=xmogf6bL8sl_VbrQ6pPMXLdY967VtYYUtFzIg_U3Rq4,9
22
+ sgn_drift-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.10.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,7 @@
1
+ [console_scripts]
2
+ sgn-drift-estimate = sgndrift.bin.estimate_drift:main
3
+ sgn-drift-plot = sgndrift.bin.plot_drift:main
4
+ sgn-drift-plot-comp = sgndrift.bin.plot_drift_comparison:main
5
+ sgn-drift-plot-super = sgndrift.bin.plot_drift_super:main
6
+ sgn-drift-plot-super-comp = sgndrift.bin.plot_drift_super_comp:main
7
+ sgn-drift-plot-time = sgndrift.bin.plot_drift_time:main
@@ -0,0 +1 @@
1
+ sgndrift
sgndrift/__init__.py ADDED
File without changes
sgndrift/_version.py ADDED
@@ -0,0 +1,34 @@
1
+ # file generated by setuptools-scm
2
+ # don't change, don't track in version control
3
+
4
+ __all__ = [
5
+ "__version__",
6
+ "__version_tuple__",
7
+ "version",
8
+ "version_tuple",
9
+ "__commit_id__",
10
+ "commit_id",
11
+ ]
12
+
13
+ TYPE_CHECKING = False
14
+ if TYPE_CHECKING:
15
+ from typing import Tuple
16
+ from typing import Union
17
+
18
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
19
+ COMMIT_ID = Union[str, None]
20
+ else:
21
+ VERSION_TUPLE = object
22
+ COMMIT_ID = object
23
+
24
+ version: str
25
+ __version__: str
26
+ __version_tuple__: VERSION_TUPLE
27
+ version_tuple: VERSION_TUPLE
28
+ commit_id: COMMIT_ID
29
+ __commit_id__: COMMIT_ID
30
+
31
+ __version__ = version = '0.1.0'
32
+ __version_tuple__ = version_tuple = (0, 1, 0)
33
+
34
+ __commit_id__ = commit_id = None
File without changes
@@ -0,0 +1,278 @@
1
+ """
2
+ Drift Estimation Application Logic (Robust & Modular).
3
+
4
+ This script orchestrates the SGN pipeline to estimate Geometric Spectral Drift.
5
+ It includes logic to:
6
+ 1. Identify valid science segments (handling both Internal and Public data).
7
+ 2. Construct the pipeline dynamically for each segment.
8
+ 3. Handle data gaps gracefully without crashing the entire job.
9
+ """
10
+
11
+ import os
12
+ import argparse
13
+ from typing import Optional, Dict, Tuple, List
14
+
15
+ # GW Imports
16
+ from gwpy.segments import DataQualityFlag
17
+
18
+ try:
19
+ from gwosc.timeline import get_segments as get_gwosc_segments
20
+ except ImportError:
21
+ get_gwosc_segments = None
22
+
23
+ # SGN Imports
24
+ from sgn.apps import Pipeline
25
+ from sgn.base import SourceElement
26
+ from sgnligo.sources.gwosc import GWOSCSource
27
+
28
+ from sgndrift.transforms.psd import RecursivePSD
29
+ from sgndrift.transforms.drift import FisherVelocity
30
+ from sgndrift.sinks.drift_sink import DriftCSVSink
31
+
32
+
33
+ def get_science_segments(
34
+ detector: str, start: float, end: float, verbose: bool = False
35
+ ) -> List[Tuple[float, float]]:
36
+ """
37
+ Queries valid science segments.
38
+ 1. Tries internal LIGO DQSegDB (requires grid cert).
39
+ 2. Falls back to Public GWOSC Timeline (open data).
40
+ 3. If both fail or return nothing, falls back to the full requested window.
41
+ """
42
+ # Method A: Internal LIGO Database
43
+ flag_name = f"{detector}:DMT-ANALYSIS_READY:1"
44
+ if verbose:
45
+ print(f"Querying segments for {flag_name} [{start} ... {end}]")
46
+
47
+ try:
48
+ flags = DataQualityFlag.query(flag_name, start, end)
49
+ if not flags.active:
50
+ raise ValueError("No segments found in internal DB.")
51
+ return [(float(seg.start), float(seg.end)) for seg in flags.active]
52
+
53
+ except Exception as e:
54
+ if verbose:
55
+ print(f"Internal segment query failed/empty ({e}). Checking GWOSC...")
56
+
57
+ segments = []
58
+ if get_gwosc_segments:
59
+ try:
60
+ # Method B: Public GWOSC
61
+ segments = get_gwosc_segments(detector, int(start), int(end))
62
+ except Exception as e2:
63
+ print(f"Warning: GWOSC segment query error: {e2}")
64
+ else:
65
+ print("Warning: 'gwosc' library not installed.")
66
+
67
+ if segments:
68
+ if verbose:
69
+ print(f"Found {len(segments)} valid segments in GWOSC.")
70
+ return segments
71
+ else:
72
+ print(f"Warning: No known science segments found for {detector}.")
73
+ print("Defaulting to full window (Pipeline will skip if data is missing).")
74
+ return [(start, end)]
75
+
76
+
77
+ def build_pipeline(
78
+ start: float,
79
+ end: float,
80
+ detector: str,
81
+ output_drift: str,
82
+ alpha: float = 0.1,
83
+ fft_length: float = 1.0,
84
+ overlap: float = 0.5,
85
+ sample_rate: int = 4096,
86
+ bands: Optional[Dict[str, Tuple[float, float]]] = None,
87
+ source_element: Optional[SourceElement] = None,
88
+ verbose: bool = False,
89
+ ) -> Pipeline:
90
+ """
91
+ Constructs the SGN pipeline for Drift Estimation.
92
+ """
93
+ # 1. Setup Source (if not provided)
94
+ if source_element is None:
95
+ source_name = f"gwosc_{detector}_{int(start)}"
96
+ source_element = GWOSCSource(
97
+ name=source_name,
98
+ start=start,
99
+ end=end,
100
+ detectors=[detector],
101
+ sample_rate=sample_rate,
102
+ cache_data=True,
103
+ verbose=verbose,
104
+ )
105
+ source_pad_name = detector
106
+ else:
107
+ source_pad_name = detector
108
+
109
+ # 2. Pipeline Container
110
+ pipe = Pipeline()
111
+
112
+ # 3. PSD Estimator
113
+ psd_est = RecursivePSD(
114
+ name=f"psd_{detector}_{int(start)}",
115
+ sink_pad_names=("in",),
116
+ source_pad_names=("out",),
117
+ fft_length=fft_length,
118
+ overlap=overlap,
119
+ sample_rate=sample_rate,
120
+ alpha=alpha,
121
+ )
122
+
123
+ # 4. Fisher Velocity
124
+ fisher = FisherVelocity(
125
+ name=f"fisher_{detector}_{int(start)}",
126
+ sink_pad_names=("in",),
127
+ source_pad_names=("out",),
128
+ bands=bands or {},
129
+ )
130
+ fisher.configure()
131
+
132
+ # 5. Sink (Appends to CSV)
133
+ sink = DriftCSVSink(
134
+ name=f"sink_{detector}_{int(start)}",
135
+ sink_pad_names=("in",),
136
+ filename=output_drift,
137
+ )
138
+ sink.configure()
139
+
140
+ # 6. Build & Link
141
+ pipe.insert(source_element, psd_est, fisher, sink)
142
+
143
+ pipe.link({psd_est.snks["in"]: source_element.srcs[source_pad_name]})
144
+ pipe.link({fisher.snks["in"]: psd_est.srcs["out"]})
145
+ pipe.link({sink.snks["in"]: fisher.srcs["out"]})
146
+
147
+ return pipe
148
+
149
+
150
+ def estimate_drift(
151
+ start: float,
152
+ end: float,
153
+ detector: str = "H1",
154
+ output_drift: str = "drift.csv",
155
+ source_element: Optional[SourceElement] = None,
156
+ method: str = "recursive",
157
+ alpha: float = 0.1,
158
+ fft_length: float = 1.0,
159
+ overlap: float = 0.5,
160
+ sample_rate: float = 16384.0,
161
+ bands: Optional[Dict[str, Tuple[float, float]]] = None,
162
+ force: bool = False,
163
+ verbose: bool = False,
164
+ ):
165
+ """
166
+ High-level Orchestrator.
167
+ Handles Gap Checks -> Segment Loops -> Pipeline Build -> Pipeline Run.
168
+ """
169
+ # 1. Clear output file (if starting fresh CLI run)
170
+ if source_element is None and os.path.exists(output_drift):
171
+ if verbose:
172
+ print(f"Clearing previous output: {output_drift}")
173
+ os.remove(output_drift)
174
+
175
+ # 2. Determine Segments
176
+ segments = []
177
+ if force:
178
+ if verbose:
179
+ print("Force Mode: Skipping segment check. Using full window.")
180
+ segments = [(start, end)]
181
+ elif source_element is None:
182
+ segments = get_science_segments(detector, start, end, verbose)
183
+ else:
184
+ # If custom source provided, trust the user
185
+ segments = [(start, end)]
186
+
187
+ if not segments:
188
+ print("No valid segments found. Use --force to attempt anyway.")
189
+ return
190
+
191
+ # 3. Process Each Segment
192
+ for seg_start, seg_end in segments:
193
+ duration = seg_end - seg_start
194
+ # Skip tiny segments
195
+ if duration < (fft_length * 8):
196
+ continue
197
+
198
+ if verbose:
199
+ print(f"Processing segment: {seg_start:.1f} - {seg_end:.1f}")
200
+
201
+ try:
202
+ # A. BUILD
203
+ pipe = build_pipeline(
204
+ start=seg_start,
205
+ end=seg_end,
206
+ detector=detector,
207
+ output_drift=output_drift,
208
+ alpha=alpha,
209
+ fft_length=fft_length,
210
+ overlap=overlap,
211
+ sample_rate=int(sample_rate),
212
+ bands=bands,
213
+ source_element=source_element,  # None -> build_pipeline creates a GWOSCSource
214
+ verbose=verbose,
215
+ )
216
+
217
+ # B. RUN
218
+ pipe.run()
219
+
220
+ except RuntimeError as e:
221
+ # Handle GWOSC "No Data" errors gracefully
222
+ err_str = str(e)
223
+ if "Cannot find a GWOSC dataset" in err_str or "No data found" in err_str:
224
+ print(f"[Gap] No data available for {seg_start}-{seg_end}. Skipping.")
225
+ else:
226
+ print(f"[Error] Pipeline Runtime Error: {e}")
227
+ continue
228
+
229
+ except Exception as e:
230
+ print(f"[Error] Critical failure in segment {seg_start}-{seg_end}: {e}")
231
+ continue
232
+
233
+ if verbose:
234
+ print(f"Analysis complete. Results in {output_drift}")
235
+
236
+
237
+ def parse_args():
238
+ parser = argparse.ArgumentParser(description="Estimate Geometric Spectral Drift.")
239
+ parser.add_argument("--detector", type=str, default="H1", help="Detector ID")
240
+ parser.add_argument("--start", type=float, required=True, help="GPS Start")
241
+ parser.add_argument("--end", type=float, required=True, help="GPS End")
242
+ parser.add_argument("--output", type=str, default="drift.csv", help="Output CSV")
243
+ parser.add_argument("--alpha", type=float, default=0.1, help="Filter decay")
244
+ parser.add_argument("--fft-length", type=float, default=1.0, help="FFT sec")
245
+ parser.add_argument("--sample-rate", type=float, default=4096.0, help="Hz")
246
+ parser.add_argument("--bands", type=str, default=None, help="name:min:max,...")
247
+ parser.add_argument("--force", action="store_true", help="Ignore segment checks")
248
+ parser.add_argument("-v", "--verbose", action="store_true", help="Verbose")
249
+ return parser.parse_args()
250
+
251
+
252
+ def main():
253
+ args = parse_args()
254
+
255
+ # Parse band string
256
+ bands_dict = {}
257
+ if args.bands:
258
+ for item in args.bands.split(","):
259
+ parts = item.split(":")
260
+ if len(parts) == 3:
261
+ bands_dict[parts[0]] = (float(parts[1]), float(parts[2]))
262
+
263
+ estimate_drift(
264
+ start=args.start,
265
+ end=args.end,
266
+ detector=args.detector,
267
+ output_drift=args.output,
268
+ alpha=args.alpha,
269
+ fft_length=args.fft_length,
270
+ sample_rate=args.sample_rate,
271
+ bands=bands_dict if bands_dict else None,
272
+ force=args.force,
273
+ verbose=args.verbose,
274
+ )
275
+
276
+
277
+ if __name__ == "__main__":
278
+ main()
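
The README example goes through `estimate_drift`, which handles segment queries and gap recovery; `build_pipeline` above can also be driven directly for a single known-good stretch of data. A minimal sketch, with illustrative GPS times, bands, and output path (the default GWOSC source is assumed to be able to serve this window):

```python
# Minimal sketch: build and run one drift pipeline without segment queries.
# GPS window, band edges, and the output filename are illustrative only.
from sgndrift.bin.estimate_drift import build_pipeline

pipe = build_pipeline(
    start=1262600000,
    end=1262604096,
    detector="L1",
    output_drift="drift_single_segment.csv",
    alpha=0.1,
    fft_length=1.0,
    overlap=0.5,
    sample_rate=4096,
    bands={"v_low": (10, 50), "v_mid": (50, 300)},
    verbose=True,
)
pipe.run()  # rows are appended to the CSV by DriftCSVSink
```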
@@ -0,0 +1,177 @@
1
+ """
2
+ Drift Visualization Tool.
3
+
4
+ Generates 2D scatter plots (Phase Space projections) of Geometric Drift data.
5
+
6
+ Features:
7
+ - Scatter plot of Band X vs Band Y.
8
+ - Coloring:
9
+ - Time: Linear scale (Seconds from start).
10
+ - Bands: Log10 of the band magnitude, for better contrast.
11
+ """
12
+
13
+ import argparse
14
+ import sys
15
+ from typing import Optional
16
+
17
+ import matplotlib.pyplot as plt
18
+ import pandas as pd
19
+ import numpy as np
20
+
21
+
22
+ def load_data(
23
+ filepath: str, start: Optional[float] = None, end: Optional[float] = None
24
+ ) -> pd.DataFrame:
25
+ """Loads drift data from CSV and applies time filtering."""
26
+ try:
27
+ df = pd.read_csv(filepath)
28
+ except FileNotFoundError:
29
+ print(f"Error: File not found at {filepath}")
30
+ sys.exit(1)
31
+ except Exception as e:
32
+ print(f"Error reading CSV: {e}")
33
+ sys.exit(1)
34
+
35
+ df.columns = df.columns.str.strip()
36
+
37
+ if "time" not in df.columns:
38
+ print("Error: CSV must contain a 'time' column.")
39
+ sys.exit(1)
40
+
41
+ if start is not None:
42
+ df = df[df["time"] >= start]
43
+ if end is not None:
44
+ df = df[df["time"] <= end]
45
+
46
+ if df.empty:
47
+ print("Error: No data found in the specified time range.")
48
+ sys.exit(1)
49
+
50
+ return df
51
+
52
+
53
+ def make_plot(
54
+ df: pd.DataFrame,
55
+ x_col: str,
56
+ y_col: str,
57
+ output_path: str,
58
+ color_col: Optional[str] = None,
59
+ log_scale: bool = True,
60
+ title: Optional[str] = None,
61
+ ):
62
+ """Generates and saves the scatter plot."""
63
+ if x_col not in df.columns or y_col not in df.columns:
64
+ print(f"Error: Columns '{x_col}' or '{y_col}' not found.")
65
+ print(f"Available: {list(df.columns)}")
66
+ sys.exit(1)
67
+
68
+ x_data = df[x_col]
69
+ y_data = df[y_col]
70
+
71
+ # --- Coloring Logic ---
72
+ c_data = None
73
+ c_label = None
74
+ cmap = None
75
+
76
+ if color_col:
77
+ if color_col.lower() == "time":
78
+ # Linear Scale for Time
79
+ c_data = df["time"] - df["time"].min()
80
+ c_label = "Time (seconds from start)"
81
+ cmap = "turbo"
82
+ elif color_col in df.columns:
83
+ # Explicit Log10 Transform for Bands
84
+ raw_data = df[color_col].copy()
85
+
86
+ # Mask zeros/negative values to avoid -inf
87
+ raw_data[raw_data <= 0] = np.nan
88
+ c_data = np.log10(raw_data)
89
+
90
+ c_label = f"Log10({color_col} Magnitude)"
91
+ cmap = "plasma" # High contrast
92
+ else:
93
+ print(f"Warning: Color column '{color_col}' not found. Using solid color.")
94
+
95
+ # --- Plotting ---
96
+ plt.figure(figsize=(10, 8))
97
+
98
+ sc = plt.scatter(
99
+ x_data,
100
+ y_data,
101
+ c=c_data if c_data is not None else "tab:blue",
102
+ cmap=cmap,
103
+ s=15,
104
+ alpha=0.6,
105
+ edgecolors="none",
106
+ )
107
+
108
+ if c_data is not None:
109
+ plt.colorbar(sc, label=c_label)
110
+
111
+ plt.xlabel(f"{x_col} Drift Velocity")
112
+ plt.ylabel(f"{y_col} Drift Velocity")
113
+
114
+ if log_scale:
115
+ plt.xscale("log")
116
+ plt.yscale("log")
117
+ plt.grid(True, which="both", alpha=0.2)
118
+ else:
119
+ plt.grid(True, alpha=0.3)
120
+
121
+ if title:
122
+ plt.title(title)
123
+ else:
124
+ plt.title(f"Geometric Drift: {x_col} vs {y_col}")
125
+
126
+ try:
127
+ plt.savefig(output_path, dpi=150)
128
+ print(f"Plot saved to: {output_path}")
129
+ except Exception as e:
130
+ print(f"Error saving plot: {e}")
131
+
132
+
133
+ def parse_args():
134
+ parser = argparse.ArgumentParser(
135
+ description="Visualize Geometric Drift Phase Space."
136
+ )
137
+ parser.add_argument("input_file", type=str, help="Path to input CSV file")
138
+ parser.add_argument("--x-band", type=str, default="v_low", help="X-axis column")
139
+ parser.add_argument("--y-band", type=str, default="v_mid", help="Y-axis column")
140
+ parser.add_argument(
141
+ "-o", "--output", type=str, default="drift_plot.png", help="Output filename"
142
+ )
143
+
144
+ parser.add_argument("--start", type=float, default=None, help="Start GPS")
145
+ parser.add_argument("--end", type=float, default=None, help="End GPS")
146
+
147
+ parser.add_argument(
148
+ "--color-by",
149
+ type=str,
150
+ default=None,
151
+ help="Column to color by (e.g. 'time', 'v_high'). Default: No color.",
152
+ )
153
+
154
+ parser.add_argument(
155
+ "--linear", action="store_true", help="Use linear scale (Default: Log)"
156
+ )
157
+ parser.add_argument("--title", type=str, default=None, help="Custom plot title")
158
+
159
+ return parser.parse_args()
160
+
161
+
162
+ def main():
163
+ args = parse_args()
164
+ df = load_data(args.input_file, start=args.start, end=args.end)
165
+ make_plot(
166
+ df,
167
+ x_col=args.x_band,
168
+ y_col=args.y_band,
169
+ output_path=args.output,
170
+ color_col=args.color_by,
171
+ log_scale=not args.linear,
172
+ title=args.title,
173
+ )
174
+
175
+
176
+ if __name__ == "__main__":
177
+ main()
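
The same logic is importable outside the `sgn-drift-plot` console script. A minimal sketch, assuming a drift CSV like the one produced in the README example with `time`, `v_low`, and `v_mid` columns:

```python
# Minimal sketch: render one phase-space projection from an existing CSV.
# The input path and column names are illustrative assumptions.
from sgndrift.bin.plot_drift import load_data, make_plot

df = load_data("analysis.csv")  # start=/end= GPS bounds are optional
make_plot(
    df,
    x_col="v_low",
    y_col="v_mid",
    output_path="drift_low_vs_mid.png",
    color_col="time",   # colour points by seconds from the first sample
    log_scale=True,
)
```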