gwsim 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. gwsim/__init__.py +11 -0
  2. gwsim/__main__.py +8 -0
  3. gwsim/cli/__init__.py +0 -0
  4. gwsim/cli/config.py +88 -0
  5. gwsim/cli/default_config.py +56 -0
  6. gwsim/cli/main.py +101 -0
  7. gwsim/cli/merge.py +150 -0
  8. gwsim/cli/repository/__init__.py +0 -0
  9. gwsim/cli/repository/create.py +91 -0
  10. gwsim/cli/repository/delete.py +51 -0
  11. gwsim/cli/repository/download.py +54 -0
  12. gwsim/cli/repository/list_depositions.py +63 -0
  13. gwsim/cli/repository/main.py +38 -0
  14. gwsim/cli/repository/metadata/__init__.py +0 -0
  15. gwsim/cli/repository/metadata/main.py +24 -0
  16. gwsim/cli/repository/metadata/update.py +58 -0
  17. gwsim/cli/repository/publish.py +52 -0
  18. gwsim/cli/repository/upload.py +74 -0
  19. gwsim/cli/repository/utils.py +47 -0
  20. gwsim/cli/repository/verify.py +61 -0
  21. gwsim/cli/simulate.py +220 -0
  22. gwsim/cli/simulate_utils.py +596 -0
  23. gwsim/cli/utils/__init__.py +85 -0
  24. gwsim/cli/utils/checkpoint.py +178 -0
  25. gwsim/cli/utils/config.py +347 -0
  26. gwsim/cli/utils/hash.py +23 -0
  27. gwsim/cli/utils/retry.py +62 -0
  28. gwsim/cli/utils/simulation_plan.py +439 -0
  29. gwsim/cli/utils/template.py +56 -0
  30. gwsim/cli/utils/utils.py +149 -0
  31. gwsim/cli/validate.py +255 -0
  32. gwsim/data/__init__.py +8 -0
  33. gwsim/data/serialize/__init__.py +9 -0
  34. gwsim/data/serialize/decoder.py +59 -0
  35. gwsim/data/serialize/encoder.py +44 -0
  36. gwsim/data/serialize/serializable.py +33 -0
  37. gwsim/data/time_series/__init__.py +3 -0
  38. gwsim/data/time_series/inject.py +104 -0
  39. gwsim/data/time_series/time_series.py +355 -0
  40. gwsim/data/time_series/time_series_list.py +182 -0
  41. gwsim/detector/__init__.py +8 -0
  42. gwsim/detector/base.py +156 -0
  43. gwsim/detector/detectors/E1_2L_Aligned_Sardinia.interferometer +22 -0
  44. gwsim/detector/detectors/E1_2L_Misaligned_Sardinia.interferometer +22 -0
  45. gwsim/detector/detectors/E1_Triangle_EMR.interferometer +19 -0
  46. gwsim/detector/detectors/E1_Triangle_Sardinia.interferometer +19 -0
  47. gwsim/detector/detectors/E2_2L_Aligned_EMR.interferometer +22 -0
  48. gwsim/detector/detectors/E2_2L_Misaligned_EMR.interferometer +22 -0
  49. gwsim/detector/detectors/E2_Triangle_EMR.interferometer +19 -0
  50. gwsim/detector/detectors/E2_Triangle_Sardinia.interferometer +19 -0
  51. gwsim/detector/detectors/E3_Triangle_EMR.interferometer +19 -0
  52. gwsim/detector/detectors/E3_Triangle_Sardinia.interferometer +19 -0
  53. gwsim/detector/noise_curves/ET_10_HF_psd.txt +3000 -0
  54. gwsim/detector/noise_curves/ET_10_full_cryo_psd.txt +3000 -0
  55. gwsim/detector/noise_curves/ET_15_HF_psd.txt +3000 -0
  56. gwsim/detector/noise_curves/ET_15_full_cryo_psd.txt +3000 -0
  57. gwsim/detector/noise_curves/ET_20_HF_psd.txt +3000 -0
  58. gwsim/detector/noise_curves/ET_20_full_cryo_psd.txt +3000 -0
  59. gwsim/detector/noise_curves/ET_D_psd.txt +3000 -0
  60. gwsim/detector/utils.py +90 -0
  61. gwsim/glitch/__init__.py +7 -0
  62. gwsim/glitch/base.py +69 -0
  63. gwsim/mixin/__init__.py +8 -0
  64. gwsim/mixin/detector.py +203 -0
  65. gwsim/mixin/gwf.py +192 -0
  66. gwsim/mixin/population_reader.py +175 -0
  67. gwsim/mixin/randomness.py +107 -0
  68. gwsim/mixin/time_series.py +295 -0
  69. gwsim/mixin/waveform.py +47 -0
  70. gwsim/noise/__init__.py +19 -0
  71. gwsim/noise/base.py +134 -0
  72. gwsim/noise/bilby_stationary_gaussian.py +117 -0
  73. gwsim/noise/colored_noise.py +275 -0
  74. gwsim/noise/correlated_noise.py +257 -0
  75. gwsim/noise/pycbc_stationary_gaussian.py +112 -0
  76. gwsim/noise/stationary_gaussian.py +44 -0
  77. gwsim/noise/white_noise.py +51 -0
  78. gwsim/repository/__init__.py +0 -0
  79. gwsim/repository/zenodo.py +269 -0
  80. gwsim/signal/__init__.py +11 -0
  81. gwsim/signal/base.py +137 -0
  82. gwsim/signal/cbc.py +61 -0
  83. gwsim/simulator/__init__.py +7 -0
  84. gwsim/simulator/base.py +315 -0
  85. gwsim/simulator/state.py +85 -0
  86. gwsim/utils/__init__.py +11 -0
  87. gwsim/utils/datetime_parser.py +44 -0
  88. gwsim/utils/et_2l_geometry.py +165 -0
  89. gwsim/utils/io.py +167 -0
  90. gwsim/utils/log.py +145 -0
  91. gwsim/utils/population.py +48 -0
  92. gwsim/utils/random.py +69 -0
  93. gwsim/utils/retry.py +75 -0
  94. gwsim/utils/triangular_et_geometry.py +164 -0
  95. gwsim/version.py +7 -0
  96. gwsim/waveform/__init__.py +7 -0
  97. gwsim/waveform/factory.py +83 -0
  98. gwsim/waveform/pycbc_wrapper.py +37 -0
  99. gwsim-0.1.0.dist-info/METADATA +157 -0
  100. gwsim-0.1.0.dist-info/RECORD +103 -0
  101. gwsim-0.1.0.dist-info/WHEEL +4 -0
  102. gwsim-0.1.0.dist-info/entry_points.txt +2 -0
  103. gwsim-0.1.0.dist-info/licenses/LICENSE +21 -0
gwsim/cli/validate.py ADDED
@@ -0,0 +1,255 @@
+ """
+ Validation functions for CLI commands.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+ from typing import Annotated
+
+ import typer
+
+
+ def validate_command(  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,import-outside-toplevel
+     paths: Annotated[
+         list[Path],
+         typer.Argument(
+             help="Files/directories to validate. Can be output files, metadata files, or directories containing either."
+         ),
+     ],
+     metadata_paths: Annotated[
+         list[str] | None, typer.Option("--metadata-paths", help="Additional metadata files or directories")
+     ] = None,
+     pattern: Annotated[
+         str | None, typer.Option("--pattern", help="File pattern to match (e.g., '*noise*' for subset validation)")
+     ] = None,
+     metadata_pattern: Annotated[
+         str | None, typer.Option("--metadata-pattern", help="Metadata file pattern to match")
+     ] = "*metadata.yaml",
+ ) -> None:
+     """Validate output files against metadata hashes and other checks.
+
+     This command verifies the integrity of generated simulation files by:
+     1. Loading metadata files and extracting the expected file hashes
+     2. Recomputing hashes for the actual output files
+     3. Comparing hashes and reporting mismatches
+     4. (Planned) checking sampling rates and continuity
+
+     The command automatically detects whether each provided path is:
+     - An output file (.gwf, etc.) - the corresponding metadata is located automatically
+     - A metadata file (.metadata.yaml) - its output files are validated
+     - A directory - scanned for both kinds of files
+
+     Examples:
+         Validate specific output files (finds metadata automatically):
+             gwsim validate H1-NOISE-123.gwf L1-SIGNAL-456.gwf
+
+         Validate against specific metadata files:
+             gwsim validate /path/to/output/ --metadata-paths signal-0.metadata.yaml --metadata-paths noise-0.metadata.yaml
+
+         Validate all files in a directory:
+             gwsim validate /path/to/output/
+
+         Validate a subset using a pattern:
+             gwsim validate /path/to/output/ --pattern "*noise*"
+
+         Mix files and directories:
+             gwsim validate H1-NOISE-123.gwf /path/to/more/files/
+
+     Args:
+         paths: Output files, metadata files, or directories containing either.
+         metadata_paths: Additional metadata files or directories.
+         pattern: Glob pattern to filter output files (e.g., '*noise*').
+         metadata_pattern: Glob pattern to filter metadata files.
+     """
+     import fnmatch
+     import logging
+
+     import yaml
+     from rich.console import Console
+     from rich.table import Table
+
+     from gwsim.cli.utils.hash import compute_file_hash
+
+     logger = logging.getLogger("gwsim")
+
+     console = Console()
+
+     logger.info("Validating simulation files...")
+
+     # Separate inputs into metadata files, potential output files, and directories
+     metadata_files: list[Path] = []
+     output_files: list[Path] = []
+     output_directories: list[Path] = []
+     metadata_directories: list[Path] = []
+
+     for path in paths:
+         if path.is_dir():
+             output_directories.append(path)
+         elif path.is_file():
+             # Assume it's an output file
+             output_files.append(path)
+         else:
+             console.print(f"[red]Error:[/red] Path not found: {path}")
+
+     for path_str in metadata_paths or []:
+         path = Path(path_str)
+         if path.is_dir():
+             metadata_directories.append(path)
+         elif path.is_file():
+             if path.suffix == ".yaml" and "metadata" in path.name:
+                 metadata_files.append(path)
+             else:
+                 console.print(f"[yellow]Warning:[/yellow] Ignoring non-metadata file: {path}")
+         else:
+             console.print(f"[red]Error:[/red] Metadata path not found: {path}")
+
+     # Scan directories for files
+     for directory in output_directories:
+         for file_path in directory.rglob("*"):
+             if file_path.is_file():
+                 output_files.append(file_path)
+
+     for directory in metadata_directories:
+         for file_path in directory.rglob("*.yaml"):
+             if "metadata" in file_path.name and file_path.is_file():
+                 metadata_files.append(file_path)
+
+     # Apply pattern filtering if specified
+     if pattern:
+         output_files = [f for f in output_files if fnmatch.fnmatch(f.name, pattern)]
+
+     if metadata_pattern:
+         metadata_files = [f for f in metadata_files if fnmatch.fnmatch(f.name, metadata_pattern)]
+
+     # Build validation plan: output_file -> metadata_file
+     output_to_metadata = {}
+
+     # First, extract output files from provided metadata files
+     for metadata_file in metadata_files:
+         try:
+             with metadata_file.open("r") as f:
+                 metadata = yaml.safe_load(f)
+         except Exception as e:  # pylint: disable=broad-exception-caught
+             logger.error("Error loading metadata %s: %s", metadata_file, e)
+             continue
+
+         output_files_in_meta = metadata.get("output_files", [])
+         globals_config = metadata.get("globals_config", {})
+         output_dir = Path(globals_config.get("output_directory", "."))
+
+         for filename in output_files_in_meta:
+             # Apply pattern filtering
+             if pattern and not fnmatch.fnmatch(filename, pattern):
+                 continue
+
+             output_file = output_dir / filename
+             if output_file not in output_to_metadata:
+                 output_to_metadata[output_file] = metadata_file
+
+     # Then, for explicitly provided output files, find their metadata
+     for output_file in output_files:
+         if output_file not in output_to_metadata:
+             potential_metadata = None
+
+             # First, check if any already-identified metadata file lists this output file
+             for metadata_file in metadata_files:
+                 try:
+                     with metadata_file.open("r") as f:
+                         meta_data = yaml.safe_load(f)
+                     if output_file.name in meta_data.get("output_files", []):
+                         potential_metadata = metadata_file
+                         break
+                 except Exception as e:  # pylint: disable=broad-exception-caught
+                     logger.error("Error reading metadata file %s: %s", metadata_file, e)
+                     continue
+
+             # If not found in existing metadata, search a "metadata" directory next to the file
+             if not potential_metadata:
+                 metadata_dir = output_file.parent / "metadata"
+                 if metadata_dir.exists():
+                     # Look for metadata files that might match
+                     for meta_file in metadata_dir.glob("*.metadata.yaml"):
+                         try:
+                             with meta_file.open("r") as f:
+                                 meta_data = yaml.safe_load(f)
+                             if output_file.name in meta_data.get("output_files", []):
+                                 potential_metadata = meta_file
+                                 break
+                         except Exception as e:  # pylint: disable=broad-exception-caught
+                             logger.error("Error reading metadata file %s: %s", meta_file, e)
+                             continue
+
+             if potential_metadata:
+                 output_to_metadata[output_file] = potential_metadata
+             else:
+                 logger.warning("No metadata found for output file %s", output_file)
+                 output_to_metadata[output_file] = None
+
+     # Combine all metadata files that ended up in the plan
+     all_metadata_files = list({v for v in output_to_metadata.values() if v is not None})
+
+     if not all_metadata_files and not metadata_files:
+         logger.error("Error: No metadata files found")
+         raise typer.Exit(1)
+
+     # Create results table
+     table = Table(title="Validation Results")
+     table.add_column("Metadata File", style="cyan")
+     table.add_column("Output File", style="magenta")
+     table.add_column("Hash Match", style="green")
+     table.add_column("Status", style="yellow")
+
+     total_files = len(output_to_metadata)
+     failed_files = 0
+
+     # Order output for consistent reporting
+     for output_file in sorted(output_to_metadata.keys()):
+         metadata_file = output_to_metadata[output_file]
+         if metadata_file is None:
+             table.add_row("N/A", output_file.name, "N/A", "[red]No metadata found[/red]")
+             failed_files += 1
+             continue
+         try:
+             with metadata_file.open("r") as f:
+                 metadata = yaml.safe_load(f)
+         except Exception as e:  # pylint: disable=broad-exception-caught
+             logger.error("Error loading metadata %s: %s", metadata_file, e)
+             table.add_row(str(metadata_file.name), output_file.name, "N/A", "[red]Error loading metadata[/red]")
+             failed_files += 1
+             continue
+
+         file_hashes = metadata.get("file_hashes", {})
+         expected_hash = file_hashes.get(output_file.name)
+
+         if not output_file.exists():
+             table.add_row(str(metadata_file.name), output_file.name, "N/A", "[red]File not found[/red]")
+             failed_files += 1
+             continue
+
+         if not expected_hash:
+             table.add_row(str(metadata_file.name), output_file.name, "N/A", "[yellow]No hash in metadata[/yellow]")
+             failed_files += 1
+             continue
+
+         try:
+             actual_hash = compute_file_hash(output_file)
+             if actual_hash == expected_hash:
+                 table.add_row(str(metadata_file.name), output_file.name, "[green]✓[/green]", "[green]PASS[/green]")
+             else:
+                 table.add_row(str(metadata_file.name), output_file.name, "[red]✗[/red]", "[red]HASH MISMATCH[/red]")
+                 failed_files += 1
+         except Exception as e:  # pylint: disable=broad-exception-caught
+             table.add_row(str(metadata_file.name), output_file.name, "N/A", f"[red]Error: {e}[/red]")
+             failed_files += 1
+
+     console.print(table)
+     console.print(f"\n[bold]Summary:[/bold] {total_files - failed_files}/{total_files} files passed validation")
+
+     if failed_files > 0:
+         console.print(f"[red]{failed_files} files failed validation[/red]")
+         raise typer.Exit(1)
+     console.print("[green]All files validated successfully![/green]")
gwsim/data/__init__.py ADDED
@@ -0,0 +1,8 @@
+ """Initialization code for the gwsim.data package."""
+
+ from __future__ import annotations
+
+ from gwsim.data.time_series.time_series import TimeSeries
+ from gwsim.data.time_series.time_series_list import TimeSeriesList
+
+ __all__ = ["TimeSeries", "TimeSeriesList"]
gwsim/data/serialize/__init__.py ADDED
@@ -0,0 +1,9 @@
+ """Init file for the serialize module."""
+
+ from __future__ import annotations
+
+ from gwsim.data.serialize.decoder import Decoder
+ from gwsim.data.serialize.encoder import Encoder
+ from gwsim.data.serialize.serializable import JSONSerializable
+
+ __all__ = ["Decoder", "Encoder", "JSONSerializable"]
gwsim/data/serialize/decoder.py ADDED
@@ -0,0 +1,59 @@
+ """Custom JSON decoder for JSONSerializable objects."""
+
+ from __future__ import annotations
+
+ import base64
+ import importlib
+ import json
+ from typing import Any
+
+ import numpy as np
+ from astropy.units import Quantity
+
+
+ class Decoder(json.JSONDecoder):
+     """Custom JSON decoder for JSONSerializable objects.
+
+     Automatically reconstructs objects that have been serialized with
+     the Encoder class by checking for the "__type__" key and calling
+     the appropriate from_json_dict class method.
+     """
+
+     def __init__(self, *args, **kwargs):
+         """Initialize the decoder with a custom object_hook.
+
+         Args:
+             *args: Positional arguments passed to json.JSONDecoder.
+             **kwargs: Keyword arguments passed to json.JSONDecoder.
+         """
+         super().__init__(*args, object_hook=self._object_hook, **kwargs)
+
+     def _object_hook(self, obj: dict[str, Any]) -> Any:
+         """Object hook to reconstruct JSONSerializable objects.
+
+         Args:
+             obj: Dictionary from JSON.
+
+         Returns:
+             Reconstructed object, or the original dict if not a known type.
+         """
+         if "__type__" in obj:
+             type_name = obj["__type__"]
+
+             if type_name == "Quantity":
+                 return Quantity(value=obj["value"], unit=obj["unit"])
+
+             if type_name == "ndarray":
+                 encoded_data = obj["data"]
+                 bytes_data = base64.b64decode(encoded_data)
+                 array = np.frombuffer(bytes_data, dtype=obj["dtype"])
+                 array = array.reshape(obj["shape"])
+                 return array
+
+             # Assume all serializable classes are in the gwsim.data module
+             module = importlib.import_module("gwsim.data")
+             cls = getattr(module, type_name, None)
+             if cls and hasattr(cls, "from_json_dict"):
+                 return cls.from_json_dict(obj)
+
+         return obj
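A quick sketch of the decoder in use: any JSON object tagged with "__type__" is rebuilt by _object_hook, so a serialized astropy Quantity comes back as a real Quantity rather than a plain dict:

    import json

    from gwsim.data.serialize import Decoder

    payload = '{"duration": {"__type__": "Quantity", "value": 4.0, "unit": "s"}}'
    decoded = json.loads(payload, cls=Decoder)
    print(decoded["duration"])  # 4.0 s, an astropy Quantity

Note that for any other "__type__" value the decoder looks the class up in the gwsim.data namespace, so custom serializable types must be importable from there to round-trip automatically.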
gwsim/data/serialize/encoder.py ADDED
@@ -0,0 +1,44 @@
+ """Custom JSON encoder for JSONSerializable objects."""
+
+ from __future__ import annotations
+
+ import base64
+ import json
+ from typing import Any
+
+ import numpy as np
+ from astropy.units import Quantity
+
+
+ class Encoder(json.JSONEncoder):
+     """Custom JSON encoder for JSONSerializable objects."""
+
+     def default(self, o: Any) -> Any:
+         """Serialize JSONSerializable objects to JSON.
+
+         Args:
+             o: Object to serialize.
+
+         Returns:
+             A JSON-serializable representation of the object.
+         """
+         if hasattr(o, "to_json_dict"):
+             encoded = o.to_json_dict()
+             if "__type__" not in encoded:
+                 encoded["__type__"] = o.__class__.__name__
+             return encoded
+         if isinstance(o, Quantity):
+             return {
+                 "__type__": "Quantity",
+                 "value": o.value,
+                 "unit": str(o.unit),
+             }
+         if isinstance(o, np.ndarray):
+             bytes_data = o.tobytes()
+             encoded_data = base64.b64encode(bytes_data).decode("ascii")
+             return {
+                 "__type__": "ndarray",
+                 "data": encoded_data,
+                 "dtype": str(o.dtype),
+                 "shape": list(o.shape),
+                 "encoding": "base64",
+             }
+
+         return super().default(o)
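Together with the decoder above, this gives lossless JSON round-trips for NumPy arrays via the base64 raw-bytes path; a small sketch:

    import json

    import numpy as np

    from gwsim.data.serialize import Decoder, Encoder

    arr = np.arange(6, dtype=np.float64).reshape(2, 3)
    payload = json.dumps({"strain": arr}, cls=Encoder)
    restored = json.loads(payload, cls=Decoder)
    assert np.array_equal(restored["strain"], arr)  # exact: raw bytes, not decimal text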
gwsim/data/serialize/serializable.py ADDED
@@ -0,0 +1,33 @@
+ """Protocols for serializable objects in the simulator."""
+
+ from __future__ import annotations
+
+ from typing import Any, Protocol
+
+
+ class JSONSerializable(Protocol):  # pylint: disable=too-few-public-methods
+     """Protocol for JSON serializable objects.
+
+     Classes implementing this protocol should provide methods to convert
+     the object to and from a JSON-serializable dictionary.
+     """
+
+     def to_json_dict(self) -> dict[str, Any]:
+         """Convert the object to a JSON-serializable dictionary.
+
+         Returns:
+             dict[str, Any]: JSON-serializable dictionary representation of the object.
+         """
+         raise NotImplementedError("to_json_dict method must be implemented by subclasses.")
+
+     @classmethod
+     def from_json_dict(cls, json_dict: dict[str, Any]) -> Any:
+         """Create an object from a JSON-serializable dictionary.
+
+         Args:
+             json_dict (dict[str, Any]): JSON-serializable dictionary representation of the object.
+
+         Returns:
+             JSONSerializable: An instance of the class created from the dictionary.
+         """
+         raise NotImplementedError("from_json_dict method must be implemented by subclasses.")
gwsim/data/time_series/__init__.py ADDED
@@ -0,0 +1,3 @@
+ """Time series module."""
+
+ from __future__ import annotations
gwsim/data/time_series/inject.py ADDED
@@ -0,0 +1,104 @@
+ """Module to handle injection of one TimeSeries into another, with support for time offsets."""
+
+ from __future__ import annotations
+
+ import logging
+ from typing import cast
+
+ import numpy as np
+ from astropy.units import second  # pylint: disable=no-name-in-module
+ from gwpy.timeseries import TimeSeries
+ from scipy.interpolate import interp1d
+
+ logger = logging.getLogger("gwsim")
+
+
+ def inject(timeseries: TimeSeries, other: TimeSeries, interpolate_if_offset: bool = True) -> TimeSeries:
+     """Inject one TimeSeries into another, handling time offsets.
+
+     Args:
+         timeseries: The target TimeSeries to inject into.
+         other: The TimeSeries to be injected.
+         interpolate_if_offset: Whether to interpolate if there is a non-integer sample offset.
+
+     Returns:
+         TimeSeries: The resulting TimeSeries after injection.
+     """
+     # Check whether timeseries is compatible with other
+     timeseries.is_compatible(other)
+
+     # Crop `other` to fit within the target's span
+     if (timeseries.xunit == second) and (other.xspan[0] < timeseries.xspan[0]):
+         other = cast(TimeSeries, other.crop(start=timeseries.xspan[0]))
+     if (timeseries.xunit == second) and (other.xspan[1] > timeseries.xspan[1]):
+         other = cast(TimeSeries, other.crop(end=timeseries.xspan[1]))
+
+     # Check if other is empty after cropping
+     if len(other.times) == 0:
+         logger.debug("Other TimeSeries is empty after cropping to fit; returning original timeseries")
+         return timeseries
+
+     target_times = timeseries.times.value
+     other_times = other.times.value
+     sample_spacing = float(timeseries.dt.value)
+
+     # Calculate the offset between start times, in samples
+     offset = (other_times[0] - target_times[0]) / sample_spacing
+
+     # Check if the offset is aligned (an integer number of samples)
+     if not np.isclose(offset, round(offset)):
+         if not interpolate_if_offset:
+             logger.debug("Non-integer offset of %s samples; not interpolating, returning original timeseries", offset)
+             return timeseries
+
+         # Interpolate to align grids
+         logger.debug("Injecting with interpolation due to non-integer offset of %s samples", offset)
+
+         # Determine the overlap range on the target time grid
+         start_idx = int(np.searchsorted(target_times, other_times[0], side="left"))
+         end_idx = int(np.searchsorted(target_times, other_times[-1], side="right")) - 1
+
+         if start_idx >= len(target_times) or end_idx < 0 or start_idx > end_idx:
+             logger.debug("No overlap between timeseries and other after searching; returning original timeseries")
+             return timeseries
+
+         interp_func = interp1d(other_times, other.value, kind="cubic", axis=0, bounds_error=False, fill_value=0.0)
+         resampled = interp_func(target_times[start_idx : end_idx + 1])
+
+         # Create a new TimeSeries with explicit parameters to avoid floating-point precision issues
+         injected_data = timeseries.value.copy()
+         injected_data[start_idx : end_idx + 1] += resampled
+         injected = TimeSeries(
+             injected_data,
+             t0=timeseries.t0,
+             dt=timeseries.dt,
+             unit=timeseries.unit,
+         )
+         return injected
+
+     # Aligned case: offset is an integer number of samples
+     logger.debug("Injecting with aligned grids (offset: %s samples)", round(offset))
+     start_idx = round(offset)
+     end_idx = start_idx + len(other.value) - 1
+
+     # Bounds check (end_idx >= start_idx, so this also covers start_idx overruns)
+     if start_idx < 0 or end_idx >= len(target_times):
+         logger.warning(
+             "Injection range [%s:%s] out of bounds for timeseries of length %s; skipping injection",
+             start_idx,
+             end_idx,
+             len(target_times),
+         )
+         return timeseries
+
+     # Create a new TimeSeries with explicit parameters to avoid floating-point precision issues
+     injected_data = timeseries.value.copy()
+     inject_len = min(len(other.value), end_idx - start_idx + 1)
+     injected_data[start_idx : start_idx + inject_len] += other.value[:inject_len]
+     injected = TimeSeries(
+         injected_data,
+         t0=timeseries.t0,
+         dt=timeseries.dt,
+         unit=timeseries.unit,
+     )
+     return injected
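A short usage sketch under assumed values (names and numbers are illustrative): a burst whose start time lies exactly on the target's sample grid exercises the aligned branch, while shifting its t0 by a fraction of a sample would trigger the interpolation branch instead:

    import numpy as np
    from gwpy.timeseries import TimeSeries

    from gwsim.data.time_series.inject import inject

    dt = 1.0 / 1024
    background = TimeSeries(np.zeros(4096), t0=0.0, dt=dt)       # 4 s of zeros
    burst = TimeSeries(np.hanning(256), t0=1.0, dt=dt)           # 1.0 s = exactly 1024 samples

    combined = inject(background, burst)
    assert combined.value[1024:1280].max() > 0  # burst landed at the expected offset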