sxs 2024.0.36.tar.gz → 2024.0.38.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sxs-2024.0.36 → sxs-2024.0.38}/.github/workflows/build.yml +5 -6
- {sxs-2024.0.36 → sxs-2024.0.38}/CITATION.cff +2 -2
- {sxs-2024.0.36 → sxs-2024.0.38}/PKG-INFO +1 -1
- sxs-2024.0.38/sxs/__version__.py +1 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/caltechdata/__init__.py +0 -5
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/metadata/__init__.py +1 -0
- sxs-2024.0.38/sxs/metadata/metric.py +148 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/local.py +11 -3
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/simulation.py +139 -80
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/simulations.py +8 -42
- sxs-2024.0.38/sxs/utilities/string_converters.py +47 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/waveform_modes.py +4 -5
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/conftest.py +5 -1
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_horizons.py +4 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_metadata.py +2 -2
- sxs-2024.0.38/tests/test_simulation.py +78 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_utilities.py +2 -1
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_waveform_rotations.py +3 -1
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_waveforms.py +6 -1
- sxs-2024.0.36/sxs/__version__.py +0 -1
- sxs-2024.0.36/tests/test_simulation.py +0 -67
- {sxs-2024.0.36 → sxs-2024.0.38}/.codecov.yml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/.github/dependabot.yml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/.github/scripts/parse_bump_rule.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/.github/workflows/pr_rtd_link.yml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/.gitignore +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/.readthedocs.yaml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/LICENSE +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/README.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/catalog.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/horizons.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/load.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/metadata.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/simulation.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/simulations.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/time_series.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/api/waveforms.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/html/main.html +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/images/favicon.ico +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/index.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/javascript/mathjax.js +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/julia.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/mathematica.md +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/stylesheets/extra.css +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/00-Introduction.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/01-Simulations_and_Metadata.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/02-Simulation.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/03-Horizons.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/04-Waveforms.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/docs/tutorials/05-PreprocessingForFFTs.ipynb +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/mkdocs.yml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/pyproject.toml +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/caltechdata/catalog.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/caltechdata/login.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/catalog/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/catalog/catalog.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/catalog/create.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/catalog/description.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/handlers.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/horizons/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/horizons/spec_horizons_h5.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/horizons/xor_multishuffle_bzip2.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/julia/GWFrames.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/julia/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/juliapkg.json +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/metadata/metadata.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/time_series.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/bitwise.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/decimation/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/decimation/greedy_spline.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/decimation/linear_bisection.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/decimation/peak_greed.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/decimation/suppression.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/dicts.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/downloads.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/files.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/formats.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/inspire.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/comparisons.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/conversion.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/dataset.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/horizons.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/metadata.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/waveform_amp_phase.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/lvcnr/waveforms.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/monotonicity.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/pretty_print.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/ads.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/arxiv.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/fairchild_report.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/inspire.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/journal_abbreviations.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/references/references.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/select.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/smooth_functions.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/sxs_directories.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/sxs_identifiers.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/utilities/url.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/alignment.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/grathena.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/lvc.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/nrar.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/rotating_paired_diff_multishuffle_bzip2.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/rotating_paired_xor_multishuffle_bzip2.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/format_handlers/spectre_cce_v1.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/memory.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/mode_utilities.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/transformations.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/waveform_grid.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/waveform_mixin.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/waveforms/waveform_signal.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/api/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/api/deposit.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/api/login.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/api/records.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/catalog.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/creators.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/simannex.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/sxs/zenodo/surrogatemodeling.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/__init__.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_catalog.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_julia.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_loader.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_time_series.py +0 -0
- {sxs-2024.0.36 → sxs-2024.0.38}/tests/test_transformations.py +0 -0
{sxs-2024.0.36 → sxs-2024.0.38}/.github/workflows/build.yml

```diff
@@ -47,12 +47,9 @@ jobs:
         if: ${{ env.skipping_build_and_test_replicate != 'true' }}
         uses: actions/cache@v4
         with:
-          key: sxs-${{ runner.os }}-
+          key: sxs-${{ runner.os }}-1 # Increment this number whenever the cached files should change
           path: |
-
-          restore-keys: |
-            sxs-${{ runner.os }}-
-            sxs-
+            ${{ runner.os == 'Linux' && '/home/runner/.cache/sxs' || runner.os == 'Windows' && 'C:\Users\runneradmin\.sxs\cache' || '/Users/runner/.sxs/cache' }}
 
       - name: Set up Julia
         if: ${{ env.skipping_build_and_test_replicate != 'true' }}
```
```diff
@@ -78,7 +75,9 @@ jobs:
 
       - name: Run tests
         if: ${{ env.skipping_build_and_test_replicate != 'true' }}
-
+        shell: bash
+        run: |
+          hatch run test
 
       - name: Upload coverage
         if: "matrix.python-version == '3.13' && matrix.os == 'ubuntu-latest'"
```
{sxs-2024.0.36 → sxs-2024.0.38}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sxs
-Version: 2024.0.36
+Version: 2024.0.38
 Summary: Interface to data produced by the Simulating eXtreme Spacetimes collaboration
 Project-URL: Homepage, https://github.com/sxs-collaboration/sxs
 Project-URL: Documentation, https://sxs.readthedocs.io/
```
sxs-2024.0.38/sxs/__version__.py

```diff
@@ -0,0 +1 @@
+__version__ = "2024.0.38"
```
{sxs-2024.0.36 → sxs-2024.0.38}/sxs/caltechdata/__init__.py

```diff
@@ -1,4 +1,3 @@
-import copy
 import time
 import warnings
 import re
@@ -11,10 +10,6 @@ from .. import sxs_id, Metadata
 from ..utilities import sxs_identifier_regex, SimpleVersion
 
 
-# To do:
-# - Finish up the code to create a new version from an existing one
-
-
 def mtime(f):
     """Look for git or filesystem modification time
 
```
sxs-2024.0.38/sxs/metadata/metric.py

```diff
@@ -0,0 +1,148 @@
+from ..utilities.string_converters import *
+import numpy as np
+
+class MetadataMetric:
+    """A metric for comparing metadata.
+
+    This class is designed to be used as a callable object that takes
+    two collections of metadata (`sxs.Metadata`, `dict`, `pd.Series`)
+    and returns a number measuring the distance between the metadata.
+
+    With the default arguments, this will not strictly be a metric, as
+    it does not satisfy the triangle inequality. However, it is
+    intended to be used as a heuristic for sorting and filtering
+    metadata, rather than as a strict metric for clustering or
+    classification.
+
+    Parameters
+    ----------
+    parameters : list of str, optional
+        The names of the metadata fields to be compared. The defaults
+        are the reference quantities for mass ratio, spin,
+        eccentricity, and mean anomaly. Note that all of these fields
+        *must* be present in *both* metadata collections. (The
+        `Metadata.add_standard_parameters` method may be useful here.)
+    metric : array_like, optional
+        The matrix used to weight the differences in the parameters.
+        The default is a diagonal matrix with ones on the diagonal,
+        except for the mean-anomaly entry, which is 1/pi^2.
+    allow_different_object_types : bool, optional
+        If True, metadata with different object types (BHBH, BHNS,
+        NSNS) will be compared without penalty. If False, metadata
+        with different object types will be assigned an infinite
+        distance.
+    eccentricity_threshold1 : float, optional
+        The threshold eccentricity below which we consider metadata1
+        non-eccentric. Default is 1e-2.
+    eccentricity_threshold2 : float, optional
+        The threshold eccentricity below which we consider metadata2
+        non-eccentric. Default is 1e-3.
+    eccentricity_threshold_penalize_shorter : int, optional
+        The number of orbits below which we penalize metadata2 for
+        having a non-zero eccentricity when metadata1 does not. This
+        is intended to avoid ascribing small distances to systems with
+        shorter inspirals. Default is 20.
+
+    The mean anomaly, if present, is treated specially to account for
+    the fact that a mean anomaly of 0 is equivalent to a mean anomaly
+    of 2π. The difference between the entries in the two metadata
+    collections is "unwrapped" before the metric is applied.
+
+    If the eccentricity of metadata1 is below
+    `eccentricity_threshold1`, then the mean anomaly is ignored. If
+    that is true and the eccentricity of metadata2 is below
+    `eccentricity_threshold2` *and* the number of orbits in metadata2
+    is longer than `eccentricity_threshold_penalize_shorter`, then the
+    eccentricity is also ignored. You may set these arguments to 0 to
+    disable these features.
+
+    """
+    def __init__(
+        self,
+        parameters=[
+            "reference_mass_ratio",
+            "reference_dimensionless_spin1",
+            "reference_dimensionless_spin2",
+            "reference_eccentricity",
+            "reference_mean_anomaly",
+        ],
+        metric=np.diag([1, 1, 1, 1, 1, 1, 1, 1, 1/np.pi**2]),
+        allow_different_object_types=False,
+        eccentricity_threshold1=1e-2,
+        eccentricity_threshold2=1e-3,
+        eccentricity_threshold_penalize_shorter=20,
+    ):
+        self.parameters = parameters
+        self.metric = metric
+        self.allow_different_object_types = allow_different_object_types
+        self.eccentricity_threshold1 = eccentricity_threshold1
+        self.eccentricity_threshold2 = eccentricity_threshold2
+        self.eccentricity_threshold_penalize_shorter = eccentricity_threshold_penalize_shorter
+
+    def __call__(self, metadata1, metadata2, debug=False):
+        if not self.allow_different_object_types:
+            type1 = (
+                metadata1["object_types"]
+                if "object_types" in metadata1
+                else "".join(sorted([
+                    metadata1.get("object1", "A").upper(),
+                    metadata1.get("object2", "B").upper()
+                ]))
+            )
+            type2 = (
+                metadata2["object_types"]
+                if "object_types" in metadata2
+                else "".join(sorted([
+                    metadata2.get("object1", "C").upper(),
+                    metadata2.get("object2", "D").upper()
+                ]))
+            )
+            if type1 != type2:
+                return np.inf
+
+        values1 = [metadata1[parameter] for parameter in self.parameters]
+        values2 = [metadata2[parameter] for parameter in self.parameters]
+
+        if debug:
+            print(f"{self.parameters=}")
+            print(f"{values1=}")
+            print(f"{values2=}")
+
+        if "reference_mean_anomaly" in self.parameters:
+            i = self.parameters.index("reference_mean_anomaly")
+            values1[i], values2[i] = np.unwrap([floater(values1[i]), floater(values2[i])])
+
+        if "reference_eccentricity" in self.parameters:
+            # Either way, we first make sure that the corresponding entries are floats.
+            i = self.parameters.index("reference_eccentricity")
+            values1[i] = metadata1.get("reference_eccentricity_bound", floaterbound(values1[i]))
+            values2[i] = metadata2.get("reference_eccentricity_bound", floaterbound(values2[i]))
+
+            if values1[i] < self.eccentricity_threshold1:
+                # Then we consider metadata1 a non-eccentric system...
+
+                # ...so we ignore the mean anomaly entirely...
+                if "reference_mean_anomaly" in self.parameters:
+                    i_ma = self.parameters.index("reference_mean_anomaly")
+                    values1[i_ma] = values2[i_ma]
+
+                # ...and we ignore the eccentricity if metadata2 is also non-eccentric,
+                # and longer than eccentricity_threshold_penalize_shorter.
+                if (
+                    values2[i] < self.eccentricity_threshold2
+                    and metadata2.get(
+                        "number_of_orbits",
+                        metadata2.get("number_of_orbits_from_start", 0)
+                    ) > self.eccentricity_threshold_penalize_shorter
+                ):
+                    values1[i] = values2[i]
+
+        difference = (
+            np.concatenate(list(map(np.atleast_1d, values1)))
+            - np.concatenate(list(map(np.atleast_1d, values2)))
+        )
+
+        if debug:
+            print(f"{difference=}")
+
+        return difference @ self.metric @ difference
```
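`MetadataMetric` is callable, so one way to exercise the new class is to compare two metadata collections directly. The sketch below is illustrative only: the dictionaries and their values are made up, but they contain the five default parameters the metric expects.

```python
import numpy as np
from sxs.metadata.metric import MetadataMetric

# Hypothetical metadata; real values would normally come from sxs.Metadata
# objects (which must contain every field listed in `parameters`).
m1 = {
    "object_types": "BHBH",
    "reference_mass_ratio": 1.5,
    "reference_dimensionless_spin1": np.array([0.0, 0.0, 0.3]),
    "reference_dimensionless_spin2": np.array([0.0, 0.0, -0.1]),
    "reference_eccentricity": 3e-4,
    "reference_mean_anomaly": 0.1,
    "number_of_orbits": 25,
}
m2 = dict(m1, reference_mass_ratio=1.6, reference_mean_anomaly=6.2)

metric = MetadataMetric()
print(metric(m1, m2))                             # small weighted squared distance
print(metric(m1, dict(m2, object_types="BHNS")))  # inf: different object types
```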
{sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/local.py

```diff
@@ -1,4 +1,5 @@
 from pathlib import Path
+from datetime import datetime, timezone
 from .. import sxs_id, Metadata, sxs_directory
 from ..utilities import sxs_identifier_re
 from ..zenodo import path_to_invenio
@@ -147,16 +148,23 @@ def local_simulations(annex_dir, compute_md5=False, show_progress=False):
 
             metadata["directory"] = str(dirpath.relative_to(annex_dir))
 
+            simulations[key] = metadata
+
+            files = files_to_upload(dirpath, annex_dir)
+
+            metadata["mtime"] = datetime.fromtimestamp(
+                max(file.resolve().stat().st_mtime for file in files),
+                tz=timezone.utc,
+            ).isoformat()
+
             metadata["files"] = {
                 path_to_invenio(file.relative_to(dirpath)): {
                     "link": str(file),
                     "size": file.stat().st_size,
                     "checksum": md5checksum(file) if compute_md5 else "",
                 }
-                for file in
+                for file in files
             }
-
-            simulations[key] = metadata
         except KeyboardInterrupt:
             raise
         except Exception as e:
```
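For reference, the new `"mtime"` field added above is just the most recent modification time among the files to be uploaded, stored as an ISO-8601 UTC timestamp. A standalone sketch of the same computation, using a glob as a stand-in for `files_to_upload(...)`:

```python
from datetime import datetime, timezone
from pathlib import Path

files = list(Path(".").glob("*.py"))  # stand-in for files_to_upload(dirpath, annex_dir)
if files:  # max() would raise on an empty sequence
    mtime = datetime.fromtimestamp(
        max(f.resolve().stat().st_mtime for f in files),
        tz=timezone.utc,
    ).isoformat()
    print(mtime)  # e.g. "2024-07-01T12:34:56.789012+00:00"
```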
{sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/simulation.py

```diff
@@ -53,12 +53,13 @@ def Simulation(location, *args, **kwargs):
         SXS ID, but can also include the version and Lev number, as
         described above.
 
-
-
-    ignore_deprecation : bool
+    Keyword Arguments
+    -----------------
+    ignore_deprecation : bool, optional
         If `True`, completely bypass checking for deprecation or
-        supersession. No warnings or errors will be issued.
-
+        supersession. No warnings or errors will be issued. Default
+        is `False`.
+    auto_supersede : bool, optional
         If `True`, automatically load the superseding simulation, if
         there is only one. If there are multiple superseding
         simulations, an error will be raised, and you must explicitly
```
```diff
@@ -66,7 +67,11 @@ def Simulation(location, *args, **kwargs):
         issued, but the superseding simulation will be loaded. Note
         that this can also be set in the configuration file with
         `sxs.write_config(auto_supersede=True)`.
-
+    metadata_metric : MetadataMetric, optional
+        Metric to use for comparing simulations when automatically
+        superseding deprecated a simulation. If not provided, the
+        default metric will be used.
+    extrapolation : str, optional
         The extrapolation order to use for the strain and Psi4 data.
         This is only relevant for versions 1 and 2 of the data format,
         both of which default to "N2". Other options include "N3",
```
```diff
@@ -74,14 +79,13 @@ def Simulation(location, *args, **kwargs):
         polynomials in 1/r with degree `x`, while "Outer" refers to
         data extracted at the outermost extraction radius but
         corrected for time-dilation and areal-radius effects.
-
+    download_file_info : bool, optional
         If `True`, download the information about the files from the
-
-
-
-
-
-        configured.
+        CaltechDATA record. If `False`, only use the file information
+        that is already available (which will raise an error if the
+        file information has not previously been downloaded). If not
+        present, use the value from the configuration file, defaulting
+        to `True` if it is not configured.
 
     Returns
     -------
```
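Putting the documented keyword arguments together, a call through `sxs.load` (which hands SXS IDs off to `Simulation`) might look like the following. The ID, version, and Lev are hypothetical and only illustrate the location format described above.

```python
import sxs

# Hypothetical location string: ID plus version plus resolution (Lev)
sim = sxs.load(
    "SXS:BBH:1234v2.0/Lev3",
    extrapolation="N3",         # only meaningful for data-format versions 1 and 2
    download_file_info=False,   # rely on previously downloaded file information
)
```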
```diff
@@ -96,6 +100,7 @@ def Simulation(location, *args, **kwargs):
 
     """
    from .. import load, sxs_directory
+    from ..metadata.metric import MetadataMetric
 
     # Load the simulation catalog
     simulations = load("simulations")
```
```diff
@@ -132,81 +137,58 @@
     sxs_id = f"{sxs_id_stem}{version}"
     url = f"{doi_url}{sxs_id}"
 
-    # Deal with
-    deprecated =
-    if not kwargs.get("ignore_deprecation", False):
+    # Deal with deprecations
+    deprecated = "deprecated" in metadata.get("keywords", [])
+    if deprecated and not kwargs.get("ignore_deprecation", False):
         auto_supersede = kwargs.get("auto_supersede", read_config("auto_supersede", False))
-        if (
-            input_version
-            and not auto_supersede
-            and deprecated
-        ):
-            message = ("\n"
-                + f"Simulation '{sxs_id_stem}' is deprecated and/or superseded.\n"
-                + "Normally, this simulation should no longer be used, but you\n"
-                + f"explicitly requested version '{input_version}', so it is being used.\n"
-            )
-            warn(message)
-        else:
-            if "superseded_by" in metadata:
-                superseded_by = metadata["superseded_by"]
-                if auto_supersede and isinstance(superseded_by, list):
-                    raise ValueError(
-                        f"`auto_supersede` is enabled, but simulation '{sxs_id}' is\n"
-                        + "superseded by multiple simulations. You must choose one\n"
-                        + "explicitly from the list:\n"
-                        + "\n".join(f" {s}" for s in superseded_by)
-                        + "\nAlternatively, you could pass `ignore_deprecation=True` or\n"
-                        + "specify a version to load this waveform anyway."
-                    )
-                elif auto_supersede and isinstance(superseded_by, str):
-                    # raise NotImplementedError(
-                    # f"\nSimulation '{sxs_id}' cannot be automatically superseded.\n"
-                    # + "The auto_supersede option is temporarily disabled. The superseding\n"
-                    # + "simulations have been removed from the metadata, and the new function\n"
-                    # + "to load them has not yet been implemented. Please specify a version.\n"
-                    # )
-                    message = f"\nSimulation '{sxs_id}' is being automatically superseded by '{superseded_by}'."
-                    warn(message)
-                    new_location = f"{superseded_by}{input_version}"
-                    if input_lev_number:
-                        new_location += f"/Lev{input_lev_number}"
-                    return Simulation(new_location, *args, **kwargs)
-                elif isinstance(superseded_by, list):
-                    raise ValueError(
-                        f"Simulation '{sxs_id}' is superseded by multiple simulations.\n"
-                        + "Even if you enable `auto_supersede`, with multiple options, you\n"
-                        + "must choose one explicitly from the list:\n"
-                        + "\n".join(f" {s}" for s in superseded_by)
-                        + "\nAlternatively, you could pass `ignore_deprecation=True` or\n"
-                        + "specify a version to load this waveform anyway."
-                    )
-                elif isinstance(superseded_by, str):
-                    raise ValueError(
-                        f"Simulation '{sxs_id}' is superseded by '{superseded_by}'.\n"
-                        + "Note that you could enable `auto_supersede` to automatically\n"
-                        + "load the superseding simulation. Alternatively, you could\n"
-                        + "pass `ignore_deprecation=True` or specify a version to load\n"
-                        + "this waveform anyway."
-                    )
-                else:
-                    raise ValueError(
-                        f"Simulation '{sxs_id}' is superseded by '{superseded_by}'.\n"
-                        + "Note that you could pass `ignore_deprecation=True` or\n"
-                        + "specify a version to load this waveform anyway."
-                    )
-            if "deprecated" in metadata.get("keywords", []):
+        if not bool(auto_supersede):
+            if not input_version:
                 raise ValueError(
-                    f"Simulation '{
-                    +
-                    +
+                    f"Simulation '{location}' is deprecated. You could\n"
+                    + " 1. pass `ignore_deprecation=True` to load the latest available version,\n"
+                    + " 2. manually choose a different simulation from the catalog,\n"
+                    + " 3. pass `auto_supersede=True` to load the closest match in the catalog, or\n"
+                    + f" 4. include the version number, as in '{sxs_id_stem}v2.0', to load a specific version.\n"
+                )
+            else:
+                message = ("\n"
+                    + f"Simulation '{sxs_id_stem}' is deprecated, but you explicitly\n"
+                    + f"requested version '{input_version}', so it is being used.\n"
+                    + f"Pass `ignore_deprecation=True` to quiet this warning.\n"
+                )
+                warn(message)
+        else:
+            if input_version:
+                message = ("\n"
+                    + f"\nSimulation '{sxs_id}' is deprecated. You explicitly requested.\n"
+                    + f"version '{input_version}', but you also passed the `auto_supersede` option.\n"
+                    + f"Using the specified version, as that takes precedence.\n"
+                )
+                warn(message)
+            else:
+                original_kwargs = kwargs.copy()
+                original_kwargs["ignore_deprecation"] = True
+                original = Simulation(location, *args, **original_kwargs)
+                metadata_metric = kwargs.pop("metadata_metric", MetadataMetric())
+                superseding = original.closest_simulation(
+                    dataframe=simulations.dataframe,
+                    metadata_metric=metadata_metric
                 )
+                message = f"\nSimulation '{sxs_id}' is being automatically superseded by '{superseding}'."
+                warn(message)
+                new_location = f"{superseding}{input_version}"
+                if input_lev_number:
+                    new_location += f"/Lev{input_lev_number}"
+                return Simulation(new_location, *args, **kwargs)
 
     # Note the deprecation status in the kwargs, even if ignoring deprecation
     kwargs["deprecated"] = deprecated
 
+    # TODO: Default to not downloading file info
+    # TODO: In that case, deal with Lev numbers somehow
+
     # We want to do this *after* deprecation checking, to avoid possibly unnecessary web requests
-    files = get_file_info(metadata, sxs_id, download=kwargs.get("
+    files = get_file_info(metadata, sxs_id, download=kwargs.get("download_file_info", None))
 
     # If Lev is given as part of `location`, use it; otherwise, use the highest available
     lev_numbers = sorted({lev for f in files if (lev:=lev_number(f))})
```
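In user-facing terms, the rewritten block above changes what happens when a deprecated simulation is requested. A hedged sketch of the three paths (the ID below is a stand-in, not necessarily a deprecated simulation in the real catalog):

```python
import sxs

deprecated_id = "SXS:BBH:0001"  # stand-in for some deprecated simulation

# 1. Default: with no version and no auto_supersede, this now raises a
#    ValueError listing the options spelled out in the message above.
# sxs.load(deprecated_id)

# 2. Load it anyway, either by ignoring deprecation or by pinning a version:
sim_old = sxs.load(deprecated_id, ignore_deprecation=True)

# 3. Let sxs substitute the closest undeprecated simulation, as judged by
#    MetadataMetric (warns, then loads the match):
sim_new = sxs.load(deprecated_id, auto_supersede=True)
```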
```diff
@@ -328,6 +310,82 @@ class SimulationBase:
     def __str__(self):
         return repr(self)
 
+    def distances(self, dataframe=None, metadata_metric=None, drop_deprecated=False):
+        """Measure the distance from this simulation to others
+
+        Parameters
+        ----------
+        dataframe : pandas.DataFrame, optional
+            DataFrame of simulations to compare to. If not provided,
+            the full catalog of simulations will be loaded as
+            `sxs.load("simulations").dataframe`.
+        metadata_metric : MetadataMetric, optional
+            Metric to use for comparing simulations. If not provided,
+            the default metric will be used.
+        drop_deprecated : bool, optional
+            If `True`, remove deprecated simulations from the
+            `dataframe` before measuring distances.
+
+        Returns
+        -------
+        distances : pandas.Series
+            Distance from this simulation to each element of the
+            `dataframe`. This series will be indexed by the index of
+            the `dataframe`. If this simulation is in the
+            `dataframe`, it will have a distance of 0.
+
+        See Also
+        --------
+        simulations_sorted_by_distance : Sort dataframe of simulations
+            by "distance" to this one
+        sxs.metadata.metric.MetadataMetric : Metric for comparing
+            metadata
+
+        """
+        from ..metadata.metric import MetadataMetric
+        from .. import load
+        if dataframe is None:
+            dataframe = load("simulations").dataframe
+        metadata_metric = metadata_metric or MetadataMetric()
+        if drop_deprecated:
+            dataframe = dataframe[~dataframe.deprecated]
+        return dataframe.apply(
+            lambda m: metadata_metric(self.metadata, m),
+            axis=1
+        )
+
+    def closest_simulation(self, dataframe=None, metadata_metric=None):
+        """Return the closest undeprecated simulation to this one
+
+        Note that any simulation in `dataframe` with zero distance
+        from this one will be ignored; the returned index will not
+        refer to this simulation, even if it is undeprecated.
+
+        Parameters
+        ----------
+        dataframe : pandas.DataFrame, optional
+            DataFrame of simulations to compare to. If not provided,
+            the full catalog of simulations will be loaded as
+            `sxs.load("simulations").dataframe`.
+        metadata_metric : MetadataMetric, optional
+            Metric to use for comparing simulations. If not provided,
+            the default metric will be used.
+
+        Returns
+        -------
+        closest_index : str
+            Index of the closest undeprecated simulation in the
+            `dataframe`.
+
+        """
+        d = self.distances(
+            dataframe=dataframe,
+            metadata_metric=metadata_metric,
+            drop_deprecated=True
+        )
+        d = d[d > 0].sort_values()
+        return d.index[0]
+
     @property
     def dataframe(self):
         return self.series
```
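A minimal sketch of the two new methods, assuming a simulation object loaded as usual (the ID is hypothetical):

```python
import sxs

sims = sxs.load("simulations")
sim = sxs.load("SXS:BBH:1234", ignore_deprecation=True)  # hypothetical ID

# Weighted squared distances to every undeprecated catalog entry, nearest first
nearest = sim.distances(dataframe=sims.dataframe, drop_deprecated=True).sort_values()
print(nearest.head())

# Or just the single best match (never the simulation itself)
print(sim.closest_simulation(dataframe=sims.dataframe))
```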
```diff
@@ -753,6 +811,7 @@ class Simulation_v2(SimulationBase):
 
 
 def get_file_info(metadata, sxs_id, download=None):
+    # TODO: Allow an existing zenodo_metadata.json file to be used
     from .. import load_via_sxs_id
     if "files" in metadata:
         return metadata["files"]
```
{sxs-2024.0.36 → sxs-2024.0.38}/sxs/simulations/simulations.py

```diff
@@ -4,6 +4,9 @@ import collections
 import numpy as np
 import pandas as pd
 
+from ..utilities.string_converters import *
+
+
 class SimulationsDataFrame(pd.DataFrame):
     @property
     def BBH(self):
```
```diff
@@ -87,6 +90,11 @@ class SimulationsDataFrame(pd.DataFrame):
         return type(self)(self[
             np.isfinite(total_mass) & (total_mass > 0) & (normalized_ADM > 1)
         ])
+
+    @property
+    def undeprecated(self):
+        """Restrict dataframe to just simulations that are not deprecated"""
+        return type(self)(self[~self["deprecated"]])
 
 
 class Simulations(collections.OrderedDict):
```
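The new `undeprecated` property gives a one-liner for filtering the catalog dataframe, e.g.:

```python
import sxs

df = sxs.load("simulations").dataframe
print(len(df), len(df.undeprecated))  # total rows vs. rows not flagged "deprecated"
```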
```diff
@@ -416,46 +424,6 @@ class Simulations(collections.OrderedDict):
         if col not in simulations.columns:
             simulations[col] = np.nan
 
-        def floater(x):
-            try:
-                f = float(x)
-            except:
-                f = np.nan
-            return f
-
-        def floaterbound(x):
-            try:
-                f = float(x)
-            except:
-                try:
-                    f = float(x.replace("<", ""))
-                except:
-                    f = np.nan
-            return f
-
-        def norm(x):
-            try:
-                n = np.linalg.norm(x)
-            except:
-                n = np.nan
-            return n
-
-        def three_vec(x):
-            try:
-                a = np.array(x, dtype=float)
-                if a.shape != (3,):
-                    raise ValueError("Don't understand input as a three-vector")
-            except:
-                a = np.array([np.nan, np.nan, np.nan])
-            return a
-
-        def datetime_from_string(x):
-            try:
-                dt = pd.to_datetime(x).tz_convert("UTC")
-            except:
-                dt = pd.to_datetime("1970-1-1").tz_localize("UTC")
-            return dt
-
         sims_df = SimulationsDataFrame(pd.concat((
             simulations["reference_time"].map(floater),
             simulations["reference_mass_ratio"].map(floater),
```
```diff
@@ -523,7 +491,6 @@
             simulations["number_of_orbits"].map(floater),
             simulations["number_of_orbits_from_start"].map(floater),
             simulations["number_of_orbits_from_reference_time"].map(floater),
-            # simulations["superseded_by"],
             simulations["DOI_versions"],
             simulations["keywords"],
             simulations["date_link_earliest"].map(datetime_from_string),
```
```diff
@@ -533,7 +500,6 @@
         ), axis=1))
 
         sims_df.insert(0, "deprecated", (
-            # ~sims_df.superseded_by.isna() |
             sims_df["keywords"].map(lambda ks: "deprecated" in ks)
         ))
 
```
sxs-2024.0.38/sxs/utilities/string_converters.py

```diff
@@ -0,0 +1,47 @@
+
+# Assemble some helper functions
+
+def floater(x):
+    import numpy as np
+    try:
+        f = float(x)
+    except:
+        f = np.nan
+    return f
+
+def floaterbound(x):
+    import numpy as np
+    try:
+        f = float(x)
+    except:
+        try:
+            f = float(x.replace("<", ""))
+        except:
+            f = np.nan
+    return f
+
+def norm(x):
+    import numpy as np
+    try:
+        n = np.linalg.norm(x)
+    except:
+        n = np.nan
+    return n
+
+def three_vec(x):
+    import numpy as np
+    try:
+        a = np.array(x, dtype=float)
+        if a.shape != (3,):
+            raise ValueError("Don't understand input as a three-vector")
+    except:
+        a = np.array([np.nan, np.nan, np.nan])
+    return a
+
+def datetime_from_string(x):
+    import pandas as pd
+    try:
+        dt = pd.to_datetime(x).tz_convert("UTC")
+    except:
+        dt = pd.to_datetime("1970-1-1").tz_localize("UTC")
+    return dt
```