sxs-2023.3.2-py3-none-any.whl → sxs-2024.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,369 @@
+ """Container interface to the catalog of SXS simulations"""
+
+ import functools
+ import collections
+
+
+ class Simulations(collections.OrderedDict):
+     """Interface to the catalog of SXS simulations
+
+     Creation
+     --------
+     You probably don't need to create this object yourself. The
+     easiest way to create this object is just to use the `sxs.load`
+     function:
+
+     ```python
+     import sxs
+
+     simulations = sxs.load("simulations")
+     ```
+     """
+     last_modified_url = "https://api.github.com/repos/sxs-collaboration/sxs/contents/simulations.json?ref=simulations"
+     url = "https://github.com/sxs-collaboration/sxs/raw/simulations/simulations.json"
+
+     def __init__(self, sims):
+         """Initialize the Simulations dictionary
+
+         Note that the constructor is not generally useful from outside
+         this class. See `Simulations.load` for a more useful
+         initialization function, or simply call
+         `sxs.load("simulations")`.
+
+         """
+         from .. import Metadata
+         super(Simulations, self).__init__(
+             (k, Metadata(sims[k])) for k in sorted(sims)
+         )
+
+     @classmethod
+     def remote_timestamp(cls, download):
+         import requests
+         from datetime import datetime, timezone
+         if not download:
+             return datetime.min.replace(tzinfo=timezone.utc)
+         failed = False
+         try:
+             response = requests.head(
+                 Simulations.last_modified_url,
+                 headers={"X-GitHub-Api-Version": "2022-11-28"},
+             )
+             if response.status_code != 200 or "Last-Modified" not in response.headers:
+                 failed = True
+             else:
+                 remote_timestamp = datetime.strptime(
+                     response.headers["Last-Modified"], "%a, %d %b %Y %H:%M:%S GMT"
+                 ).replace(tzinfo=timezone.utc)
+         except:
+             failed = True
+         if failed:
+             print(
+                 f"Failed to get the remote timestamp from <{Simulations.last_modified_url}>.\n"
+                 + "Assuming it is old."
+             )
+             return datetime.min.replace(tzinfo=timezone.utc)
+         return remote_timestamp
+
+     @classmethod
+     @functools.lru_cache()
+     def load(cls, download=None):
+         """Load the catalog of SXS simulations
+
+         Note that — unlike most SXS data files — the simulations file is updated
+         frequently. As a result, this function — unlike the loading functions for most
+         SXS data files — will download the simulations by default each time it is
+         called. However, also note that this function is itself cached, meaning that
+         the same dict will be returned on each call in a given Python session. If you
+         want to avoid that behavior, use `Simulations.reload`.
+
+         Parameters
+         ----------
+         download : {None, bool}, optional
+             If False, this function will look for the simulations in the sxs cache and
+             raise an error if they are not found. If True, this function will download
+             the simulations and raise an error if the download fails. If None (the
+             default), it will try to download the file, warn but fall back to the cache
+             if that fails, and only raise an error if the simulations file is not found
+             in the cache. Note that this ignores the sxs configuration file entirely.
+
+         See Also
+         --------
+         sxs.sxs_directory : Locate cache directory
+         Simulations.reload : Avoid caching the result of this function
+
+         """
+         from datetime import datetime, timezone
+         import json
+         import zipfile
+         from .. import sxs_directory, read_config
+         from ..utilities import download_file
+
+         progress = read_config("download_progress", True)
+
+         remote_timestamp = cls.remote_timestamp(download is not False)  # Test for literal `False`
+
+         cache_path = sxs_directory("cache") / "simulations.zip"
+
+         if cache_path.exists():
+             local_timestamp = datetime.fromtimestamp(cache_path.stat().st_mtime, timezone.utc)
+         elif download is False:
+             raise ValueError(f"Simulations not found in '{cache_path}' and downloading was turned off")
+         else:
+             local_timestamp = datetime.min.replace(tzinfo=timezone.utc)
+
+         download_failed = False
+         if (download or download is None) and remote_timestamp > local_timestamp:
+             # 1. Download the full json file (zipped in flight, but auto-decompressed on arrival)
+             # 2. Zip to a temporary file (using bzip2, which is better than the in-flight compression)
+             # 3. Replace the original simulations.zip with the temporary zip file
+             # 4. Remove the full json file
+             # 5. Make sure the temporary zip file is gone too
+             temp_json = cache_path.with_suffix(".temp.json")
+             temp_zip = cache_path.with_suffix(".temp.zip")
+             try:
+                 try:
+                     download_file(cls.url, temp_json, progress=progress, if_newer=False)
+                 except Exception as e:
+                     if download:
+                         raise RuntimeError(f"Failed to download '{cls.url}'; try setting `download=False`") from e
+                     download_failed = e  # We'll try the cache
+                 else:
+                     if temp_json.exists():
+                         with zipfile.ZipFile(temp_zip, "w", compression=zipfile.ZIP_BZIP2) as simulations_zip:
+                             simulations_zip.write(temp_json, arcname="simulations.json")
+                         temp_zip.replace(cache_path)
+             finally:
+                 temp_json.unlink(missing_ok=True)
+                 temp_zip.unlink(missing_ok=True)
+
+         if not cache_path.exists():
+             if download is False:  # Test if it literally *is* False, rather than just casts to False
+                 raise ValueError(f"The simulations file was not found in '{cache_path}', and downloading was turned off")
+             elif download_failed:
+                 raise ValueError(f"Simulations not found in '{cache_path}' and download failed") from download_failed
+             else:
+                 raise ValueError(f"Simulations not found in '{cache_path}' for unknown reasons")
+
+         try:
+             with zipfile.ZipFile(cache_path, "r") as simulations_zip:
+                 try:
+                     with simulations_zip.open("simulations.json") as simulations_json:
+                         try:
+                             simulations = json.load(simulations_json)
+                         except Exception as e:
+                             raise ValueError(f"Failed to parse 'simulations.json' in '{cache_path}'") from e
+                 except Exception as e:
+                     raise ValueError(f"Failed to open 'simulations.json' in '{cache_path}'") from e
+         except Exception as e:
+             raise ValueError(f"Failed to open '{cache_path}' as a ZIP file") from e
+
+         return cls(simulations)
+
+     @classmethod
+     def reload(cls, download=True):
+         """Reload the catalog of SXS simulations, without caching
+
+         Clears the cache of `Simulations.load` and returns the result of calling it
+         again. Note that in this function, the default value of `download` is `True`,
+         rather than `None` as in `Simulations.load` — though both behaviors are
+         available.
+
+         Parameters
+         ----------
+         download : {None, bool}, optional
+             If False, this function will look for the simulations in the sxs cache and
+             raise an error if they are not found. If True (the default), this function
+             will download the simulations and raise an error if the download fails. If
+             None, it will try to download the file, warn but fall back to the cache if
+             that fails, and only raise an error if the simulations file is not found in
+             the cache. Note that this ignores the sxs configuration file entirely.
+
+         See Also
+         --------
+         sxs.sxs_directory : Locate cache directory
+         Simulations.load : Caching version of this function
+
+         """
+         cls.load.cache_clear()
+         return cls.load(download=download)
+
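Before the `dataframe` property below, a brief usage sketch of the caching behavior just defined; it assumes `Simulations` is re-exported at the package top level, as the `sxs.load("simulations")` examples in the docstrings suggest:

```python
import sxs

sims = sxs.load("simulations")    # may download, then caches via lru_cache
again = sxs.load("simulations")   # same session: the cached object, no download
# `again is sims` should hold, because `Simulations.load` is lru_cache'd

fresh = sxs.Simulations.reload()  # clears the cache and loads again (download=True)
```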
+     @property
+     def dataframe(self):
+         """Create pandas.DataFrame containing metadata for all
+         simulations
+
+         Note that `pandas` is the standard Python interface for
+         heterogeneous data tables, like the one we have here. This
+         interface allows for more convenient slicing and querying of
+         data than the list of `dict`s provided by the `Simulations`
+         object.
+
+         This can also be a more convenient way to access the metadata
+         because the raw metadata has missing keys and mixed formats.
+         If a key is missing from the metadata for a particular
+         simulation, the dataframe will just have a `NaN` in that
+         entry, rather than raising an exception. Other keys may have
+         unexpected entries — such as the `"reference_eccentricity"`
+         field, which is *usually* a float but may be a string like
+         "<0.0001" if the eccentricity is not known precisely but is
+         only bounded. The dataframe introduces a new column called
+         `"reference_eccentricity_bound"` that is always a float giving
+         an upper bound on the eccentricity.
+
+         See the `pandas` documentation for more information on how to
+         use the resulting dataframe, or the `Simulations` tutorial for
+         examples.
+
+         """
+         import numpy as np
+         import pandas as pd
+         from datetime import datetime, timezone
+
+         if hasattr(self, "_dataframe"):
+             return self._dataframe
+
+         simulations = pd.DataFrame.from_dict(self, orient="index")
+
+         def floater(x):
+             try:
+                 f = float(x)
+             except:
+                 f = np.nan
+             return f
+
+         def floaterbound(x):
+             try:
+                 f = float(x)
+             except:
+                 try:
+                     f = float(x.replace("<", ""))
+                 except:
+                     f = np.nan
+             return f
+
+         def norm(x):
+             try:
+                 n = np.linalg.norm(x)
+             except:
+                 n = np.nan
+             return n
+
+         def three_vec(x):
+             try:
+                 a = np.array(x, dtype=float)
+                 if a.shape != (3,):
+                     raise ValueError("Don't understand input as a three-vector")
+             except:
+                 a = np.array([np.nan, np.nan, np.nan])
+             return a
+
+         def datetime_from_string(x):
+             try:
+                 dt = datetime.strptime(x, "%Y-%m-%dT%H:%M:%S%z")
+             except:
+                 dt = datetime.min.replace(tzinfo=timezone.utc)
+             return dt
+
+         sims_df = pd.concat((
+             simulations["object_types"].astype("category"),
+             simulations["initial_data_type"].astype("category"),
+             simulations["initial_separation"].map(floater),
+             simulations["initial_orbital_frequency"].map(floater),
+             simulations["initial_adot"].map(floater),
+             simulations["initial_ADM_energy"].map(floater),
+             simulations["initial_ADM_linear_momentum"].map(three_vec),
+             simulations["initial_ADM_linear_momentum"].map(norm).rename("initial_ADM_linear_momentum_mag"),
+             simulations["initial_ADM_angular_momentum"].map(three_vec),
+             simulations["initial_ADM_angular_momentum"].map(norm).rename("initial_ADM_angular_momentum_mag"),
+             simulations["initial_mass1"].map(floater),
+             simulations["initial_mass2"].map(floater),
+             simulations["initial_mass_ratio"].map(floater),
+             simulations["initial_dimensionless_spin1"].map(three_vec),
+             simulations["initial_dimensionless_spin1"].map(norm).rename("initial_dimensionless_spin1_mag"),
+             simulations["initial_dimensionless_spin2"].map(three_vec),
+             simulations["initial_dimensionless_spin2"].map(norm).rename("initial_dimensionless_spin2_mag"),
+             simulations["initial_position1"].map(three_vec),
+             simulations["initial_position2"].map(three_vec),
+             simulations["reference_time"].map(floater),
+             (
+                 simulations["reference_position1"].map(three_vec)
+                 - simulations["reference_position2"].map(three_vec)
+             ).map(norm).rename("reference_separation"),
+             simulations["reference_orbital_frequency"].map(norm).rename("reference_orbital_frequency_mag"),
+             simulations["reference_mass_ratio"].map(floater),
+             simulations["reference_dimensionless_spin1"].map(norm).rename("reference_chi1_mag"),
+             simulations["reference_dimensionless_spin2"].map(norm).rename("reference_chi2_mag"),
+             simulations["reference_chi_eff"].map(floater),
+             simulations["reference_chi1_perp"].map(floater),
+             simulations["reference_chi2_perp"].map(floater),
+             simulations["reference_eccentricity"].map(floater),
+             simulations["reference_eccentricity"].map(floaterbound).rename("reference_eccentricity_bound"),
+             simulations["reference_mean_anomaly"].map(floater),
+             simulations["reference_mass1"].map(floater),
+             simulations["reference_mass2"].map(floater),
+             simulations["reference_dimensionless_spin1"].map(three_vec),
+             simulations["reference_dimensionless_spin1"].map(norm).rename("reference_dimensionless_spin1_mag"),
+             simulations["reference_dimensionless_spin2"].map(three_vec),
+             simulations["reference_dimensionless_spin2"].map(norm).rename("reference_dimensionless_spin2_mag"),
+             simulations["reference_orbital_frequency"].map(three_vec),
+             simulations["reference_position1"].map(three_vec),
+             simulations["reference_position2"].map(three_vec),
+             simulations["relaxation_time"].map(floater),
+             #simulations["merger_time"].map(floater),
+             simulations["common_horizon_time"].map(floater),
+             simulations["remnant_mass"].map(floater),
+             simulations["remnant_dimensionless_spin"].map(three_vec),
+             simulations["remnant_dimensionless_spin"].map(norm).rename("remnant_dimensionless_spin_mag"),
+             simulations["remnant_velocity"].map(three_vec),
+             simulations["remnant_velocity"].map(norm).rename("remnant_velocity_mag"),
+             #simulations["final_time"].map(floater),
+             simulations["EOS"].fillna(simulations["eos"]),
+             simulations["initial_data_type"].astype("category"),
+             #simulations["object1"].astype("category"),
+             #simulations["object2"].astype("category"),
+             simulations["disk_mass"].map(floater),
+             simulations["ejecta_mass"].map(floater),
+             # simulations["url"],
+             #simulations["simulation_name"],
+             #simulations["alternative_names"],
+             # simulations["metadata_path"],
+             simulations["date_link_earliest"].map(datetime_from_string),
+             simulations["date_postprocessing"].map(datetime_from_string),
+             simulations["date_run_earliest"].map(datetime_from_string),
+             simulations["date_run_latest"].map(datetime_from_string),
+             # simulations["end_of_trajectory_time"].map(floater),
+             # simulations["merger_time"].map(floater),
+             simulations["number_of_orbits"].map(floater),
+             simulations["superseded_by"],
+             simulations["DOI_versions"],
+             simulations["keywords"],
+         ), axis=1)
+
+         # We have ignored the following fields present in the
+         # simulations.json file (as of 2024-08-04), listed here with
+         # the number of non-null entries:
+         #
+         #     alternative_names          2778
+         #     point_of_contact_email     2778
+         #     authors_emails             2776
+         #     simulation_bibtex_keys     2778
+         #     code_bibtex_keys           2778
+         #     initial_data_bibtex_keys   2778
+         #     quasicircular_bibtex_keys  2778
+         #     metadata_version           2778
+         #     spec_revisions             2778
+         #     spells_revision            2778
+         #     merger_time                   9
+         #     final_time                   12
+         #     reference_spin1               2
+         #     reference_spin2               1
+         #     nitial_spin1                  2
+         #     initial_spin2                 2
+         #     remnant_spin                  2
+         #     initial_mass_withspin2        2
+         #     end_of_trajectory_time        3
+
+         self._dataframe = sims_df
+         return sims_df
+
+     table = dataframe
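A sketch of the kind of query this property enables; the column names come from the mappings above, while the `"BHBH"` value and the eccentricity threshold are illustrative assumptions:

```python
import sxs

df = sxs.load("simulations").dataframe

# Binary black holes with a tightly bounded orbital eccentricity
bbh = df[(df["object_types"] == "BHBH") & (df["reference_eccentricity_bound"] < 1e-3)]

print(bbh[["reference_mass_ratio", "reference_chi_eff", "number_of_orbits"]])
```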
sxs/time_series.py CHANGED
@@ -318,6 +318,65 @@ class TimeSeries(np.ndarray):
 
     t = time
 
+    @property
+    def abs(self):
+        """Absolute value of the data
+
+        Returns
+        -------
+        absolute : TimeSeries
+            Because the absolute values make no sense as mode weights, this is just a
+            plain TimeSeries object.
+
+        See Also
+        --------
+        arg
+
+        """
+        return np.abs(self)
+
+    @property
+    def arg(self):
+        """Complex phase angle of the data
+
+        Note that the result is not "unwrapped", meaning that there may be
+        discontinuities as the phase approaches ±π.
+
+        Returns
+        -------
+        phase : TimeSeries
+            Values are in the interval (-π, π].
+
+        See Also
+        --------
+        numpy.angle
+        arg_unwrapped
+
+        """
+        return np.angle(self)
+
+    @property
+    def arg_unwrapped(self):
+        """Complex phase angle of the data, unwrapped along the time axis
+
+        The result is "unwrapped", meaning that discontinuities as the phase approaches
+        ±π are removed by adding an appropriate amount to all following data points.
+
+        Returns
+        -------
+        phase : TimeSeries
+            Values at the initial time are in the interval (-π, π], but may evolve to
+            arbitrary real values.
+
+        See Also
+        --------
+        numpy.angle
+        numpy.unwrap
+        arg
+
+        """
+        return TimeSeries(np.unwrap(self.arg, axis=self.time_axis), self.time)
+
     def register_modification(self, func, **kwargs):
         """Add a record of a modification to the metadata
 
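A minimal sketch of the three new properties on a synthetic complex signal; it assumes `TimeSeries` is exported at the package top level, and constructs it as `TimeSeries(data, time)`, just as `arg_unwrapped` does above:

```python
import numpy as np
import sxs

t = np.linspace(0.0, 10.0, 1000)
ts = sxs.TimeSeries(np.exp(5j * t), t)  # unit-amplitude complex signal

amplitude = ts.abs             # plain TimeSeries of absolute values (all 1 here)
wrapped = ts.arg               # phase in (-π, π], with jumps at ±π
continuous = ts.arg_unwrapped  # ≈ 5*t, with the jumps removed
```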
sxs/utilities/__init__.py CHANGED
@@ -15,8 +15,12 @@ from . import url, inspire, monotonicity, decimation, lvcnr, references
 from .downloads import download_file
 from .bitwise import diff, xor, multishuffle
 from .sxs_identifiers import (
-    sxs_identifier_regex, sxs_identifier_re, lev_regex, lev_re, sxs_id,
-    lev_number, simulation_title, sxs_id_to_url
+    sxs_identifier_regex, sxs_identifier_re,
+    lev_regex, lev_re,
+    sxs_id_version_lev_regex, sxs_id_version_lev_re,
+    sxs_id_version_lev_exact_regex, sxs_id_version_lev_exact_re,
+    sxs_id, sxs_id_and_version,
+    lev_number, simulation_title, sxs_id_to_url,
 )
 from .sxs_directories import (
     sxs_directory, read_config, write_config, sxs_path_to_system_path, cached_path
@@ -7,9 +7,13 @@ sxs_identifier_regex = (
     r"(?P<sxs_number>[0-9]+))(?:(v|V)(?P<version>[0-9.]+))?"
 )
 lev_regex = r"Lev(?P<lev>-?[0-9]+)"
+sxs_id_version_lev_regex = sxs_identifier_regex + rf"(?:(:|/){lev_regex})?"
+sxs_id_version_lev_exact_regex = f"^{sxs_id_version_lev_regex}$"
+
 sxs_identifier_re = re.compile(sxs_identifier_regex)
 lev_re = re.compile(lev_regex)
-
+sxs_id_version_lev_re = re.compile(sxs_id_version_lev_regex)
+sxs_id_version_lev_exact_re = re.compile(sxs_id_version_lev_exact_regex)
 
 def sxs_id(s, default="", include_version=False):
     """Return the SXS ID contained in the input string
@@ -28,6 +32,15 @@ def sxs_id(s, default="", include_version=False):
     4) An object with a 'title' item
 
     """
+    id, version = sxs_id_and_version(s, default)
+    if include_version:
+        return f"{id}{version}"
+    else:
+        return id
+
+
+def sxs_id_and_version(s, default=""):
+    """Return the SXS ID and version contained in the input string"""
     import os.path
     import re
     try:
@@ -41,20 +54,17 @@ def sxs_id(s, default="", include_version=False):
         with open(s, "r") as f:
             s = [l.strip() for l in f.splitlines()]
         for line in s:
-            sxs_id_line = sxs_id(line)
-            if sxs_id_line:
+            sxs_id_line = sxs_id_and_version(line)
+            if sxs_id_line[0]:
                 return sxs_id_line
-        return default
+        return default, ""
     except TypeError:
         pass
     m = re.search(sxs_identifier_regex, s)
     if m:
-        if include_version:
-            return m["sxs_identifier"] + (f"v{m['version']}" if m["version"] else "")
-        else:
-            return m["sxs_identifier"]
+        return m["sxs_identifier"], (f"v{m['version']}" if m["version"] else "")
     else:
-        return default
+        return default, ""
 
 
 def simulation_title(sxs_id):
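A short sketch of the refactored API; the identifier is made up, and the return values follow directly from the code above:

```python
from sxs.utilities import sxs_id, sxs_id_and_version

sxs_id_and_version("SXS:BBH:0123v2.0/Lev5")       # ("SXS:BBH:0123", "v2.0")
sxs_id_and_version("no identifier here")          # ("", "")
sxs_id("SXS:BBH:0123v2.0", include_version=True)  # "SXS:BBH:0123v2.0"
```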
sxs/utilities/url.py CHANGED
@@ -42,7 +42,10 @@ def parse(url):
 
     """
     match = url_regex.match(url)
-    return getattr(match, "groupdict", {})
+    if hasattr(match, "groupdict"):
+        return match.groupdict()
+    else:
+        return {}
 
 
 def validate(url):
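The replaced one-liner never called the method: `getattr(match, "groupdict", {})` evaluates to the bound `groupdict` method itself whenever `match` is not `None`. A standard-library illustration of the difference:

```python
import re

match = re.match(r"(?P<scheme>[a-z]+)://", "https://example.org")

broken = getattr(match, "groupdict", {})  # a bound method, not a dict
fixed = match.groupdict()                 # {'scheme': 'https'}, as intended
```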
@@ -322,7 +322,7 @@ def save(
 def load(
     file_name, ignore_validation=None, check_md5=True,
     transform_to_inertial=True, convert_from_conjugate_pairs=True,
-    compression=bz2, diff=diff, formats=None,
+    compression=bz2, diff=diff, formats=None, metadata=None,
     **kwargs
 ):
     """Load a waveform in RPDMB format
@@ -390,6 +390,9 @@ def load(
         reported in the metadata. If `None`, the default, t=0 will be
         used if present in the data, and the first time step
         otherwise.
+    metadata : Metadata, optional
+        If given, this metadata will be used instead of attempting to
+        load the metadata from an accompanying file.
 
     Note that the keyword parameters will be overridden by
     corresponding entries in the JSON file, if they exist. If the
@@ -567,11 +570,11 @@ def load(
     if transform_to_inertial:
         w = w.to_inertial_frame()
 
-    if not metadata_path.exists():
-        invalid(f'\nMetadata file "{metadata_path}" cannot be found, but is expected for this data format.')
-        metadata = None
-    else:
-        metadata = Metadata.from_file(metadata_path)
+    if metadata is None:
+        if not metadata_path.exists():
+            invalid(f'\nMetadata file "{metadata_path}" cannot be found, but is expected for this data format.')
+        else:
+            metadata = Metadata.from_file(metadata_path)
 
     dtb = kwargs.pop("drop_times_before", 0)
     if dtb=="begin":
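A hedged sketch of the new `metadata` keyword, assuming this loader is the one exposed as `sxs.rpdmb`; the file names are hypothetical:

```python
from sxs import Metadata, rpdmb  # assumes `rpdmb` is re-exported at the top level

# Reuse metadata already in memory instead of letting `load` search for
# an accompanying metadata file next to the waveform.
metadata = Metadata.from_file("metadata.json")     # hypothetical path
w = rpdmb.load("Strain_N2.h5", metadata=metadata)  # hypothetical file name
```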
@@ -939,6 +939,13 @@ class WaveformModes(WaveformMixin, TimeSeries):
         """
         from spherical.wigner import _rotate
 
+        if self.spin_weight is None:
+            raise ValueError(
+                "Cannot rotate a waveform with unknown spin weight.\n" +
+                "Presumably, somewhere upstream, the spin weight was\n" +
+                "not set for this waveform, when it should have been."
+            )
+
         R = quaternionic.array(quat)
         wigner = spherical.Wigner(self.ell_max, ell_min=self.ell_min)  #, mp_max=abs(self.spin_weight))
         D = np.zeros(wigner.Dsize, dtype=complex)
sxs/zenodo/__init__.py CHANGED
@@ -2,6 +2,8 @@
 
 """
 
+import os.path
+from pathlib import Path
 
 from .api import Login, Deposit, Records
 from . import catalog, simannex, surrogatemodeling
@@ -9,6 +11,17 @@ from . import catalog, simannex, surrogatemodeling
 # See https://github.com/moble/nb-marine-science for other examples using the Zenodo API
 # The other python API interface I found is here: https://github.com/moble/zenodo-python
 
+
+def path_to_invenio(file_path):
+    """Convert a file path to an invenio-compatible name"""
+    return str(file_path).replace(os.path.sep, ":")
+
+
+def invenio_to_path(file_name):
+    """Convert an invenio-compatible name to a file path"""
+    return Path(file_name.replace(":", os.path.sep))
+
+
 def translate(sxs_identifier, url=False):
     """Query data.black-holes.org to get the current Zenodo equivalent of the given SXS ID
 
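A small sketch of the round trip on a POSIX system; note that any literal colons already present in a path (as in SXS IDs) would also be turned back into separators by `invenio_to_path`:

```python
from sxs.zenodo import path_to_invenio, invenio_to_path

name = path_to_invenio("Lev5/metadata.json")  # "Lev5:metadata.json" on POSIX
path = invenio_to_path(name)                  # PosixPath("Lev5/metadata.json")
```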
sxs/zenodo/api/deposit.py CHANGED
@@ -563,7 +563,7 @@ class Deposit(object):
 
         To publish the new version, its files must differ from all previous versions.
 
-        This action will create a new deposit, which will be a snapshot of the current resouce,
+        This action will create a new deposit, which will be a snapshot of the current resource,
         inheriting the metadata as well as snapshot of files. The new version deposit will have a
         state similar to a new, unpublished deposit, most importantly its files will be modifiable
         as for a new deposit.
@@ -582,7 +582,7 @@ class Deposit(object):
 
         To publish the new version, its files must differ from all previous versions.
 
-        This action will create a new deposit, which will be a snapshot of the current resouce,
+        This action will create a new deposit, which will be a snapshot of the current resource,
         inheriting the metadata as well as snapshot of files. The new version deposit will have a
         state similar to a new, unpublished deposit, most importantly its files will be modifiable
         as for a new deposit.