sparclclient 1.2.8b4__tar.gz → 1.3.0b1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/.gitignore +1 -1
  2. sparclclient-1.3.0b1/.gitlab-ci.yml +21 -0
  3. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/PKG-INFO +1 -1
  4. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/requirements-internal.txt +1 -1
  5. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/source/conf.py +26 -3
  6. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/source/sparcl.rst +8 -0
  7. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/Results.py +10 -4
  8. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/__init__.py +2 -1
  9. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/client.py +9 -11
  10. sparclclient-1.3.0b1/sparcl/specutils.py +311 -0
  11. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/tests_api.py +128 -3
  12. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/.github/workflows/django.yml +0 -0
  13. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/.pre-commit-config.yaml +0 -0
  14. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/.readthedocs.yaml +0 -0
  15. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/LICENSE +0 -0
  16. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/Makefile +0 -0
  17. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/README.md +0 -0
  18. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/make.bat +0 -0
  19. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/pyproject.toml +0 -0
  20. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/requirements-client.txt +0 -0
  21. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/requirements.txt +0 -0
  22. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/source/index.rst +0 -0
  23. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/benchmarks/Benchmark_SPARCL_example.ipynb +0 -0
  24. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/benchmarks/__init__.py +0 -0
  25. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/benchmarks/benchmarks.py +0 -0
  26. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/benchmarks/sparcl_benchmarking.ipynb +0 -0
  27. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/conf.py +0 -0
  28. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/exceptions.py +0 -0
  29. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/fields.py +0 -0
  30. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/gather_2d.py +0 -0
  31. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/notebooks/sparcl-examples.ipynb +0 -0
  32. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/resample_spectra.py +0 -0
  33. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/sparc.ini +0 -0
  34. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/type_conversion.py +0 -0
  35. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/unsupported.py +0 -0
  36. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/utils.py +0 -0
  37. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/expected_dev1.py +0 -0
  38. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/expected_pat.py +0 -0
  39. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/methods_tests.py +0 -0
  40. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/utils.py +0 -0
  41. {sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tox.ini +0 -0
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/.gitignore
@@ -71,7 +71,7 @@ instance/
 .scrapy
 
 # Sphinx documentation
-docs/_build/
+source/_build/
 
 # PyBuilder
 target/
sparclclient-1.3.0b1/.gitlab-ci.yml (new file)
@@ -0,0 +1,21 @@
+default:
+  image: python:3.13
+
+# install-job:
+#   stage: build
+#   script:
+#     - python -m pip install -r requirements.txt
+
+test-api:
+  stage: test
+  script:
+    - python -m pip install -r requirements.txt
+    - python -m unittest tests.tests_api
+
+
+test-sphinx:
+  stage: test
+  script:
+    - python -m pip install -r requirements.txt
+    - python -m pip install Sphinx sphinx-rtd-theme sphinx_mdinclude
+    - sphinx-build --jobs auto --builder html --fail-on-warning --show-traceback source source/_build/html
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: sparclclient
-Version: 1.2.8b4
+Version: 1.3.0b1
 Summary: A client for getting spectra and meta-data from NOIRLab.
 Author-email: "S. Pothier" <datalab-spectro@noirlab.edu>
 Description-Content-Type: text/markdown
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/requirements-internal.txt
@@ -2,7 +2,7 @@
 #psutil==5.8.0 # for benchmarks
 #! speedtest # for benchmarks
 # packaging # for noaodatalab
-#specutils==1.3.1
+specutils
 # jupyter-lab --ip=0.0.0.0
 jupyterlab # =3.1.17
 
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/source/conf.py
@@ -15,6 +15,7 @@ from sparcl import __version__
 import os
 import re
 import sys
+from importlib import import_module
 sys.path.insert(0, os.path.abspath(".."))
 
 # -- Project information -----------------------------------------------------
@@ -26,7 +27,7 @@ author = "S.Pothier, A.Jacques"
 #!version = client_version
 sver = __version__
 mo = re.match(
-    "^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$", # noqa: E501, W605
+    r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$", # noqa: E501, W605
     sver,
 )
 release = sver
@@ -41,14 +42,26 @@ version = mo.group("major") + "." + mo.group("minor")
 # ones.
 extensions = [
     "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
     "sphinx.ext.doctest",
     "sphinx.ext.todo",
     "sphinx.ext.coverage",
     "sphinx.ext.napoleon",
     "sphinx.ext.viewcode",
     "sphinx_mdinclude",
+    "sphinx_rtd_theme",
 ]
 
+# Configuration for intersphinx, copied from astropy.
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3/', None),
+    'numpy': ('https://numpy.org/doc/stable/', None),
+    'scipy': ('https://docs.scipy.org/doc/scipy/', None),
+    'matplotlib': ('https://matplotlib.org/stable/', None),
+    'astropy': ('https://docs.astropy.org/en/stable/', None),
+    'h5py': ('https://docs.h5py.org/en/latest/', None),
+    'specutils': ('https://specutils.readthedocs.io/en/latest/', None)
+}
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]
@@ -58,7 +71,7 @@ templates_path = ["_templates"]
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = "y"
+# language = "y"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -69,6 +82,16 @@ exclude_patterns = ["../sparcl/benchmarks/"]
 pygments_style = "sphinx"
 autosummary_generate = True
 
+# This value contains a list of modules to be mocked up. This is useful when
+# some external dependencies are not met at build time and break the
+# building process.
+autodoc_mock_imports = []
+for missing in ('jwt', ):
+    try:
+        foo = import_module(missing)
+    except ImportError:
+        autodoc_mock_imports.append(missing)
+
 # -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
@@ -80,7 +103,7 @@ html_theme = "sphinx_rtd_theme"
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+# html_static_path = ["_static"]
 
 # -- Extension configuration -------------------------------------------------
 
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/source/sparcl.rst
@@ -24,3 +24,11 @@ sparcl.Results module
    :members:
    :inherited-members:
    :show-inheritance:
+
+sparcl.specutils module
+-----------------------
+
+.. automodule:: sparcl.specutils
+   :members:
+   :inherited-members:
+   :show-inheritance:
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/Results.py
@@ -9,6 +9,7 @@ from sparcl.utils import _AttrDict
 
 # from sparcl.gather_2d import bin_spectra_records
 import sparcl.exceptions as ex
+import sparcl.specutils as su
 from warnings import warn
 import re
 
@@ -139,9 +140,7 @@ class Results(UserList):
 
         Returns:
             reordered (:class:`~sparcl.Results.Retrieved`): Contains header and
-            reordered records.
-            # none_idx (:obj:`list`): List of indices where record is None.
-
+            reordered records.
         """
         if len(ids_og) <= 0:
             msg = (
@@ -198,6 +197,14 @@ class Results(UserList):
         warn(msg, stacklevel=2)
         return Results(reordered, client=self.client)
 
+    def to_specutils(self):
+        """Convert results to a `specutils` object.
+
+        Returns:
+            to_specutils (:class:`~specutils.Spectrum`): a `specutils` object.
+        """
+        return su.to_specutils(self)
+
 
 # For results of retrieve()
 class Retrieved(Results):
@@ -209,7 +216,6 @@ class Retrieved(Results):
    def __repr__(self):
        return f"Retrieved Results: {len(self.recs)} records"
 
-
    #! def bin_spectra(self):
    #! """Align flux from all records by common wavelength bin.
    #!
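The new Results.to_specutils() method above is a thin wrapper around sparcl.specutils.to_specutils. A minimal usage sketch follows; the specid values are the ones used in the updated retrieve_by_specid doctest in sparcl/client.py further down, while the include list is an illustrative assumption (the conversion reads flux, wavelength, ivar, and mask from each record):

    import sparcl.client

    client = sparcl.client.SparclClient(announcement=False)
    # Illustrative IDs and field list; the conversion expects each record
    # to carry flux, wavelength, ivar, and mask arrays.
    sids = [4753625089450465280, 1254253099313293312]
    inc = ['specid', 'flux', 'wavelength', 'ivar', 'mask']
    ret = client.retrieve_by_specid(specid_list=sids, include=inc)
    spec = ret.to_specutils()  # delegates to sparcl.specutils.to_specutils(ret)
    print(type(spec))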
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/__init__.py
@@ -37,4 +37,5 @@ __all__ = ["client", "align_records"]
 #__version__ = "1.2.5"
 #__version__ = "1.2.6"
 #__version__ = "1.2.7"
-__version__ = "1.2.8b4"
+#__version__ = "1.2.8"
+__version__ = "1.3.0b1"
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/sparcl/client.py
@@ -267,11 +267,12 @@ class SparclClient: # was SparclApi()
 
     def token_expired(self, renew=False):
         """
-        POST http://localhost:8050/sparc/renew_token/
-        Content-Type: application/json
-        {
-            "refresh_token": "..."
-        }
+        ::
+            POST http://localhost:8050/sparc/renew_token/
+            Content-Type: application/json
+            {
+                "refresh_token": "..."
+            }
 
         Returns an 'access' token
         """
@@ -308,9 +309,8 @@ class SparclClient: # was SparclApi()
         None.
 
         Example:
-            >>>
-            >> client = SparclClient(announcement=False)
-            >> client.login('test_user@noirlab.edu', 'testpw')
+            >>> client = SparclClient(announcement=False)
+            >>> client.login('test_user@noirlab.edu', 'pw') # doctest: +SKIP
             Logged in successfully with email='test_user@noirlab.edu'
         """
 
@@ -1021,11 +1021,9 @@ class SparclClient: # was SparclApi()
 
         Example:
             >>> client = SparclClient(announcement=False)
-            >>> sids = [5840097619402313728, -8985592895187431424]
+            >>> sids = [4753625089450465280, 1254253099313293312]
             >>> inc = ['specid', 'flux', 'wavelength', 'model']
            >>> ret = client.retrieve_by_specid(specid_list=sids, include=inc)
-            >>> len(ret.records[0].wavelength)
-            4617
 
         """
         #!specid_list = list(specid_list)
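For context, the token_expired docstring reformatted in the first client.py hunk above documents the raw token-renewal request against a local dev server. A hypothetical sketch of that call with the requests library (the refresh-token value and the response handling are placeholders, not part of the client API):

    import requests

    # Endpoint and payload shape come from the token_expired docstring;
    # passing json= sets the Content-Type: application/json header shown there.
    resp = requests.post(
        "http://localhost:8050/sparc/renew_token/",
        json={"refresh_token": "..."},
    )
    # The docstring says an 'access' token is returned; inspect the body to see how.
    print(resp.status_code, resp.json())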
sparclclient-1.3.0b1/sparcl/specutils.py (new file)
@@ -0,0 +1,311 @@
+"""Functions for converting SPARCL results to specutils objects.
+"""
+import warnings
+import numpy as np
+try:
+    # specutils >= 2.0
+    from specutils import Spectrum
+except ImportError:
+    from specutils import Spectrum1D as Spectrum
+from specutils import SpectrumCollection, SpectrumList
+from astropy.nddata import InverseVariance
+import astropy.units as u
+
+def _validate_records(records, r0, collection):
+    """Validate that records can be converted to Spectrum.
+
+    Parameters
+    ----------
+    records : list of dict
+        All records to validate.
+    r0 : dict
+        First record, used as reference for validation.
+    collection: bool
+        If ``True``, attempt to convert to a
+        :class:`~specutils.SpectrumCollection` instead.
+
+    Raises
+    ------
+    ValueError
+        If records lack 'wavelength' attribute.
+        If records have different data releases.
+        If wavelength array lengths differ (suggests using SpectrumList).
+        If wavelength pixel values differ and collection=False
+        (suggests using SpectrumCollection).
+
+    Warnings
+    --------
+    UserWarning
+        If records come from different data releases.
+    """
+
+    # Check if the first record has wavelength data
+    if 'wavelength' not in r0:
+        raise ValueError("Results do not have a wavelength attribute. "
+                         "Conversion is not possible.")
+
+    # Check if all records come from the same data release
+    if not all([r.data_release == r0.data_release for r in records]):
+        warnings.warn("Results are not all from the same data release, "
+                      "conversion may not be possible.", UserWarning)
+
+    # Check if all records have the same number of wavelength points
+    if not all([len(r.wavelength) == len(r0.wavelength) for r in records]):
+        raise ValueError("Results do not have the same wavelength solution. "
+                         "Consider using .to_SpectrumList instead.")
+
+    # If not creating a SpectrumCollection, check that wavelength values
+    # are identical across all records
+    if not collection and not all([(r.wavelength == r0.wavelength).all()
+                                   for r in records]):
+        raise ValueError("Results do not have the same wavelength pixels. "
+                         "Consider using SpectrumCollection instead.")
+
+def _extract_record_data(records, flux, uncertainty, mask, model, redshift,
+                         meta, spectral_axis, has_model, has_redshift,
+                         collection, single_record):
+    """Extract all data from records into arrays. This function modifies
+    arrays in-place rather than returning values.
+
+    Parameters
+    ----------
+    records: list of dict
+        Records containing flux, ivar, mask, wavelength, and optional
+        model/redshift.
+    flux : np.ndarray
+        Pre-allocated array for flux values. Shape: (n_pixels,) if
+        single_record, else (n_records, n_pixels).
+    uncertainty : np.ndarray
+        Pre-allocated array for inverse variance.
+    mask : np.ndarray
+        Pre-allocated array for data quality masks.
+    model : np.ndarray or None
+        Pre-allocated array for model values if has_model=True.
+    redshift : list
+        Empty list to populate with redshift values.
+    meta : dict
+        Empty dict to populate with metadata.
+    spectral_axis : np.ndarray
+        For collections, pre-allocated 2D array to store wavelength grids.
+        For non-collections, this is just r0.wavelength (not modified).
+    has_model : bool
+        Whether records contain 'model' attribute.
+    has_redshift : bool
+        Whether records contain 'redshift' attribute.
+    collection : bool
+        If True, stores wavelength arrays for each record in spectral_axis.
+    single_record : bool
+        If True, treats arrays as 1D. If False, treats as 2D with row per
+        record.
+
+    Returns
+    -------
+    None
+        All outputs are written to the input arrays/containers in-place.
+    """
+    for k, record in enumerate(records):
+        if single_record:
+            # For single record, assign directly (1D)
+            flux[:] = record.flux
+            uncertainty[:] = record.ivar
+            mask[:] = record.mask
+            if has_model:
+                model[:] = record.model
+        else:
+            # For multiple records, assign to row (2D)
+            flux[k, :] = record.flux
+            uncertainty[k, :] = record.ivar
+            mask[k, :] = record.mask
+            if has_model:
+                model[k, :] = record.model
+
+        # Store redshift values if available
+        if has_redshift:
+            redshift.append(record.redshift)
+
+        # For collections, each record can have its own wavelength grid
+        if collection:
+            spectral_axis[k, :] = record.wavelength
+
+        # Extract all additional metadata attributes
+        for attribute, value in record.items():
+            if attribute not in ('flux', 'ivar', 'mask', 'model', 'redshift',
+                                 'wavelength'):
+                if single_record:
+                    # For single record, store metadata values as scalars
+                    meta[attribute] = value
+                else:
+                    # For multiple records, accumulate metadata values into
+                    # lists (creates list on first encounter, then appends)
+                    meta.setdefault(attribute, []).append(value)
+
+def to_Spectrum(results, *, collection=False):
+    """Convert `results` to :class:`specutils.Spectrum`.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results, or a single record from a set of results.
+    collection : bool, optional
+        If ``True``, attempt to convert to a
+        :class:`~specutils.SpectrumCollection` instead.
+
+    Returns
+    -------
+    :class:`~specutils.Spectrum` or :class:`~specutils.SpectrumCollection`
+        The requested object.
+
+    Raises
+    ------
+    ValueError
+        If `results` can't be converted to a :class:`~specutils.Spectrum`
+        object in a valid way. For example, if some of the spectra have a
+        different wavelength solution.
+    """
+    # Prepare records
+    if isinstance(results, dict):
+        records = [results]
+        r0 = results
+    else:
+        try:
+            records = results.records
+            if len(records) == 0:
+                raise ValueError("No records found in results. Cannot "
+                                 "convert empty results to Spectrum.")
+            r0 = results.records[0]
+        except (IndexError, AttributeError) as e:
+            raise ValueError("No records found in results. Cannot "
+                             "convert empty results to Spectrum.") from e
+
+    # Validate
+    _validate_records(records, r0, collection)
+
+    # Determine which optional data components exist in records
+    has_redshift = 'redshift' in r0
+    has_model = 'model' in r0
+    n_pixels = r0.flux.shape[0]
+    single_record = len(records) == 1
+
+    # Set flux shape based on number of records
+    if single_record:
+        flux_shape = (n_pixels,)
+    else:
+        flux_shape = (len(records), n_pixels)
+
+    # Build spectral axis
+    if collection:
+        spectral_axis = np.empty((len(records), r0.wavelength.shape[0]),
+                                 dtype=r0.wavelength.dtype)
+    else:
+        spectral_axis = r0.wavelength
+
+    # Initialize arrays
+    flux = np.empty(flux_shape, dtype=r0.flux.dtype)
+    uncertainty = np.empty(flux_shape, dtype=r0.ivar.dtype)
+    mask = np.empty(flux_shape, dtype=r0.mask.dtype)
+    model = np.empty(flux_shape, dtype=r0.model.dtype) if has_model else None
+    redshift = []
+    meta = {}
+
+    # Populate arrays by iterating through records
+    _extract_record_data(records, flux, uncertainty, mask, model, redshift,
+                         meta, spectral_axis, has_model, has_redshift,
+                         collection, single_record)
+
+    # Convert redshift list to numpy array if exists
+    if has_redshift:
+        redshift = np.array(redshift)
+        if single_record and len(redshift) == 1:
+            # Convert to scalar if single record
+            redshift = redshift[0]
+    else:
+        redshift = None
+
+    # Add model to metadata if exists
+    if has_model:
+        meta['model'] = model
+
+    # Prepare arguments common to both Spectrum and SpectrumCollection
+    common_args = {
+        'flux': flux * 10**-17 * u.Unit('erg cm-2 s-1 AA-1'),
+        'spectral_axis': spectral_axis * u.AA,
+        'uncertainty': InverseVariance(uncertainty),
+        'mask': mask,
+        'meta': meta}
+
+    if collection:
+        return SpectrumCollection(**common_args)
+
+    return Spectrum(**common_args, redshift=redshift)
+
+def to_SpectrumList(results):
+    """Convert `results` to :class:`specutils.SpectrumList`.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results.
+
+    Returns
+    -------
+    :class:`~specutils.SpectrumList`
+        The requested object.
+    """
+    s = SpectrumList()
+    if isinstance(results, dict):
+        records = [results]
+    else:
+        records = results.records
+    for r in records:
+        if 'redshift' in r:
+            redshift = r.redshift
+        else:
+            redshift = None
+        meta = dict()
+        for attribute in r:
+            if attribute not in ('flux', 'wavelength', 'ivar',
+                                 'redshift', 'mask'):
+                meta[attribute] = r[attribute]
+        s1 = Spectrum(flux=r.flux*10**-17*u.Unit('erg cm-2 s-1 AA-1'),
+                      spectral_axis=r.wavelength*u.AA,
+                      uncertainty=InverseVariance(r.ivar),
+                      redshift=redshift,
+                      mask=r.mask,
+                      meta=meta)
+        s.append(s1)
+    return s
+
+def to_specutils(results):
+    """Convert `results` to a specutils object.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results.
+
+    Returns
+    -------
+    :class:`~specutils.Spectrum` or :class:`~specutils.SpectrumCollection`
+    or :class:`~specutils.SpectrumList`
+        The most natural conversion to a specutils object.
+
+    Raises
+    ------
+    ValueError
+        If no valid conversion can be performed.
+    """
+    try:
+        # Try standard Spectrum conversion first
+        s = to_Spectrum(results)
+    except ValueError as ve:
+        # Check the error message to determine appropriate alternative
+        if 'SpectrumList' in str(ve):
+            # Different wavelength array lengths use SpectrumList
+            s = to_SpectrumList(results)
+        elif 'SpectrumCollection' in str(ve):
+            # Same wavelength length but diff pixels use SpectrumCollection
+            s = to_Spectrum(results, collection=True)
+        else:
+            raise ValueError("Could not find a valid conversion to "
+                             "specutils objects!")
+    return s
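A minimal, self-contained sketch of the new module on a single record, mirroring the test fixtures added to tests/tests_api.py below (the record values and the 'DR16' label are illustrative, not real SPARCL data):

    import numpy as np
    import sparcl.specutils as su
    from sparcl.utils import _AttrDict

    # Fake record carrying the attributes to_Spectrum() reads:
    # wavelength, flux, ivar, mask, plus optional redshift/data_release.
    w = np.linspace(3000.0, 10000.0, 50, dtype=np.float32)
    rec = _AttrDict({'wavelength': w,
                     'flux': np.ones_like(w),
                     'ivar': np.full(w.shape, 100.0, dtype=np.float32),
                     'mask': np.zeros(w.shape, dtype=np.int32),
                     'redshift': 0.5,
                     'data_release': 'DR16'})

    spec = su.to_specutils(rec)    # single record -> a specutils Spectrum
    print(spec.spectral_axis[:3])  # Angstrom
    print(spec.flux.unit)          # erg / (Angstrom cm2 s); values scaled by 1e-17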
{sparclclient-1.2.8b4 → sparclclient-1.3.0b1}/tests/tests_api.py
@@ -66,6 +66,8 @@ import sparcl.exceptions as ex
 import sparcl.gather_2d as sg
 import sparcl.client
 import sparcl.gather_2d
+import sparcl.specutils as su
+from sparcl.utils import _AttrDict
 
 #! import sparcl.utils as ut
 
@@ -125,12 +127,19 @@ def testcase_log_console(lggr):
 def load_tests(loader, tests, ignore):
     import doctest
 
+    IN_CI = os.getenv('CI') or os.getenv('GITLAB_CI')
+
+    if IN_CI:
+        print("Skipping doctests in CI environment.")
+        return tests
+
     if serverurl == _PROD:
         print(f"Arranging to run doctests against: sparcl.client")
         tests.addTests(doctest.DocTestSuite(sparcl.client))
 
-        print(f"Arranging to run doctests against: sparcl.gather_2d")
-        tests.addTests(doctest.DocTestSuite(sparcl.gather_2d))
+        # Commenting out until this package is revisited (02/02/2026)
+        #print(f"Arranging to run doctests against: sparcl.gather_2d")
+        #tests.addTests(doctest.DocTestSuite(sparcl.gather_2d))
     else:
         print(
             "Not running doctests since you are not running client"
@@ -170,15 +179,25 @@ class SparclClientTest(unittest.TestCase):
             f"{str(datetime.datetime.now())}!"
         )
 
+        if os.getenv('CI'):
+            try:
+                requests.get(serverurl, timeout=2)
+            except (requests.ConnectionError, requests.Timeout):
+                raise unittest.SkipTest(
+                    f"Cannot reach {serverurl} from CI environment. "
+                    "Run these tests locally or on a network-enabled "
+                    "runner.")
+
         # Client object creation compares the version from the Server
         # against the one expected by the Client. Raise error if
         # the Client is at least one major version behind.
-
+        connect_timeout = 3.1 if os.getenv('CI') else 1.1
         cls.client = sparcl.client.SparclClient(
             url=serverurl,
             verbose=clverb,
             show_curl=showcurl,
             announcement=False,
+            connect_timeout=connect_timeout,
         )
         cls.timing = dict()
         cls.doc = dict()
@@ -1060,3 +1079,109 @@ class AuthTest(unittest.TestCase):
         warnings.filterwarnings("ignore")
         exp = "exp.auth_retrieve_8"
         self.auth_retrieve(None, self.Pub, exp)
+
+class MockRetrieved(object):
+    """Create an object with a .records attribute."""
+    def __init__(self, user_list):
+        self.records = user_list
+
+class SpecutilsTest(unittest.TestCase):
+    """Test the SPARCL to_specutils method"""
+
+    def setUp(self):
+        """Create a dummy Results object."""
+        self.w = numpy.linspace(3000.0, 10000.0, dtype=numpy.float32)
+        self.f = numpy.ones(self.w.shape, dtype=self.w.dtype)
+        self.i = 1.0/(0.1*self.f)**2
+        self.m = numpy.zeros(self.w.shape, dtype=numpy.int32)
+        self.r0 = _AttrDict({'redshift': 1.23,
+                             'wavelength': self.w,
+                             'flux': self.f,
+                             'ivar': self.i,
+                             'mask': self.m,
+                             'data_release': 'DR12'})
+        self.r = MockRetrieved([self.r0,
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy())])
+
+    def test_to_specutils_one_result(self):
+        """Test the to_specutils() method with one result."""
+        s = su.to_specutils(self.r0)
+        self.assertTrue(isinstance(s, su.Spectrum))
+        self.assertTrue((s.mask == 0).all())
+
+    def test_to_specutils_multiple_results(self):
+        """Test the to_specutils() method with more than one result."""
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.Spectrum))
+        self.assertTrue((s.mask == 0).all())
+
+    def test_to_specutils_bad_results(self):
+        """Test the to_specutils() method with a bad result."""
+        del self.r0['wavelength']
+        with self.assertRaises(ValueError) as ex:
+            su.to_specutils(self.r0)
+        self.assertEqual(ex.exception.args[0],
+                         "Could not find a valid conversion to specutils "
+                         "objects!")
+
+    def test_to_specutils_different_release(self):
+        """Test for the presence of a warning if multiple releases are
+        present."""
+        self.r.records[1]['data_release'] = 'DR11'
+        with self.assertWarns(UserWarning) as wn:
+            su.to_specutils(self.r)
+        self.assertEqual(wn.warning.args[0],
+                         "Results are not all from the same data release, "
+                         "conversion may not be possible.")
+
+    def test_to_specutils_spectrum_list(self):
+        """Test conversion to SpectrumList."""
+        self.r.records[1]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         100,
+                                                         dtype=numpy.float32)
+        self.r.records[2]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         150,
+                                                         dtype=numpy.float32)
+        self.r.records[3]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         200,
+                                                         dtype=numpy.float32)
+        self.r.records[4]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         250,
+                                                         dtype=numpy.float32)
+        self.r.records[1]['flux'] = numpy.ones((100, ), dtype=numpy.float32)
+        self.r.records[2]['flux'] = numpy.ones((150, ), dtype=numpy.float32)
+        self.r.records[3]['flux'] = numpy.ones((200, ), dtype=numpy.float32)
+        self.r.records[4]['flux'] = numpy.ones((250, ), dtype=numpy.float32)
+        self.r.records[1]['ivar'] = numpy.ones((100, ), dtype=numpy.float32)
+        self.r.records[2]['ivar'] = numpy.ones((150, ), dtype=numpy.float32)
+        self.r.records[3]['ivar'] = numpy.ones((200, ), dtype=numpy.float32)
+        self.r.records[4]['ivar'] = numpy.ones((250, ), dtype=numpy.float32)
+        self.r.records[1]['mask'] = numpy.zeros((100, ), dtype=numpy.int32)
+        self.r.records[2]['mask'] = numpy.zeros((150, ), dtype=numpy.int32)
+        self.r.records[3]['mask'] = numpy.zeros((200, ), dtype=numpy.int32)
+        self.r.records[4]['mask'] = numpy.zeros((250, ), dtype=numpy.int32)
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.SpectrumList))
+        self.assertTrue(isinstance(s[0], su.Spectrum))
+        self.assertEqual(s[0].spectral_axis.shape, (50,))
+
+    def test_to_specutils_spectrum_collection(self):
+        """Test conversion to SpectrumCollection."""
+        self.r.records[1]['wavelength'] = self.w.copy() + 10.0
+        self.r.records[2]['wavelength'] = self.w.copy() + 20.0
+        self.r.records[3]['wavelength'] = self.w.copy() + 30.0
+        self.r.records[4]['wavelength'] = self.w.copy() + 40.0
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.SpectrumCollection))
+        self.assertEqual(s.spectral_axis.shape, (5, 50))
+        self.assertEqual(s.spectral_axis[0, :].shape, (50, ))
+        self.assertTrue(numpy.allclose(s.spectral_axis.value[0, :], self.w))
+        self.assertTrue(numpy.allclose(s.spectral_axis.value[4, :],
+                                       self.w + 40))