sparclclient 1.2.8b3__tar.gz → 1.2.9b1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/.gitignore +1 -1
  2. sparclclient-1.2.9b1/.gitlab-ci.yml +21 -0
  3. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/PKG-INFO +2 -2
  4. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/pyproject.toml +1 -1
  5. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/requirements-client.txt +1 -1
  6. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/requirements-internal.txt +1 -1
  7. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/source/conf.py +26 -3
  8. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/source/sparcl.rst +8 -0
  9. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/Results.py +10 -4
  10. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/__init__.py +2 -1
  11. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/client.py +9 -11
  12. sparclclient-1.2.9b1/sparcl/specutils.py +245 -0
  13. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/tests_api.py +128 -3
  14. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/.github/workflows/django.yml +0 -0
  15. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/.pre-commit-config.yaml +0 -0
  16. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/.readthedocs.yaml +0 -0
  17. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/LICENSE +0 -0
  18. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/Makefile +0 -0
  19. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/README.md +0 -0
  20. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/make.bat +0 -0
  21. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/requirements.txt +0 -0
  22. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/source/index.rst +0 -0
  23. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/benchmarks/Benchmark_SPARCL_example.ipynb +0 -0
  24. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/benchmarks/__init__.py +0 -0
  25. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/benchmarks/benchmarks.py +0 -0
  26. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/benchmarks/sparcl_benchmarking.ipynb +0 -0
  27. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/conf.py +0 -0
  28. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/exceptions.py +0 -0
  29. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/fields.py +0 -0
  30. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/gather_2d.py +0 -0
  31. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/notebooks/sparcl-examples.ipynb +0 -0
  32. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/resample_spectra.py +0 -0
  33. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/sparc.ini +0 -0
  34. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/type_conversion.py +0 -0
  35. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/unsupported.py +0 -0
  36. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/utils.py +0 -0
  37. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/expected_dev1.py +0 -0
  38. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/expected_pat.py +0 -0
  39. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/methods_tests.py +0 -0
  40. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/utils.py +0 -0
  41. {sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tox.ini +0 -0
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/.gitignore
@@ -71,7 +71,7 @@ instance/
 .scrapy
 
 # Sphinx documentation
-docs/_build/
+source/_build/
 
 # PyBuilder
 target/
sparclclient-1.2.9b1/.gitlab-ci.yml (new file)
@@ -0,0 +1,21 @@
+default:
+  image: python:3.13
+
+# install-job:
+#   stage: build
+#   script:
+#     - python -m pip install -r requirements.txt
+
+test-api:
+  stage: test
+  script:
+    - python -m pip install -r requirements.txt
+    - python -m unittest tests.tests_api
+
+
+test-sphinx:
+  stage: test
+  script:
+    - python -m pip install -r requirements.txt
+    - python -m pip install Sphinx sphinx-rtd-theme sphinx_mdinclude
+    - sphinx-build --jobs auto --builder html --fail-on-warning --show-traceback source source/_build/html
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/PKG-INFO
@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: sparclclient
-Version: 1.2.8b3
+Version: 1.2.9b1
 Summary: A client for getting spectra and meta-data from NOIRLab.
 Author-email: "S. Pothier" <datalab-spectro@noirlab.edu>
 Description-Content-Type: text/markdown
 Classifier: License :: OSI Approved :: MIT License
 Requires-Dist: requests==2.31.0
-Requires-Dist: numpy>=2.2.6,<2.4.0
+Requires-Dist: numpy>=1.26,<2.4.0
 Requires-Dist: spectres
 Requires-Dist: pyjwt
 Project-URL: Bug Tracker, https://github.com/pypa/sparclclient/issues
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/pyproject.toml
@@ -24,7 +24,7 @@ license = {file = "LICENSE"}
 classifiers = ["License :: OSI Approved :: MIT License"]
 # version is found in: ~/sandbox/sparclclient/sparcl/__init__.py
 dynamic = ["version", "description"]
-dependencies = ["requests==2.31.0", "numpy>=2.2.6,<2.4.0", "spectres", "pyjwt"]
+dependencies = ["requests==2.31.0", "numpy>=1.26,<2.4.0", "spectres", "pyjwt"]
 
 [project.urls]
 "Homepage" = "https://github.com/pypa/sparclclient"
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/requirements-client.txt
@@ -1,4 +1,4 @@
 requests==2.31.0 # 2.26.0
-numpy>=2.2.6,<2.4.0
+numpy>=1.26,<2.4.0
 spectres
 pyjwt
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/requirements-internal.txt
@@ -2,7 +2,7 @@
 #psutil==5.8.0 # for benchmarks
 #! speedtest # for benchmarks
 # packaging # for noaodatalab
-#specutils==1.3.1
+specutils
 # jupyter-lab --ip=0.0.0.0
 jupyterlab # =3.1.17
 
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/source/conf.py
@@ -15,6 +15,7 @@ from sparcl import __version__
 import os
 import re
 import sys
+from importlib import import_module
 sys.path.insert(0, os.path.abspath(".."))
 
 # -- Project information -----------------------------------------------------
@@ -26,7 +27,7 @@ author = "S.Pothier, A.Jacques"
 #!version = client_version
 sver = __version__
 mo = re.match(
-    "^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",  # noqa: E501, W605
+    r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$",  # noqa: E501, W605
     sver,
 )
 release = sver
@@ -41,14 +42,26 @@ version = mo.group("major") + "." + mo.group("minor")
 # ones.
 extensions = [
     "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
     "sphinx.ext.doctest",
     "sphinx.ext.todo",
     "sphinx.ext.coverage",
     "sphinx.ext.napoleon",
     "sphinx.ext.viewcode",
     "sphinx_mdinclude",
+    "sphinx_rtd_theme",
 ]
 
+# Configuration for intersphinx, copied from astropy.
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3/', None),
+    'numpy': ('https://numpy.org/doc/stable/', None),
+    'scipy': ('https://docs.scipy.org/doc/scipy/', None),
+    'matplotlib': ('https://matplotlib.org/stable/', None),
+    'astropy': ('https://docs.astropy.org/en/stable/', None),
+    'h5py': ('https://docs.h5py.org/en/latest/', None),
+    'specutils': ('https://specutils.readthedocs.io/en/latest/', None)
+}
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]
@@ -58,7 +71,7 @@ templates_path = ["_templates"]
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = "y"
+# language = "y"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -69,6 +82,16 @@ exclude_patterns = ["../sparcl/benchmarks/"]
 pygments_style = "sphinx"
 autosummary_generate = True
 
+# This value contains a list of modules to be mocked up. This is useful when
+# some external dependencies are not met at build time and break the
+# building process.
+autodoc_mock_imports = []
+for missing in ('jwt', ):
+    try:
+        foo = import_module(missing)
+    except ImportError:
+        autodoc_mock_imports.append(missing)
+
 # -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
@@ -80,7 +103,7 @@ html_theme = "sphinx_rtd_theme"
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+# html_static_path = ["_static"]
 
 # -- Extension configuration -------------------------------------------------
 
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/source/sparcl.rst
@@ -24,3 +24,11 @@ sparcl.Results module
    :members:
    :inherited-members:
    :show-inheritance:
+
+sparcl.specutils module
+-----------------------
+
+.. automodule:: sparcl.specutils
+   :members:
+   :inherited-members:
+   :show-inheritance:
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/Results.py
@@ -9,6 +9,7 @@ from sparcl.utils import _AttrDict
 
 # from sparcl.gather_2d import bin_spectra_records
 import sparcl.exceptions as ex
+import sparcl.specutils as su
 from warnings import warn
 import re
 
@@ -139,9 +140,7 @@ class Results(UserList):
 
         Returns:
             reordered (:class:`~sparcl.Results.Retrieved`): Contains header and
-                reordered records.
-            # none_idx (:obj:`list`): List of indices where record is None.
-
+                reordered records.
         """
         if len(ids_og) <= 0:
             msg = (
@@ -198,6 +197,14 @@ class Results(UserList):
         warn(msg, stacklevel=2)
         return Results(reordered, client=self.client)
 
+    def to_specutils(self):
+        """Convert results to a `specutils` object.
+
+        Returns:
+            to_specutils (:class:`~specutils.Spectrum`): a `specutils` object.
+        """
+        return su.to_specutils(self)
+
 
 # For results of retrieve()
 class Retrieved(Results):
@@ -209,7 +216,6 @@ class Retrieved(Results):
     def __repr__(self):
         return f"Retrieved Results: {len(self.recs)} records"
 
-
     #! def bin_spectra(self):
     #!     """Align flux from all records by common wavelength bin.
     #!
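
The new `Results.to_specutils()` method simply delegates to the `sparcl.specutils` module introduced later in this diff. A hedged usage sketch follows: the specid values are the ones used in the `client.py` doctest further down, a reachable SPARCL server plus `specutils`/`astropy` are required, and the extra `include` fields are an assumption based on what the converter reads from each record (`flux`, `wavelength`, `ivar`, `mask`, `data_release`).

```python
# Sketch only (not part of the package docs): retrieve two spectra and hand
# them to the new converter.  Field names in `inc` beyond the doctest's
# ('specid', 'flux', 'wavelength') are assumptions about valid include fields.
from sparcl.client import SparclClient

client = SparclClient(announcement=False)
sids = [4753625089450465280, 1254253099313293312]
inc = ['specid', 'flux', 'wavelength', 'ivar', 'mask', 'data_release']
ret = client.retrieve_by_specid(specid_list=sids, include=inc)

spec = ret.to_specutils()   # delegates to sparcl.specutils.to_specutils()
print(type(spec))           # Spectrum, SpectrumCollection, or SpectrumList
```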
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/__init__.py
@@ -37,4 +37,5 @@ __all__ = ["client", "align_records"]
 #__version__ = "1.2.5"
 #__version__ = "1.2.6"
 #__version__ = "1.2.7"
-__version__ = "1.2.8b3"
+#__version__ = "1.2.8"
+__version__ = "1.2.9b1"
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/sparcl/client.py
@@ -267,11 +267,12 @@ class SparclClient:  # was SparclApi()
 
     def token_expired(self, renew=False):
         """
-        POST http://localhost:8050/sparc/renew_token/
-        Content-Type: application/json
-        {
-            "refresh_token": "..."
-        }
+        ::
+            POST http://localhost:8050/sparc/renew_token/
+            Content-Type: application/json
+            {
+                "refresh_token": "..."
+            }
 
        Returns an 'access' token
        """
@@ -308,9 +309,8 @@ class SparclClient:  # was SparclApi()
            None.
 
        Example:
-            >>>
-            >> client = SparclClient(announcement=False)
-            >> client.login('test_user@noirlab.edu', 'testpw')
+            >>> client = SparclClient(announcement=False)
+            >>> client.login('test_user@noirlab.edu', 'pw')  # doctest: +SKIP
            Logged in successfully with email='test_user@noirlab.edu'
        """
 
@@ -1021,11 +1021,9 @@ class SparclClient:  # was SparclApi()
 
        Example:
            >>> client = SparclClient(announcement=False)
-            >>> sids = [5840097619402313728, -8985592895187431424]
+            >>> sids = [4753625089450465280, 1254253099313293312]
            >>> inc = ['specid', 'flux', 'wavelength', 'model']
            >>> ret = client.retrieve_by_specid(specid_list=sids, include=inc)
-            >>> len(ret.records[0].wavelength)
-            4617
 
 
        #!specid_list = list(specid_list)
sparclclient-1.2.9b1/sparcl/specutils.py (new file)
@@ -0,0 +1,245 @@
+"""Functions for converting SPARCL results to specutils objects.
+"""
+import warnings
+import numpy as np
+try:
+    # specutils >= 2.0
+    from specutils import Spectrum
+except ImportError:
+    from specutils import Spectrum1D as Spectrum
+from specutils import SpectrumCollection, SpectrumList
+from astropy.nddata import InverseVariance
+import astropy.units as u
+
+def _validate_records(records, r0, collection):
+    """Validate that records can be converted to Spectrum."""
+
+    # Check if the first record has wavelength data
+    if 'wavelength' not in r0:
+        raise ValueError("Results do not have a wavelength attribute. "
+                         "Conversion is not possible.")
+
+    # Check if all records come from the same data release
+    if not all([r.data_release == r0.data_release for r in records]):
+        warnings.warn("Results are not all from the same data release, "
+                      "conversion may not be possible.", UserWarning)
+
+    # Check if all records have the same number of wavelength points
+    if not all([len(r.wavelength) == len(r0.wavelength) for r in records]):
+        raise ValueError("Results do not have the same wavelength solution. "
+                         "Consider using .to_SpectrumList instead.")
+
+    # If not creating a SpectrumCollection, check that wavelength values
+    # are identical across all records
+    if not collection and not all([(r.wavelength == r0.wavelength).all()
+                                   for r in records]):
+        raise ValueError("Results do not have the same wavelength pixels. "
+                         "Consider using SpectrumCollection instead.")
+
+def _extract_record_data(records, flux, uncertainty, mask, model, redshift,
+                         meta, spectral_axis, has_model, has_redshift,
+                         collection, single_record):
+    """Extract all data from records into arrays."""
+    for k, record in enumerate(records):
+        if single_record:
+            # For single record, assign directly (1D)
+            flux[:] = record.flux
+            uncertainty[:] = record.ivar
+            mask[:] = record.mask
+            if has_model:
+                model[:] = record.model
+        else:
+            # For multiple records, assign to row (2D)
+            flux[k, :] = record.flux
+            uncertainty[k, :] = record.ivar
+            mask[k, :] = record.mask
+            if has_model:
+                model[k, :] = record.model
+
+        # Store redshift values if available
+        if has_redshift:
+            redshift.append(record.redshift)
+
+        # For collections, each record can have its own wavelength grid
+        if collection:
+            spectral_axis[k, :] = record.wavelength
+
+        # Extract all additional metadata attributes
+        for attribute, value in record.items():
+            if attribute not in ('flux', 'ivar', 'mask', 'model', 'redshift',
+                                 'wavelength'):
+                if single_record:
+                    # For single record, store metadata values as scalars
+                    meta[attribute] = value
+                else:
+                    # For multiple records, accumulate metadata values into
+                    # lists (creates list on first encounter, then appends)
+                    meta.setdefault(attribute, []).append(value)
+
+def to_Spectrum(results, *, collection=False):
+    """Convert `results` to :class:`specutils.Spectrum`.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results, or a single record from a set of results.
+    collection : :class:`bool`, optional
+        If ``True``, attempt to convert to a
+        :class:`~specutils.SpectrumCollection` instead.
+
+    Returns
+    -------
+    :class:`~specutils.Spectrum` or :class:`~specutils.SpectrumCollection`
+        The requested object.
+
+    Raises
+    ------
+    ValueError
+        If `results` can't be converted to a :class:`~specutils.Spectrum`
+        object in a valid way. For example, if some of the spectra have a
+        different wavelength solution.
+    """
+    # Prepare records
+    if isinstance(results, dict):
+        records = [results]
+        r0 = results
+    else:
+        try:
+            records = results.records
+            if len(records) == 0:
+                raise ValueError("No records found in results. Cannot "
+                                 "convert empty results to Spectrum.")
+            r0 = results.records[0]
+        except (IndexError, AttributeError) as e:
+            raise ValueError("No records found in results. Cannot "
+                             "convert empty results to Spectrum.") from e
+
+    # Validate
+    _validate_records(records, r0, collection)
+
+    # Determine which optional data components exist in records
+    has_redshift = 'redshift' in r0
+    has_model = 'model' in r0
+    n_pixels = r0.flux.shape[0]
+    single_record = len(records) == 1
+
+    # Set flux shape based on number of records
+    if single_record:
+        flux_shape = (n_pixels,)
+    else:
+        flux_shape = (len(records), n_pixels)
+
+    # Build spectral axis
+    if collection:
+        spectral_axis = np.zeros((len(records), r0.wavelength.shape[0]),
+                                 dtype=r0.wavelength.dtype)
+    else:
+        spectral_axis = r0.wavelength
+
+    # Initialize arrays
+    flux = np.zeros(flux_shape, dtype=r0.flux.dtype)
+    uncertainty = np.zeros(flux_shape, dtype=r0.ivar.dtype)
+    mask = np.zeros(flux_shape, dtype=r0.mask.dtype)
+    model = np.zeros(flux_shape, dtype=r0.model.dtype) if has_model else None
+    redshift = []
+    meta = {}
+
+    # Populate arrays by iterating through records
+    _extract_record_data(records, flux, uncertainty, mask, model, redshift,
+                         meta, spectral_axis, has_model, has_redshift,
+                         collection, single_record)
+
+    # Convert redshift list to numpy array if exists
+    if has_redshift:
+        redshift = np.array(redshift)
+    else:
+        redshift = None
+
+    # Add model to metadata if exists
+    if has_model:
+        meta['model'] = model
+
+    # Prepare arguments common to both Spectrum and SpectrumCollection
+    common_args = {
+        'flux': flux * u.Unit('1e-17 erg cm-2 s-1 AA-1'),
+        'spectral_axis': spectral_axis * u.AA,
+        'uncertainty': InverseVariance(uncertainty),
+        'mask': mask,
+        'meta': meta}
+
+    if collection:
+        return SpectrumCollection(**common_args)
+
+    return Spectrum(**common_args, redshift=redshift)
+
+def to_SpectrumList(results):
+    """Convert `results` to :class:`specutils.SpectrumList`.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results.
+
+    Returns
+    -------
+    :class:`~specutils.SpectrumList`
+        The requested object.
+    """
+    s = SpectrumList()
+    if isinstance(results, dict):
+        records = [results]
+    else:
+        records = results.records
+    for r in records:
+        if 'redshift' in r:
+            redshift = r.redshift
+        else:
+            redshift = None
+        meta = dict()
+        for attribute in r:
+            if attribute not in ('flux', 'wavelength', 'ivar',
+                                 'redshift', 'mask'):
+                meta[attribute] = r[attribute]
+        s1 = Spectrum(flux=r.flux*u.Unit('1e-17 erg cm-2 s-1 AA-1'),
+                      spectral_axis=r.wavelength*u.AA,
+                      uncertainty=InverseVariance(r.ivar),
+                      redshift=redshift,
+                      mask=r.mask,
+                      meta=meta)
+        s.append(s1)
+    return s
+
+def to_specutils(results):
+    """Convert `results` to a specutils object.
+
+    Parameters
+    ----------
+    results : :class:`sparcl.Results.Retrieved`
+        Retrieved results.
+
+    Returns
+    -------
+    :class:`~specutils.Spectrum` or :class:`~specutils.SpectrumCollection`
+    or :class:`~specutils.SpectrumList`
+        The most natural conversion to a specutils object.
+
+    Raises
+    ------
+    ValueError
+        If no valid conversion can be performed.
+    """
+    try:
+        # Try standard Spectrum conversion first
+        s = to_Spectrum(results)
+    except ValueError as ve:
+        # Check the error message to determine appropriate alternative
+        if 'SpectrumList' in str(ve):
+            # Different wavelength array lengths use SpectrumList
+            s = to_SpectrumList(results)
+        elif 'SpectrumCollection' in str(ve):
+            # Same wavelength length but diff pixels use SpectrumCollection
+            s = to_Spectrum(results, collection=True)
+        else:
+            raise ValueError("Could not find a valid conversion to "
+                             "specutils objects!")
+    return s
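
As the module above shows, `to_specutils()` first tries a plain `Spectrum`; if the records share a wavelength length but not identical pixel values it falls back to a `SpectrumCollection`, and if the lengths differ it falls back to a `SpectrumList`. The following offline sketch of that dispatch is not part of the package; it mirrors the unit tests below, but with hypothetical `AttrDict`/`FakeRetrieved` stand-ins instead of `sparcl.utils._AttrDict`, and it assumes `numpy`, `astropy`, and `specutils` are installed (no SPARCL server needed).

```python
# Sketch of the to_specutils() dispatch using stand-in records.
import numpy as np
import sparcl.specutils as su


class AttrDict(dict):
    """Minimal dict with attribute access, standing in for _AttrDict."""
    __getattr__ = dict.__getitem__


class FakeRetrieved:
    """Anything with a .records attribute works for the converter."""
    def __init__(self, records):
        self.records = records


def make_record(npix, offset=0.0):
    # Build one fake SPARCL record with the fields the converter reads.
    w = np.linspace(3000.0, 10000.0, npix, dtype=np.float32) + offset
    return AttrDict(wavelength=w,
                    flux=np.ones(npix, dtype=np.float32),
                    ivar=np.full(npix, 100.0, dtype=np.float32),
                    mask=np.zeros(npix, dtype=np.int32),
                    redshift=0.5,
                    data_release='DR16')


same = FakeRetrieved([make_record(50), make_record(50)])
shifted = FakeRetrieved([make_record(50), make_record(50, offset=10.0)])
ragged = FakeRetrieved([make_record(50), make_record(75)])

print(type(su.to_specutils(same)))     # Spectrum: identical wavelength grid
print(type(su.to_specutils(shifted)))  # SpectrumCollection: same length, shifted pixels
print(type(su.to_specutils(ragged)))   # SpectrumList: different lengths
```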
{sparclclient-1.2.8b3 → sparclclient-1.2.9b1}/tests/tests_api.py
@@ -66,6 +66,8 @@ import sparcl.exceptions as ex
 import sparcl.gather_2d as sg
 import sparcl.client
 import sparcl.gather_2d
+import sparcl.specutils as su
+from sparcl.utils import _AttrDict
 
 #! import sparcl.utils as ut
 
@@ -125,12 +127,19 @@ def testcase_log_console(lggr):
 def load_tests(loader, tests, ignore):
     import doctest
 
+    IN_CI = os.getenv('CI') or os.getenv('GITLAB_CI')
+
+    if IN_CI:
+        print("Skipping doctests in CI environment.")
+        return tests
+
     if serverurl == _PROD:
         print(f"Arranging to run doctests against: sparcl.client")
         tests.addTests(doctest.DocTestSuite(sparcl.client))
 
-        print(f"Arranging to run doctests against: sparcl.gather_2d")
-        tests.addTests(doctest.DocTestSuite(sparcl.gather_2d))
+        # Commenting out until this package is revisited (02/02/2026)
+        #print(f"Arranging to run doctests against: sparcl.gather_2d")
+        #tests.addTests(doctest.DocTestSuite(sparcl.gather_2d))
     else:
         print(
             "Not running doctests since you are not running client"
@@ -170,15 +179,25 @@ class SparclClientTest(unittest.TestCase):
             f"{str(datetime.datetime.now())}!"
         )
 
+        if os.getenv('CI'):
+            try:
+                requests.get(serverurl, timeout=2)
+            except (requests.ConnectionError, requests.Timeout):
+                raise unittest.SkipTest(
+                    f"Cannot reach {serverurl} from CI environment. "
+                    "Run these tests locally or on a network-enabled "
+                    "runner.")
+
         # Client object creation compares the version from the Server
         # against the one expected by the Client. Raise error if
         # the Client is at least one major version behind.
-
+        connect_timeout = 3.1 if os.getenv('CI') else 1.1
         cls.client = sparcl.client.SparclClient(
             url=serverurl,
             verbose=clverb,
             show_curl=showcurl,
             announcement=False,
+            connect_timeout=connect_timeout,
         )
         cls.timing = dict()
         cls.doc = dict()
@@ -1060,3 +1079,109 @@ class AuthTest(unittest.TestCase):
         warnings.filterwarnings("ignore")
         exp = "exp.auth_retrieve_8"
         self.auth_retrieve(None, self.Pub, exp)
+
+class MockRetrieved(object):
+    """Create an object with a .records attribute."""
+    def __init__(self, user_list):
+        self.records = user_list
+
+class SpecutilsTest(unittest.TestCase):
+    """Test the SPARCL to_specutils method"""
+
+    def setUp(self):
+        """Create a dummy Results object."""
+        self.w = numpy.linspace(3000.0, 10000.0, dtype=numpy.float32)
+        self.f = numpy.ones(self.w.shape, dtype=self.w.dtype)
+        self.i = 1.0/(0.1*self.f)**2
+        self.m = numpy.zeros(self.w.shape, dtype=numpy.int32)
+        self.r0 = _AttrDict({'redshift': 1.23,
+                             'wavelength': self.w,
+                             'flux': self.f,
+                             'ivar': self.i,
+                             'mask': self.m,
+                             'data_release': 'DR12'})
+        self.r = MockRetrieved([self.r0,
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy()),
+                                _AttrDict(self.r0.copy())])
+
+    def test_to_specutils_one_result(self):
+        """Test the to_specutils() method with one result."""
+        s = su.to_specutils(self.r0)
+        self.assertTrue(isinstance(s, su.Spectrum))
+        self.assertTrue((s.mask == 0).all())
+
+    def test_to_specutils_multiple_results(self):
+        """Test the to_specutils() method with more than one result."""
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.Spectrum))
+        self.assertTrue((s.mask == 0).all())
+
+    def test_to_specutils_bad_results(self):
+        """Test the to_specutils() method with a bad result."""
+        del self.r0['wavelength']
+        with self.assertRaises(ValueError) as ex:
+            su.to_specutils(self.r0)
+        self.assertEqual(ex.exception.args[0],
+                         "Could not find a valid conversion to specutils "
+                         "objects!")
+
+    def test_to_specutils_different_release(self):
+        """Test for the presence of a warning if multiple releases are
+        present."""
+        self.r.records[1]['data_release'] = 'DR11'
+        with self.assertWarns(UserWarning) as wn:
+            su.to_specutils(self.r)
+        self.assertEqual(wn.warning.args[0],
+                         "Results are not all from the same data release, "
+                         "conversion may not be possible.")
+
+    def test_to_specutils_spectrum_list(self):
+        """Test conversion to SpectrumList."""
+        self.r.records[1]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         100,
+                                                         dtype=numpy.float32)
+        self.r.records[2]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         150,
+                                                         dtype=numpy.float32)
+        self.r.records[3]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         200,
+                                                         dtype=numpy.float32)
+        self.r.records[4]['wavelength'] = numpy.linspace(3000.0,
+                                                         10000.0,
+                                                         250,
+                                                         dtype=numpy.float32)
+        self.r.records[1]['flux'] = numpy.ones((100, ), dtype=numpy.float32)
+        self.r.records[2]['flux'] = numpy.ones((150, ), dtype=numpy.float32)
+        self.r.records[3]['flux'] = numpy.ones((200, ), dtype=numpy.float32)
+        self.r.records[4]['flux'] = numpy.ones((250, ), dtype=numpy.float32)
+        self.r.records[1]['ivar'] = numpy.ones((100, ), dtype=numpy.float32)
+        self.r.records[2]['ivar'] = numpy.ones((150, ), dtype=numpy.float32)
+        self.r.records[3]['ivar'] = numpy.ones((200, ), dtype=numpy.float32)
+        self.r.records[4]['ivar'] = numpy.ones((250, ), dtype=numpy.float32)
+        self.r.records[1]['mask'] = numpy.zeros((100, ), dtype=numpy.int32)
+        self.r.records[2]['mask'] = numpy.zeros((150, ), dtype=numpy.int32)
+        self.r.records[3]['mask'] = numpy.zeros((200, ), dtype=numpy.int32)
+        self.r.records[4]['mask'] = numpy.zeros((250, ), dtype=numpy.int32)
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.SpectrumList))
+        self.assertTrue(isinstance(s[0], su.Spectrum))
+        self.assertEqual(s[0].spectral_axis.shape, (50,))
+
+    def test_to_specutils_spectrum_collection(self):
+        """Test conversion to SpectrumCollection."""
+        self.r.records[1]['wavelength'] = self.w.copy() + 10.0
+        self.r.records[2]['wavelength'] = self.w.copy() + 20.0
+        self.r.records[3]['wavelength'] = self.w.copy() + 30.0
+        self.r.records[4]['wavelength'] = self.w.copy() + 40.0
+        s = su.to_specutils(self.r)
+        self.assertTrue(isinstance(s, su.SpectrumCollection))
+        self.assertEqual(s.spectral_axis.shape, (5, 50))
+        self.assertEqual(s.spectral_axis[0, :].shape, (50, ))
+        self.assertTrue(numpy.allclose(s.spectral_axis.value[0, :], self.w))
+        self.assertTrue(numpy.allclose(s.spectral_axis.value[4, :],
+                                       self.w + 40))