adss 1.33-py3-none-any.whl → 1.34-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adss/auth.py +193 -9
- adss/client.py +18 -0
- adss/endpoints/images.py +64 -27
- {adss-1.33.dist-info → adss-1.34.dist-info}/METADATA +1 -1
- {adss-1.33.dist-info → adss-1.34.dist-info}/RECORD +8 -8
- {adss-1.33.dist-info → adss-1.34.dist-info}/WHEEL +0 -0
- {adss-1.33.dist-info → adss-1.34.dist-info}/licenses/LICENSE +0 -0
- {adss-1.33.dist-info → adss-1.34.dist-info}/top_level.txt +0 -0
adss/auth.py
CHANGED
@@ -20,6 +20,63 @@ _BACKOFF_FACTOR = float(os.getenv("ADSS_RETRY_BACKOFF", "0.5"))
 _TRUST_ENV = os.getenv("ADSS_TRUST_ENV", "1").lower() not in ("0", "false", "no")
 _FORCE_CLOSE_STREAMS = os.getenv("ADSS_FORCE_CLOSE_STREAMS", "0").lower() in ("1", "true", "yes")
 
+def _read_all_bytes(resp: httpx.Response,
+                    chunk_size: int = 1024 * 1024,
+                    total_timeout: Optional[float] = None) -> bytes:
+    """
+    Stream the response body to memory and return bytes.
+    - Respects httpx read timeout between chunks (via iter_bytes()).
+    - Optionally enforces an overall time budget (total_timeout).
+    - Validates Content-Length when present.
+    - Always closes the response.
+    """
+    import io, time
+    from httpx import ReadTimeout, RemoteProtocolError, TransportError
+
+    # If httpx has already cached content, return it.
+    if hasattr(resp, "_content"):
+        return resp._content  # type: ignore[attr-defined]
+
+    buf = io.BytesIO()
+    bytes_read = 0
+    start = time.monotonic()
+
+    expected = None
+    cl = resp.headers.get("Content-Length")
+    if cl:
+        try:
+            expected = int(cl)
+        except ValueError:
+            expected = None
+
+    try:
+        for chunk in resp.iter_bytes(chunk_size=chunk_size):
+            if not chunk:
+                break
+            buf.write(chunk)
+            bytes_read += len(chunk)
+            if total_timeout is not None and (time.monotonic() - start) > total_timeout:
+                raise ReadTimeout("overall read timeout exceeded")
+    except (ReadTimeout, RemoteProtocolError, TransportError):
+        # ensure socket cleanup before propagating
+        try:
+            resp.close()
+        finally:
+            raise
+
+    data = buf.getvalue()
+    if expected is not None and bytes_read != expected:
+        try:
+            resp.close()
+        finally:
+            raise RemoteProtocolError(
+                f"Incomplete body: got {bytes_read} bytes, expected {expected}"
+            )
+
+    # cache like httpx does; then close
+    resp._content = data  # type: ignore[attr-defined]
+    resp.close()
+    return data
 
 def _to_httpx_timeout(t):
     """Map (connect, read) tuple or scalar into httpx.Timeout."""
@@ -31,26 +88,87 @@ def _to_httpx_timeout(t):
     return httpx.Timeout(connect=_CONNECT_TIMEOUT, read=_READ_TIMEOUT, write=_READ_TIMEOUT, pool=_CONNECT_TIMEOUT)
 
 
-def _attach_requests_compat(resp: httpx.Response)
+def _attach_requests_compat(resp: httpx.Response):
     """
-
-    - iter_content(chunk_size) -> yields bytes
-    - raw.read(
+    Give httpx.Response a requests-like surface and a SAFE .read():
+    - resp.iter_content(chunk_size) -> yields bytes
+    - resp.raw.read() -> returns remaining bytes
+    - resp.read() -> safe, streaming-based, idempotent
     """
+    import io, time
+    from httpx import ReadTimeout, RemoteProtocolError, TransportError
+
+    # requests-like streaming
     if not hasattr(resp, "iter_content"):
         def iter_content(chunk_size: int = 1024 * 1024):
             return resp.iter_bytes(chunk_size=chunk_size)
         setattr(resp, "iter_content", iter_content)
 
+    # requests-like raw.read()
     if not hasattr(resp, "raw"):
         class _RawAdapter:
             def __init__(self, r: httpx.Response):
                 self._r = r
             def read(self, amt: Optional[int] = None) -> bytes:
-                #
-                return self._r
+                # Use the same safe read under the hood
+                return getattr(self._r, "read")()
         setattr(resp, "raw", _RawAdapter(resp))
 
+    # ---- SAFE .read(): stream to memory, cache, and close ----
+    # Only replace if httpx hasn't already cached content
+    def _safe_read(self, *, chunk_size: int = 1024 * 1024,
+                   total_timeout: Optional[float] = None) -> bytes:
+        # If httpx already cached, return it (idempotent)
+        if hasattr(self, "_content"):
+            return self._content
+
+        buf = io.BytesIO()
+        bytes_read = 0
+        start = time.monotonic()
+
+        # If server provided length, we can validate
+        expected = None
+        cl = self.headers.get("Content-Length")
+        if cl:
+            try:
+                expected = int(cl)
+            except ValueError:
+                expected = None
+
+        try:
+            for chunk in self.iter_bytes(chunk_size=chunk_size):
+                if not chunk:
+                    break
+                buf.write(chunk)
+                bytes_read += len(chunk)
+                if total_timeout is not None and (time.monotonic() - start) > total_timeout:
+                    raise ReadTimeout("overall read timeout exceeded")
+        except (ReadTimeout, RemoteProtocolError, TransportError) as e:
+            # Ensure the socket is cleaned up
+            try:
+                self.close()
+            finally:
+                raise
+
+        data = buf.getvalue()
+        # Validate length if known
+        if expected is not None and bytes_read != expected:
+            try:
+                self.close()
+            finally:
+                raise RemoteProtocolError(
+                    f"Incomplete body: got {bytes_read} bytes, expected {expected}"
+                )
+
+        # Cache like httpx normally does, then close the stream
+        self._content = data
+        self.close()
+        return data
+
+    # Bind as a method (so `resp.read()` calls _safe_read)
+    import types
+    resp.read = types.MethodType(_safe_read, resp)  # type: ignore[attr-defined]
+
     return resp
 
 
@@ -66,7 +184,11 @@ class Auth:
         self.verify_ssl = verify_ssl
 
        # Single keep-alive client; set verify at construction.
-        self._client = httpx.Client(
+        self._client = httpx.Client(
+            trust_env=_TRUST_ENV,
+            verify=self.verify_ssl,
+            limits=httpx.Limits(max_keepalive_connections=0, max_connections=10)
+        )
 
     def login(self, username: str, password: str, **kwargs) -> Tuple[str, User]:
         """
@@ -300,6 +422,7 @@ class Auth:
         url: str,
         headers: Optional[Dict[str, str]] = None,
         auth_required: bool = False,
+        timeout: Optional[float] = None,
         **kwargs
     ) -> requests.Response:
         """
@@ -322,7 +445,11 @@ class Auth:
         if _FORCE_CLOSE_STREAMS:
             final_headers.setdefault("Connection", "close")
 
-
+        if timeout is None:
+            timeout = _to_httpx_timeout(kwargs.pop('timeout', _DEFAULT_TIMEOUT))
+        else:
+            timeout = _to_httpx_timeout(timeout)
+
         follow_redirects = kwargs.pop('allow_redirects', True)
         kwargs.pop('verify', None)  # verify is fixed on client
 
@@ -343,4 +470,61 @@ class Auth:
             follow_redirects=follow_redirects,
         )
         handle_response_errors(resp)  # fail fast on HTTP errors
-        return _attach_requests_compat(resp)
+        return _attach_requests_compat(resp)
+
+    def download_bytes(
+        self,
+        method: str,
+        url: str,
+        headers: Optional[Dict[str, str]] = None,
+        auth_required: bool = False,
+        timeout: Optional[float | Tuple[float, float]] = None,
+        total_timeout: Optional[float] = None,
+        **kwargs
+    ) -> bytes:
+        """
+        Stream a large body and return bytes.
+        Safe replacement for patterns that do `resp = download(...); resp.read()`.
+        """
+        if auth_required and not self.is_authenticated():
+            raise AuthenticationError("Authentication required for this request")
+
+        url = self._full_url(url)
+
+        final_headers = self._get_auth_headers()
+        if headers:
+            final_headers.update(headers)
+        # avoid gzip surprises on binaries; optionally force close via env
+        final_headers.setdefault("Accept-Encoding", "identity")
+        if _FORCE_CLOSE_STREAMS:
+            final_headers.setdefault("Connection", "close")
+
+        # timeouts
+        if timeout is None:
+            timeout = _to_httpx_timeout(_DEFAULT_TIMEOUT)
+        else:
+            timeout = _to_httpx_timeout(timeout)
+        follow_redirects = kwargs.pop('allow_redirects', True)
+        kwargs.pop('verify', None)
+
+        params = kwargs.pop('params', None)
+        data = kwargs.pop('data', None)
+        json_ = kwargs.pop('json', None)
+        files = kwargs.pop('files', None)
+
+        # open the stream
+        resp = self._request_with_retries_stream(
+            method=method,
+            url=url,
+            headers=final_headers,
+            params=params,
+            data=data,
+            json=json_,
+            files=files,
+            timeout=timeout,
+            follow_redirects=follow_redirects,
+        )
+        handle_response_errors(resp)  # raise for HTTP errors
+
+        # read it all safely and return
+        return _read_all_bytes(resp, total_timeout=total_timeout)
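Usage note: the auth.py changes add a bytes-first download path (download_bytes, backed by _read_all_bytes) alongside the existing Response-returning request helper, which now gets a safe, idempotent .read() via _attach_requests_compat. A minimal sketch of the new call follows; the constructor arguments, endpoint path and timeout values are illustrative placeholders, not taken from the package:

from adss.auth import Auth

# Placeholder constructor arguments; the real signature lives in adss/auth.py.
auth = Auth("https://adss.example.org", verify_ssl=True)

# download_bytes streams the body chunk by chunk, validates Content-Length,
# and enforces an optional overall budget (total_timeout) on top of the
# per-chunk httpx read timeout, returning plain bytes to the caller.
payload = auth.download_bytes(
    method="GET",
    url="/adss/v1/some/large/file",  # illustrative endpoint path
    timeout=(10, 60),                # (connect, read) tuple, mapped by _to_httpx_timeout
    total_timeout=600,               # cap the whole transfer at 10 minutes
)
print(f"downloaded {len(payload)} bytes")

The total_timeout guards against a stalled-but-trickling connection that the per-chunk read timeout alone would not catch.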
adss/client.py
CHANGED
@@ -294,6 +294,24 @@ class ADSSClient:
         """
         return self.metadata.get_database_metadata(**kwargs)
 
+    def pretty_print_db_metadata(self, dbmeta: Optional[DatabaseMetadata] = None) -> None:
+        """
+        Pretty print the database metadata in a hierarchical format.
+
+        Args:
+            dbmeta: Optional DatabaseMetadata object. If not provided, fetches current metadata.
+        """
+        if dbmeta is None:
+            dbmeta = self.get_database_metadata()
+
+        for schema in dbmeta.schemas:
+            print(f"Schema: {schema.name}")
+            for table in schema.tables:
+                print(f"  Table: {table.name}")
+                for column in table.columns:
+                    nullable = "NULL" if column.is_nullable else "NOT NULL"
+                    print(f"    Column: {column.name} ({column.data_type}, {nullable})")
+
     def update_profile(self,
                        email: Optional[str] = None,
                        full_name: Optional[str] = None,
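Usage note: a short sketch of the new pretty-printer; the client constructor arguments are placeholders, but both calls rely only on methods visible in this diff:

from adss.client import ADSSClient

# Placeholder constructor arguments; see adss/client.py for the real signature.
client = ADSSClient("https://adss.example.org")

# With no argument the helper fetches metadata itself, then prints a
# Schema -> Table -> Column tree with data type and nullability.
client.pretty_print_db_metadata()

# Or reuse an already-fetched DatabaseMetadata object to avoid a second request.
dbmeta = client.get_database_metadata()
client.pretty_print_db_metadata(dbmeta)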
adss/endpoints/images.py
CHANGED
@@ -7,6 +7,7 @@ import os
 from adss.exceptions import ResourceNotFoundError
 from adss.utils import handle_response_errors
 
+import re
 
 class ImagesEndpoint:
     """
@@ -573,24 +574,42 @@ class StampImagesEndpoint:
         except Exception as e:
             raise ResourceNotFoundError(f"Failed to create stamp by coordinates: {e}")
 
-
-
-
-
-
-
-
-
+    # TODO: Apply the same pattern of this functions to all download functions in this file
+    def create_stamp_by_object(
+        self,
+        collection_id: int,
+        object_name: str,
+        filter_name: str,
+        ra: float,
+        dec: float,
+        size: float,
+        size_unit: str = "arcmin",
+        format: str = "fits",
+        zmin: Optional[float] = None,
+        zmax: Optional[float] = None,
+        pattern: Optional[str] = None,
+        output_path: Optional[str] = None,
+        **kwargs
+    ) -> Union[bytes, str]:
         url = f"{self.base_url}/adss/v1/images/collections/{collection_id}/stamp_by_object"
+
+        # Build headers (auth if available), prefer identity for big binaries
         try:
             headers = self.auth_manager._get_auth_headers()
-        except:
-            headers = {
+        except Exception:
+            headers = {}
+        headers.setdefault("Accept", "image/png" if format == "png" else "application/fits")
+        headers.setdefault("Accept-Encoding", "identity")
 
+        # Payload
         payload: Dict[str, Any] = {
-            "object_name": object_name,
-            "
-            "
+            "object_name": object_name,
+            "filter_name": filter_name,
+            "ra": ra,
+            "dec": dec,
+            "size": size,
+            "size_unit": size_unit,
+            "format": format,
         }
         if zmin is not None:
             payload["zmin"] = zmin
@@ -599,8 +618,9 @@ class StampImagesEndpoint:
         if pattern:
             payload["pattern"] = pattern
 
+        # Download bytes in one go (no Response object leaked to callers)
         try:
-
+            data = self.auth_manager.download_bytes(
                 method="POST",
                 url=url,
                 headers=headers,
@@ -608,22 +628,39 @@ class StampImagesEndpoint:
                 auth_required=False,
                 **kwargs
             )
-
+        except Exception as e:
+            raise ResourceNotFoundError(f"Failed to create stamp by object: {e}")
 
-
-
-
+        # If no output_path => return bytes
+        if not output_path:
+            return data
 
-
-
-        if output_path:
-            with open(output_path, 'wb') as f:
-                f.write(resp.read())
-            return resp.read()
-        return resp.read()
+        # If writing to disk, synthesize a stable filename
+        ext = "fits" if format == "fits" else "png"
 
-
-
+        # sanitize components for filesystem safety
+        def _safe(s: str) -> str:
+            s = s.strip()
+            s = re.sub(r"\s+", "_", s)               # spaces -> underscores
+            s = re.sub(r"[^A-Za-z0-9._\-+]", "", s)  # drop weird chars
+            return s or "unknown"
+
+        obj = _safe(object_name)
+        filt = _safe(filter_name)
+        size_str = f"{size:g}{size_unit}"
+
+        filename = f"stamp_{obj}_{filt}_{size_str}.{ext}"
+
+        # If output_path is a dir, append filename; otherwise treat as full path
+        final_path = output_path
+        if os.path.isdir(final_path):
+            final_path = os.path.join(final_path, filename)
+
+        os.makedirs(os.path.dirname(final_path) or ".", exist_ok=True)
+        with open(final_path, "wb") as f:
+            f.write(data)
+
+        return final_path
 
 
 class TrilogyImagesEndpoint:
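Usage note: create_stamp_by_object now either returns raw bytes or writes to disk and returns the final path, depending on output_path. A sketch under the assumption that `stamps` is a StampImagesEndpoint instance reachable from the client (the wiring is not shown in this diff); the collection id, coordinates and directory are made up:

stamps = ...  # obtain the StampImagesEndpoint from your ADSSClient; wiring not shown in this diff

# No output_path -> the FITS bytes are returned directly.
data = stamps.create_stamp_by_object(
    collection_id=1,
    object_name="NGC 104",
    filter_name="r",
    ra=6.0223, dec=-72.0814,
    size=3, size_unit="arcmin",
    format="fits",
)

# output_path pointing at an existing directory -> a sanitized filename is
# synthesized and the final path is returned instead of the bytes.
path = stamps.create_stamp_by_object(
    collection_id=1,
    object_name="NGC 104",
    filter_name="r",
    ra=6.0223, dec=-72.0814,
    size=3, size_unit="arcmin",
    format="fits",
    output_path="./stamps",
)
# path ends in something like stamps/stamp_NGC_104_r_3arcmin.fits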
{adss-1.33.dist-info → adss-1.34.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
 adss/__init__.py,sha256=3FpHFL3Pk5BvETwd70P2QqYvDq799Cu2AGxGxudGAAE,1020
-adss/auth.py,sha256=
-adss/client.py,sha256=
+adss/auth.py,sha256=kxKX9OSxnD7gWLcjJfKfNC_16_4rENT2Lw18QtPYq6I,18181
+adss/client.py,sha256=JpqcxSSGccxFxeY4VNLjstTcupTr8B5uOGzywzoEXYU,30670
 adss/exceptions.py,sha256=YeN-xRHvlSmwyS8ni2jOEhhgZK9J1jsG11pOedy3Gfg,1482
 adss/utils.py,sha256=hBfE6FJD-R6OTWcIf4ChtHTS07EHFGM6Oh1OE_xOjOE,3557
 adss/endpoints/__init__.py,sha256=Pr29901fT8ClCS2GasTjTiBNyn7DfVfxILpYDFsMvPA,488
 adss/endpoints/admin.py,sha256=S6ZrkeA_Lh_LCpF1NHyfMKqjbIiylYXUSV65H_WKg1U,16391
-adss/endpoints/images.py,sha256=
+adss/endpoints/images.py,sha256=b9xE_n0F384tEK4vooT9cQmcfQEu2n2Ir_5AgaChkj0,32680
 adss/endpoints/metadata.py,sha256=RPrRP6Uz6-uPMIcntMgfss9vAd5iN7JXjZbF8SW0EYg,8238
 adss/endpoints/queries.py,sha256=qpJ0mdJK8DDhznkHX_DOEWkvbWKFyfemewcKyLFLUP4,17631
 adss/endpoints/users.py,sha256=6Abkl3c3_YKdMYR_JWI-uL9HTHxcjlIOnE29GyN5_QE,10811
@@ -13,9 +13,9 @@ adss/models/__init__.py,sha256=ADWVaGy4dkpEMH3iS_6EnRSBlEgoM5Vy9zORQr-UG6w,404
 adss/models/metadata.py,sha256=6fdH_0BenVRmeXkkKbsG2B68O-N2FXTTRgxsEhAHRoU,4058
 adss/models/query.py,sha256=V1H9UAv9wORAr85aajeY7H1zaxyfNtKuEoBtBU66DbM,4820
 adss/models/user.py,sha256=5qVT5qOktokmVLkGszPGCTZWv0wC-7aBMvJ8EeBOqdw,3493
-adss-1.
+adss-1.34.dist-info/licenses/LICENSE,sha256=yPw116pnd1J4TuMPnvm6I_irZUyC30EoBZ4BtWFAL7I,1557
 dev/fetch_idr6.py,sha256=b6FrHPr-ZLaDup_wLOaQWP2fK254Sr3YNHbTxuUt088,12788
-adss-1.
-adss-1.
-adss-1.
-adss-1.
+adss-1.34.dist-info/METADATA,sha256=rVawfEWIKgnlcWys4QpUE4offNyOAmpVz2lwJWxpd_4,8759
+adss-1.34.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+adss-1.34.dist-info/top_level.txt,sha256=NT2zObOOiTWXc0yowpEjT6BiiI1e7WXlXd0ZoK7T5hk,9
+adss-1.34.dist-info/RECORD,,
{adss-1.33.dist-info → adss-1.34.dist-info}/WHEEL
File without changes
{adss-1.33.dist-info → adss-1.34.dist-info}/licenses/LICENSE
File without changes
{adss-1.33.dist-info → adss-1.34.dist-info}/top_level.txt
File without changes