adss 1.21-py3-none-any.whl → 1.23-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
adss/__init__.py CHANGED
@@ -16,9 +16,14 @@ from adss.models.user import User, Role
 from adss.models.query import Query, QueryResult
 from adss.models.metadata import Schema, Table, Column
 
+from adss.utils import (
+    handle_response_errors, parse_datetime, parquet_to_dataframe
+)
+
 __all__ = [
     'ADSSClient',
     'ADSSClientError', 'AuthenticationError', 'PermissionDeniedError',
     'ResourceNotFoundError', 'QueryExecutionError',
-    'User', 'Role', 'Query', 'QueryResult', 'Schema', 'Table', 'Column'
+    'User', 'Role', 'Query', 'QueryResult', 'Schema', 'Table', 'Column',
+    'handle_response_errors', 'parse_datetime', 'parquet_to_dataframe'
 ]
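With this change, 1.23 re-exports the three utility helpers at the package root as well as from adss.utils. A minimal sketch, assuming adss 1.23 is installed (the signatures of parse_datetime and parquet_to_dataframe are not shown in this diff):

from adss import handle_response_errors, parse_datetime, parquet_to_dataframe
from adss.utils import handle_response_errors as from_utils

# The top-level names are re-exports, not copies: both import paths yield the same object.
assert handle_response_errors is from_utils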
adss/utils.py CHANGED
@@ -13,7 +13,6 @@ from adss.exceptions import (
     ResourceNotFoundError, QueryExecutionError, ServerError
 )
 
-
 def handle_response_errors(response):
     """Handles HTTP response errors and raises appropriate exceptions."""
     if 200 <= response.status_code < 300:
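For orientation, this helper wraps raw requests responses; a hedged usage sketch (the endpoint URL is hypothetical, and the exact status-code-to-exception mapping is only partly visible in this hunk):

import requests
from adss.utils import handle_response_errors

resp = requests.get("http://example.org/adss/api/queries")  # hypothetical endpoint
handle_response_errors(resp)  # returns normally for 2xx; otherwise raises one of the adss exceptions imported above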
adss-1.21.dist-info/METADATA → adss-1.23.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: adss
-Version: 1.21
+Version: 1.23
 Summary: Astronomical Data Smart System
 Home-page: https://github.com/schwarzam/adss
 Author: Gustavo Schwarz
adss-1.23.dist-info/RECORD ADDED
@@ -0,0 +1,20 @@
+adss/__init__.py,sha256=3FpHFL3Pk5BvETwd70P2QqYvDq799Cu2AGxGxudGAAE,1020
+adss/auth.py,sha256=M8GAN8pdfmJfsEOqv_EW2ru0uP0H1JtK7IfBDrIZMyg,4025
+adss/client.py,sha256=Ojc6jqZtCBMwRoTGuBiF0SEDkdWs_-xbGwyk03nHkr8,29613
+adss/exceptions.py,sha256=YeN-xRHvlSmwyS8ni2jOEhhgZK9J1jsG11pOedy3Gfg,1482
+adss/utils.py,sha256=KeQUtTCcye3W07oHpBnwS7g3gG-RqwWMlaE7UgDWwsU,3557
+adss/endpoints/__init__.py,sha256=Pr29901fT8ClCS2GasTjTiBNyn7DfVfxILpYDFsMvPA,488
+adss/endpoints/admin.py,sha256=S6ZrkeA_Lh_LCpF1NHyfMKqjbIiylYXUSV65H_WKg1U,16391
+adss/endpoints/images.py,sha256=ItAiBss_jQvWQWRUvy0c9Cjn1r9lDR8eOPauqOcPcZ8,35777
+adss/endpoints/metadata.py,sha256=RPrRP6Uz6-uPMIcntMgfss9vAd5iN7JXjZbF8SW0EYg,8238
+adss/endpoints/queries.py,sha256=5BONw_IcGORMPNe-5J6BpoFY6z7lKcktEVhqZ9j17_8,17286
+adss/endpoints/users.py,sha256=6Abkl3c3_YKdMYR_JWI-uL9HTHxcjlIOnE29GyN5_QE,10811
+adss/models/__init__.py,sha256=ADWVaGy4dkpEMH3iS_6EnRSBlEgoM5Vy9zORQr-UG6w,404
+adss/models/metadata.py,sha256=6fdH_0BenVRmeXkkKbsG2B68O-N2FXTTRgxsEhAHRoU,4058
+adss/models/query.py,sha256=Af-iojZb-nO6qj-yMT_PlNM7Hip6EwBfNeaQPMJPNM0,4293
+adss/models/user.py,sha256=5qVT5qOktokmVLkGszPGCTZWv0wC-7aBMvJ8EeBOqdw,3493
+adss-1.23.dist-info/LICENSE,sha256=1aYqcyqjrdNXY9hqgZkCWprcoA112oKvdrfPyvMYPTc,1468
+adss-1.23.dist-info/METADATA,sha256=jkFEx5_HoE0kyop0UCpMGZTJDYnBjsLgAzSz9e8MBG8,379
+adss-1.23.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
+adss-1.23.dist-info/top_level.txt,sha256=ebD44L3R0PEvEFoRCJ-RjTIsQ9Yjpo2aAYC1BMtueLg,5
+adss-1.23.dist-info/RECORD,,
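Each RECORD row has the form path,sha256=HASH,SIZE, where HASH is the urlsafe-base64 SHA-256 digest with '=' padding stripped (PEP 376 / PEP 427). A small sketch for checking an installed file against its row:

import base64
import hashlib

def record_hash(path: str) -> str:
    """Return the sha256=... value RECORD stores for a file."""
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    # urlsafe base64 with the trailing '=' padding stripped, per the wheel spec
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# e.g. record_hash("adss/utils.py") should match the value listed above for that file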
adss/adss_manager.py DELETED
@@ -1,53 +0,0 @@
-import requests
-import pprint
-import os
-
-import xml.etree.ElementTree as ET
-
-from adss.table import Table
-from adss.variables import BASEURL
-
-class ADSSManager:
-    def __init__(self):
-        self.tables = {}
-
-    def load_tables(self):
-        res = requests.get(os.path.join(BASEURL, 'tables'))
-
-        # Parse the XML
-        root = ET.fromstring(res.content)
-
-        # In this XML, the root element is in the "vosi" namespace but the child elements (schema, table, etc.)
-        # are unqualified (i.e. have no prefix). Thus, we can search for them without a namespace.
-        tables = []
-        for schema_elem in root.findall('schema'):
-            schema_name = schema_elem.find('name').text if schema_elem.find('name') is not None else None
-
-            for table_elem in schema_elem.findall('table'):
-                table_name = table_elem.find('name').text if table_elem.find('name') is not None else None
-                columns = []
-                for col_elem in table_elem.findall('column'):
-                    col_name = col_elem.find('name').text if col_elem.find('name') is not None else None
-                    #dataType_elem = col_elem.find('dataType')
-                    #data_type = dataType_elem.text if dataType_elem is not None else None
-                    # The xsi:type attribute is in the XML namespace for xsi
-                    #xsi_type = dataType_elem.get('{http://www.w3.org/2001/XMLSchema-instance}type') if dataType_elem is not None else None
-                    columns.append(col_name)
-
-                if schema_name == "public":
-                    name = table_name
-                else:
-                    name = f"{schema_name}.{table_name}"
-
-                tables.append(Table(name, columns))
-
-        self.tables = tables
-
-    def print_tables(self):
-        pprint.pprint(self.tables)
-
-    def get_table(self, name):
-        for table in self.tables:
-            if name in table.name:
-                return table
-        return None
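The deleted loader walked a VOSI-style tableset in which the schema, table, and column elements are unqualified. A self-contained sketch of the parse it performed, over a hypothetical payload of that shape:

import xml.etree.ElementTree as ET

# Hypothetical /tables response shaped like what load_tables expected:
# unqualified <schema>/<table>/<column> elements, each with a <name> child.
xml = """<tableset>
  <schema><name>public</name>
    <table><name>stars</name>
      <column><name>ra</name></column>
      <column><name>dec</name></column>
    </table>
  </schema>
</tableset>"""

root = ET.fromstring(xml)
for schema in root.findall('schema'):
    for table in schema.findall('table'):
        cols = [c.find('name').text for c in table.findall('column')]
        print(schema.find('name').text, table.find('name').text, cols)
# -> public stars ['ra', 'dec']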
File without changes
adss/executors/async_query.py DELETED
@@ -1,123 +0,0 @@
-from astropy.io.votable import from_table, writeto
-from xml.dom import minidom
-from astropy.table import Table
-
-
-from adss.variables import BASEURL
-from adss.utils.format_table import format_result_table
-import requests
-import os
-
-import time
-
-import io
-
-async_url = os.path.join(BASEURL, "async")
-
-def execute_async(query, table_upload=None, refresh_rate=5):
-    """Perform async queries on splus cloud TAP service.
-
-    Args:
-        query (str): query itself.
-        table_upload (pandas.DataFrame, optional): table to upload. Defaults to None.
-        publicdata (bool, optional): If internal wants to access public data. Defaults to None.
-
-    Returns:
-        astropy.table.Table: result table.
-    """
-
-    data = {
-        "request": 'doQuery',
-        "version": '1.0',
-        "lang": 'ADQL',
-        "phase": 'run',
-        "query": query,
-        "format": 'csv'
-    }
-
-    if str(type(table_upload)) != "<class 'NoneType'>":
-        if 'astropy.table' in str(type(table_upload)):
-            if len(table_upload) > 6000:
-                print('Cutting to the first 6000 objects!')
-                table_upload = table_upload[0:6000]
-                table_upload = from_table(table_upload)
-
-                IObytes = io.BytesIO()
-                writeto(table_upload, IObytes)
-
-                IObytes.seek(0)
-            else:
-                table_upload = from_table(table_upload)
-
-                IObytes = io.BytesIO()
-                writeto(table_upload, IObytes)
-
-                IObytes.seek(0)
-
-        elif 'astropy.io.votable' in str(type(table_upload)):
-            if table_upload.get_first_table().nrows > 6000:
-                return 'votable bigger than 6000'
-            else:
-                IObytes = io.BytesIO()
-                writeto(table_upload, IObytes)
-                IObytes.seek(0)
-
-        elif 'DataFrame' in str(type(table_upload)):
-            if len(table_upload) > 6000:
-                print('Cutting to the first 6000 objects!')
-                table_upload = table_upload[0:6000]
-                table_upload = Table.from_pandas(table_upload)
-                table_upload = from_table(table_upload)
-                IObytes = io.BytesIO()
-                writeto(table_upload, IObytes)
-                IObytes.seek(0)
-            else:
-                table_upload = Table.from_pandas(table_upload)
-                table_upload = from_table(table_upload)
-                IObytes = io.BytesIO()
-                writeto(table_upload, IObytes)
-                IObytes.seek(0)
-
-        else:
-            return 'Table type not supported'
-
-        data['upload'] = 'upload,param:uplTable'
-        res = requests.post(async_url, data = data, files={'uplTable': IObytes.read()})
-
-    if not table_upload:
-        res = requests.post(async_url, data = data)
-
-    xmldoc = minidom.parse(io.BytesIO(res.content))
-
-    try:
-        item = xmldoc.getElementsByTagName('phase')[0]
-        process = item.firstChild.data
-
-        item = xmldoc.getElementsByTagName('jobId')[0]
-        jobID = item.firstChild.data
-
-        while process == 'EXECUTING':
-            res = requests.get(os.path.join(async_url, jobID))
-            xmldoc = minidom.parse(io.BytesIO(res.content))
-
-            item = xmldoc.getElementsByTagName('phase')[0]
-            process = item.firstChild.data
-            time.sleep(refresh_rate)
-
-        if process == 'COMPLETED':
-            item = xmldoc.getElementsByTagName('result')[0]
-            link = item.attributes['xlink:href'].value
-
-            res = requests.get(link)
-
-            return format_result_table(Table.read(io.BytesIO(res.content), format="csv"))
-
-        if process == 'ERROR':
-            item = xmldoc.getElementsByTagName('message')[0]
-            message = item.firstChild.data
-
-            print("Error: ", message)
-
-    except:
-        item = xmldoc.getElementsByTagName('INFO')
-        print(item[0].attributes['value'].value, ": ", item[0].firstChild.data)
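The executor above implemented the standard TAP/UWS async flow: submit a job, poll its phase until it leaves EXECUTING, then fetch the result link. A condensed sketch of that loop (ASYNC_URL is hypothetical; the original built it from adss.variables.BASEURL):

import io
import os
import time
import requests
from xml.dom import minidom

ASYNC_URL = "http://example.org/tap/async"  # hypothetical TAP async endpoint

def run_async(query: str, refresh_rate: int = 5) -> bytes:
    data = {"request": "doQuery", "lang": "ADQL", "phase": "run",
            "query": query, "format": "csv"}
    doc = minidom.parse(io.BytesIO(requests.post(ASYNC_URL, data=data).content))
    job_id = doc.getElementsByTagName("jobId")[0].firstChild.data
    phase = doc.getElementsByTagName("phase")[0].firstChild.data
    while phase == "EXECUTING":  # poll the job document until the phase changes
        time.sleep(refresh_rate)
        doc = minidom.parse(io.BytesIO(requests.get(os.path.join(ASYNC_URL, job_id)).content))
        phase = doc.getElementsByTagName("phase")[0].firstChild.data
    if phase == "COMPLETED":  # the result element carries an xlink:href to the CSV payload
        link = doc.getElementsByTagName("result")[0].attributes["xlink:href"].value
        return requests.get(link).content
    raise RuntimeError(f"job ended in phase {phase}")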
adss/executors/sync_query.py DELETED
@@ -1,42 +0,0 @@
-from adss.variables import BASEURL
-from adss.utils.format_table import format_result_table
-
-from astropy.table import Table
-from xml.dom import minidom
-import requests
-import os
-import io
-
-from requests.exceptions import Timeout
-
-sync_url = os.path.join(BASEURL, "sync")
-
-def execute_sync(query, timeout=20):
-    data = {
-        "request": "doQuery",
-        "version": "1.0",
-        "lang": "ADQL",
-        "phase": "run",
-        "query": query,
-        "format": "csv"
-    }
-
-    # Make request to TAP server
-    try:
-        res = requests.post(sync_url, data=data, timeout=20)
-    except Timeout:
-        raise Exception("Request to TAP server timed out, for large queries use async query")
-
-    # Handle errors from TAP response
-    if res.status_code != 200:
-        xmldoc = minidom.parse(io.BytesIO(res.content))
-        item = xmldoc.getElementsByTagName("INFO")
-        for i in item:
-            if i.getAttribute("name") == "QUERY_STATUS" and i.getAttribute("value") == "ERROR":
-                error_message = i.firstChild.data
-                raise Exception(f"ADQL Query Error: {error_message}")
-
-    # Convert CSV response to Astropy Table
-    return format_result_table(Table.read(io.BytesIO(res.content), format="csv"))
-
-
File without changes
adss/operations/cone_search.py DELETED
@@ -1,21 +0,0 @@
-from adss.executors.sync_query import execute_sync
-from adss.executors.async_query import execute_async
-
-def cone_search(table, ra, dec, radius):
-    if radius < 0:
-        raise ValueError("Radius must be positive")
-    if radius > 60:
-        raise ValueError("Radius must be less than 60 arcsecs")
-
-    query = f"""select * from {table}
-    WHERE 1 = CONTAINS( POINT('ICRS', ra, dec),
-    CIRCLE('ICRS', {ra}, {dec}, {radius}./3600.))
-    """
-    return execute_sync(query)
-
-def large_cone_search(table, ra, dec, radius):
-    query = f"""select * from {table}
-    WHERE 1 = CONTAINS( POINT('ICRS', ra, dec),
-    CIRCLE('ICRS', {ra}, {dec}, {radius}./3600.))
-    """
-    return execute_async(query)
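For concreteness, this is roughly the ADQL that cone_search("some_table", 150.0, 2.2, 5) rendered (table name and coordinates are illustrative); the radius is embedded as 5./3600., leaving the arcsecond-to-degree conversion to the server:

# Approximate rendered query for cone_search("some_table", 150.0, 2.2, 5)
query = """select * from some_table
    WHERE 1 = CONTAINS( POINT('ICRS', ra, dec),
    CIRCLE('ICRS', 150.0, 2.2, 5./3600.))
    """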
adss/table.py DELETED
@@ -1,295 +0,0 @@
-from adss.executors.sync_query import execute_sync
-from adss.executors.async_query import execute_async
-from adss.utils import format_table
-
-import re
-
-class Table:
-    def __init__(self, name, columns):
-        self.name = name
-        self.columns = columns
-        self.selected_columns = []
-        self.constrains = []
-
-    def __repr__(self):
-        return f"Table(name={self.name}, columns={len(self.columns)})"
-
-    def __str__(self):
-        return f"Table: {self.name} ({len(self.columns)} columns)"
-
-    def check_column(self, column):
-        return column in self.columns
-
-    def format_columns(self, columns):
-        # Use provided columns list rather than an undefined variable
-        return ','.join(columns)
-
-    def set_columns(self, columns):
-        if not isinstance(columns, list):
-            columns = [columns]
-        for column in columns:
-            if not self.check_column(column):
-                raise ValueError(f"Column {column} not in table {self.name}, options are {self.columns}")
-        self.selected_columns = columns
-
-    def set_constrains(self, constrains):
-        self.constrains = constrains
-
-    def cone_search(self, ra, dec, radius_arcsec, columns=None, method = 'sync'):
-        if radius_arcsec < 0:
-            raise ValueError("Radius must be positive")
-        if radius_arcsec > 60:
-            raise ValueError("Radius must be less than 60 arcsecs")
-
-        if columns:
-            columns_str = self.format_columns(columns)
-        elif self.selected_columns:
-            columns_str = self.format_columns(self.selected_columns)
-        else:
-            columns_str = "*"  # Select all columns
-
-        constraints_str = ""
-        if self.constrains:
-            constraints_str = " AND (" + self.constrains + ")"
-
-        query = f"""SELECT {columns_str} FROM {self.name}
-        WHERE 1 = CONTAINS(
-            POINT('ICRS', ra, dec),
-            CIRCLE('ICRS', {ra}, {dec}, {radius_arcsec}/3600.0)
-        ){constraints_str}
-        """
-
-        print(query)
-        if method == 'sync':
-            return execute_sync(query)
-        else:
-            return execute_async(query)
-
-    def cone_cross_match(
-        self,
-        other_table,
-        match_arcsec,
-        ra,
-        dec,
-        radius_arcsec,
-        columns=None,
-        other_columns=None,
-        other_suffix=None,
-        method='sync'
-    ):
-        """
-        Perform a cone search on the current table (t1) and then cross-match with another table (t2)
-        using a matching radius (match_arcsec).
-
-        The query first restricts table t1 to a cone centered at (ra, dec) with a radius of radius_arcsec.
-        Then, for each object in t1, it finds matching objects in table t2 that lie within match_arcsec
-        of the t1 object's coordinates.
-
-        Additionally:
-        - If a non-empty `other_suffix` is provided, each selected column from t2 will be aliased with that suffix.
-        - The constraints for each table are processed so that the columns in the conditions are properly qualified with t1 or t2.
-
-        Parameters:
-            other_table (Table): The table to match against (t2).
-            match_arcsec (float): The cross-match tolerance radius (in arcseconds) between t1 and t2.
-            ra (float): Right Ascension for the cone center (t1).
-            dec (float): Declination for the cone center (t1).
-            radius_arcsec (float): The cone search radius (in arcseconds) for filtering t1.
-            columns (list or None): Columns to select from the current table (t1).
-            other_columns (list or None): Columns to select from the other table (t2).
-            other_suffix (str or None): Optional suffix to append to each t2 column alias.
-            method (str): Use 'sync' for synchronous execution or 'async' for asynchronous.
-
-        Returns:
-            The result of the query execution via execute_sync or execute_async.
-        """
-
-        # Helper function to qualify constraint column names with the proper alias.
-        # It looks for each column name as a whole word (not already preceded by an alias) and prefixes it.
-        def apply_alias_to_constraint(constraint, alias, columns_list):
-            for col in columns_list:
-                # (?<![\w\.]) ensures that we do not match if the column is already prefixed (like t1.ra)
-                pattern = r'(?<![\w\.])\b' + re.escape(col) + r'\b'
-                constraint = re.sub(pattern, f"{alias}.{col}", constraint)
-            return constraint
-
-        # Validate match_arcsec
-        if match_arcsec <= 0:
-            raise ValueError("Match radius must be positive")
-        if match_arcsec > 3:
-            print("Match radius may be too large; consider a value less than 3 arcsecs")
-
-        # Determine columns for t1
-        if columns:
-            t1_columns_list = columns if isinstance(columns, list) else [columns]
-            t1_columns = ', '.join(f"t1.{col}" for col in t1_columns_list)
-        elif self.selected_columns:
-            t1_columns = ', '.join(f"t1.{col}" for col in self.selected_columns)
-        else:
-            t1_columns = "t1.*"
-
-        # Determine columns for t2, adding suffix if provided
-        if other_columns:
-            t2_columns_list = other_columns if isinstance(other_columns, list) else [other_columns]
-            if other_suffix:
-                t2_columns = ', '.join(f"t2.{col} AS {col}{other_suffix}" for col in t2_columns_list)
-            else:
-                t2_columns = ', '.join(f"t2.{col}" for col in t2_columns_list)
-        elif other_table.selected_columns:
-            if other_suffix:
-                t2_columns = ', '.join(f"t2.{col} AS {col}{other_suffix}" for col in other_table.selected_columns)
-            else:
-                t2_columns = ', '.join(f"t2.{col}" for col in other_table.selected_columns)
-        else:
-            t2_columns = "t2.*"
-
-        # Process constraints for t1: apply alias "t1" to each column mentioned in the constraint.
-        constraints_t1 = ""
-        if self.constrains:
-            if isinstance(self.constrains, str):
-                processed_constraint = apply_alias_to_constraint(self.constrains, "t1", self.columns)
-                constraints_t1 = " AND (" + processed_constraint + ")"
-            elif isinstance(self.constrains, list):
-                processed_constraints = []
-                for c in self.constrains:
-                    processed_constraints.append(apply_alias_to_constraint(c, "t1", self.columns))
-                constraints_t1 = " AND (" + " AND ".join(processed_constraints) + ")"
-
-        # Process constraints for t2: apply alias "t2" to each column mentioned in the constraint.
-        constraints_t2 = ""
-        if other_table.constrains:
-            if isinstance(other_table.constrains, str):
-                processed_constraint = apply_alias_to_constraint(other_table.constrains, "t2", other_table.columns)
-                constraints_t2 = " AND (" + processed_constraint + ")"
-            elif isinstance(other_table.constrains, list):
-                processed_constraints = []
-                for c in other_table.constrains:
-                    processed_constraints.append(apply_alias_to_constraint(c, "t2", other_table.columns))
-                constraints_t2 = " AND (" + " AND ".join(processed_constraints) + ")"
-        # Build the query:
-        # 1. The first CONTAINS clause performs the cross-match between t1 and t2 with match_arcsec tolerance.
-        # 2. The second CONTAINS clause restricts t1 objects to the cone centered at (ra, dec) with radius radius_arcsec.
-        query = f"""SELECT {t1_columns}, {t2_columns}
-        FROM {self.name} AS t1, {other_table.name} AS t2
-        WHERE 1 = CONTAINS(
-            POINT('ICRS', t2.ra, t2.dec),
-            CIRCLE('ICRS', t1.ra, t1.dec, {match_arcsec}/3600.0)
-        )
-        AND 1 = CONTAINS(
-            POINT('ICRS', t1.ra, t1.dec),
-            CIRCLE('ICRS', {ra}, {dec}, {radius_arcsec}/3600.0)
-        )
-        {constraints_t1}
-        {constraints_t2}
-        """
-
-        print(query)
-        if method == 'async':
-            return execute_async(query)
-        else:
-            return execute_sync(query)
-
-    def table_cross_match(
-        self,
-        other_table,
-        match_arcsec,
-        columns=None,
-        other_columns=None,
-        other_suffix=None,
-        method='async'
-    ):
-        """
-        Perform a cone search on the current table (t1) and then cross-match with another
-        table (Dataframe or astropy Table) (t2)
-        using a matching radius (match_arcsec).
-
-        For each object in t1, it finds matching objects in table t2 that lie within match_arcsec
-        of the t1 object's coordinates.
-
-        Additionally:
-        - If a non-empty `other_suffix` is provided, each selected column from t2 will be aliased with that suffix.
-        - The constraints for each table are processed so that the columns in the conditions are properly qualified with t1 or t2.
-
-        Parameters:
-            other_table (astropy.table.Table): The table to match against (t2).
-            match_arcsec (float): The cross-match tolerance radius (in arcseconds) between t1 and t2.
-            columns (list or None): Columns to select from the current table (t1).
-            other_columns (list or None): Columns to select from the other table (t2).
-            other_suffix (str or None): Optional suffix to append to each t2 column alias.
-            method (str): Use 'sync' for synchronous execution or 'async' for asynchronous.
-
-        Returns:
-            The result of the query execution via execute_sync or execute_async.
-        """
-
-        # Helper function to qualify constraint column names with the proper alias.
-        # It looks for each column name as a whole word (not already preceded by an alias) and prefixes it.
-        def apply_alias_to_constraint(constraint, alias, columns_list):
-            for col in columns_list:
-                # (?<![\w\.]) ensures that we do not match if the column is already prefixed (like t1.ra)
-                pattern = r'(?<![\w\.])\b' + re.escape(col) + r'\b'
-                constraint = re.sub(pattern, f"{alias}.{col}", constraint)
-            return constraint
-
-        # Validate match_arcsec
-        if match_arcsec <= 0:
-            raise ValueError("Match radius must be positive")
-        if match_arcsec > 3:
-            print("Match radius may be too large; consider a value less than 3 arcsecs")
-
-        # Determine columns for t1
-        if columns:
-            t1_columns_list = columns if isinstance(columns, list) else [columns]
-            t1_columns = ', '.join(f"t1.{col}" for col in t1_columns_list)
-        elif self.selected_columns:
-            t1_columns = ', '.join(f"t1.{col}" for col in self.selected_columns)
-        else:
-            t1_columns = "t1.*"
-
-        # Determine columns for t2, adding suffix if provided
-        if not other_columns:
-            raise ValueError("Must provide columns for the input table (other_columns param)")
-
-        if not "ra" in other_columns or not "dec" in other_columns:
-            raise ValueError("Input table must have 'ra' and 'dec' columns")
-
-        other_table = other_table[other_columns]
-
-        t2_columns_list = other_columns if isinstance(other_columns, list) else [other_columns]
-        if other_suffix:
-            t2_columns = ', '.join(f"t2.{col} AS {col}{other_suffix}" for col in t2_columns_list)
-        else:
-            t2_columns = ', '.join(f"t2.{col}" for col in t2_columns_list)
-
-        # Process constraints for t1: apply alias "t1" to each column mentioned in the constraint.
-        constraints_t1 = ""
-        if self.constrains:
-            if isinstance(self.constrains, str):
-                processed_constraint = apply_alias_to_constraint(self.constrains, "t1", self.columns)
-                constraints_t1 = " (" + processed_constraint + ")"
-            elif isinstance(self.constrains, list):
-                processed_constraints = []
-                for c in self.constrains:
-                    processed_constraints.append(apply_alias_to_constraint(c, "t1", self.columns))
-                constraints_t1 = " (" + " AND ".join(processed_constraints) + ")"
-
-        if constraints_t1:
-            constraints_t1 = "WHERE " + constraints_t1
-        # Build the query:
-        # 1. The first CONTAINS clause performs the cross-match between t1 and t2 with match_arcsec tolerance.
-        # 2. The second CONTAINS clause restricts t1 objects to the cone centered at (ra, dec) with radius radius_arcsec.
-        query = f"""SELECT {t1_columns}, {t2_columns}
-        FROM {self.name} AS t1 JOIN tap_upload.upload AS t2 ON
-        1 = CONTAINS(
-            POINT('ICRS', t1.ra, t1.dec),
-            CIRCLE('ICRS', t2.ra, t2.dec, {match_arcsec}/3600.0)
-        )
-        {constraints_t1}
-        """
-
-        print(query)
-        if method == 'async':
-            return execute_async(query, table_upload=other_table)
-        else:
-            raise ValueError("Synchronous execution not supported yet for table cross-match")
-            #return execute_sync(query)
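The constraint-qualifying helper is the subtle part of the class above. A standalone copy with a worked example; the negative lookbehind keeps already-qualified names like t1.dec untouched:

import re

def apply_alias_to_constraint(constraint, alias, columns_list):
    for col in columns_list:
        # (?<![\w\.]) skips occurrences already prefixed with an alias (like t1.ra)
        pattern = r'(?<![\w\.])\b' + re.escape(col) + r'\b'
        constraint = re.sub(pattern, f"{alias}.{col}", constraint)
    return constraint

print(apply_alias_to_constraint("ra > 150 AND t1.dec < 3", "t1", ["ra", "dec"]))
# -> t1.ra > 150 AND t1.dec < 3   (dec is untouched because it is already qualified)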
adss/utils/__init__.py DELETED
File without changes
adss/utils/format_table.py DELETED
@@ -1,115 +0,0 @@
-import numpy as np
-from astropy.table import Table
-
-def vectorized_string_to_masked_array(column_data):
-    """
-    Fully vectorized conversion of formatted string arrays to NumPy masked arrays.
-    The strings are assumed to be wrapped in curly braces (e.g. "{1,2,3}").
-    Any occurrence of the literal "NULL" in a cell will be masked.
-
-    Parameters
-    ----------
-    column_data : numpy.ndarray
-        A 1D NumPy array of strings. Each element is a formatted array like "{1,2,3}".
-
-    Returns
-    -------
-    numpy.ma.MaskedArray
-        A masked array where "NULL" entries are masked.
-    """
-    # Remove curly braces (but do not remove "NULL")
-    clean_data = np.char.replace(column_data.astype(str), "{", "")
-    clean_data = np.char.replace(clean_data, "}", "")
-
-    # Split each string by comma into a list of items (with possible surrounding whitespace)
-    split_arrays = np.char.split(clean_data, ",")
-
-    # --- Determine type by scanning for a first non-"NULL" value ---
-    first_value = None
-    for row in split_arrays:
-        for item in row:
-            item_str = item.strip()
-            if item_str != "NULL":
-                first_value = item_str
-                break
-        if first_value is not None:
-            break
-
-    # If no non-NULL value is found, default to a masked object array.
-    if first_value is None:
-        data = [np.array(row) for row in split_arrays]
-        mask = [np.full(len(row), True, dtype=bool) for row in split_arrays]
-        return np.ma.masked_array(data, mask=mask)
-
-    # Try to determine numeric type.
-    # (If first_value consists solely of digits, we'll assume integer.
-    # Otherwise, if it can be converted to float, we'll use float.
-    # Else, we default to string.)
-    is_integer = first_value.isdigit()
-    is_float = False
-    if not is_integer:
-        try:
-            float(first_value)
-            is_float = True
-        except Exception:
-            pass
-
-    # Prepare lists to store converted rows and corresponding masks.
-    data_list = []
-    mask_list = []
-
-    # Conversion helper functions
-    def convert_item(item, conv):
-        item = item.strip()
-        if item == "NULL":
-            return None, True
-        else:
-            return conv(item), False
-
-    if is_integer:
-        conv_func = int
-        dtype = np.int64
-    elif is_float:
-        conv_func = float
-        dtype = np.float64
-    else:
-        conv_func = lambda x: x
-        dtype = object
-
-    # Process each row
-    for row in split_arrays:
-        row_vals = []
-        row_mask = []
-        for item in row:
-            val, is_mask = convert_item(item, conv_func)
-            # For masked numeric values, we insert a dummy (0 or 0.0) value.
-            if is_mask:
-                if dtype in (np.int64, np.float64):
-                    row_vals.append(0)
-                else:
-                    row_vals.append("")
-            else:
-                row_vals.append(val)
-            row_mask.append(is_mask)
-        # Convert row to an array of the target dtype.
-        row_arr = np.array(row_vals, dtype=dtype)
-        data_list.append(row_arr)
-        mask_list.append(np.array(row_mask, dtype=bool))
-
-    # Create and return a masked array.
-    return np.ma.masked_array(data_list, mask=mask_list)
-
-def format_result_table(tab):
-    if tab is None or len(tab) == 0:
-        return None
-
-    for col in tab.colnames:
-        if len(tab[col]) == 0:
-            continue
-        if not "<U" in str(tab[col].dtype):
-            continue
-
-        if "{" in tab[col][0]:
-            tab[col] = vectorized_string_to_masked_array(tab[col])
-
-    return tab
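Concretely, the helper above turned Postgres-style array strings into row-wise masked arrays. A sketch of the expected result for integer data; the masked slots hold the dummy 0 the code inserts:

import numpy as np

col = np.array(["{1,2,NULL}", "{4,NULL,6}"])
# vectorized_string_to_masked_array(col) would yield approximately:
expected = np.ma.masked_array([[1, 2, 0], [4, 0, 6]],
                              mask=[[False, False, True], [False, True, False]])
print(expected)
# [[1 2 --]
#  [4 -- 6]]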
adss/variables.py DELETED
@@ -1,3 +0,0 @@
-
-
-BASEURL = "http://andromeda.cbpf.br:8080/tap/tap"
adss-1.21.dist-info/RECORD DELETED
@@ -1,30 +0,0 @@
-adss/__init__.py,sha256=KiM0xXp_U9MqUIb_hLayoNW5lilW-LNSsH4PkV5dx-4,855
-adss/adss_manager.py,sha256=vaS6y1IycVW8AjpAQeG58VwjCDLyMBUylJzYSeg6D9o,2068
-adss/auth.py,sha256=M8GAN8pdfmJfsEOqv_EW2ru0uP0H1JtK7IfBDrIZMyg,4025
-adss/client.py,sha256=Ojc6jqZtCBMwRoTGuBiF0SEDkdWs_-xbGwyk03nHkr8,29613
-adss/exceptions.py,sha256=YeN-xRHvlSmwyS8ni2jOEhhgZK9J1jsG11pOedy3Gfg,1482
-adss/table.py,sha256=Ua663njPk2sg8BtQPo1wZ-V09YvnjrEyIb_SmBhdOYY,13383
-adss/utils.py,sha256=0RISndgXnwVy8cLMFa4Mm7CfGqwGdX-X-HZ0NmPDVD0,3558
-adss/variables.py,sha256=kmbwxJBDC97yKakrnBvONRh1FVvSXU4YKqnjExAU2ZA,51
-adss/endpoints/__init__.py,sha256=Pr29901fT8ClCS2GasTjTiBNyn7DfVfxILpYDFsMvPA,488
-adss/endpoints/admin.py,sha256=S6ZrkeA_Lh_LCpF1NHyfMKqjbIiylYXUSV65H_WKg1U,16391
-adss/endpoints/images.py,sha256=ItAiBss_jQvWQWRUvy0c9Cjn1r9lDR8eOPauqOcPcZ8,35777
-adss/endpoints/metadata.py,sha256=RPrRP6Uz6-uPMIcntMgfss9vAd5iN7JXjZbF8SW0EYg,8238
-adss/endpoints/queries.py,sha256=5BONw_IcGORMPNe-5J6BpoFY6z7lKcktEVhqZ9j17_8,17286
-adss/endpoints/users.py,sha256=6Abkl3c3_YKdMYR_JWI-uL9HTHxcjlIOnE29GyN5_QE,10811
-adss/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-adss/executors/async_query.py,sha256=qmSm-Ex8ZJSjn8lmm_F5BufVpDV-w6vOqXie7krCp7k,3977
-adss/executors/sync_query.py,sha256=3e-ALG3GnA906o_oefci5XHNcdnoPWuc67ml-YATMKE,1243
-adss/models/__init__.py,sha256=ADWVaGy4dkpEMH3iS_6EnRSBlEgoM5Vy9zORQr-UG6w,404
-adss/models/metadata.py,sha256=6fdH_0BenVRmeXkkKbsG2B68O-N2FXTTRgxsEhAHRoU,4058
-adss/models/query.py,sha256=Af-iojZb-nO6qj-yMT_PlNM7Hip6EwBfNeaQPMJPNM0,4293
-adss/models/user.py,sha256=5qVT5qOktokmVLkGszPGCTZWv0wC-7aBMvJ8EeBOqdw,3493
-adss/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-adss/operations/cone_search.py,sha256=qfdFA2TGqnzuggz4nep21_y4LgmHP4ZMpVupxn87dB0,706
-adss/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-adss/utils/format_table.py,sha256=UYCQ6Xum3dPHrh0cAh_KCj6vHShAvdHlV0rtIv7J09Q,3695
-adss-1.21.dist-info/LICENSE,sha256=1aYqcyqjrdNXY9hqgZkCWprcoA112oKvdrfPyvMYPTc,1468
-adss-1.21.dist-info/METADATA,sha256=M75h6eW69l2JcUGupkTkkQp2-pcW3qfgkmj3_5qFIpE,379
-adss-1.21.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
-adss-1.21.dist-info/top_level.txt,sha256=ebD44L3R0PEvEFoRCJ-RjTIsQ9Yjpo2aAYC1BMtueLg,5
-adss-1.21.dist-info/RECORD,,
File without changes
File without changes