adss 0.1__py3-none-any.whl → 1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
adss/adss_manager.py ADDED
@@ -0,0 +1,53 @@
1
+ import requests
2
+ import pprint
3
+ import os
4
+
5
+ import xml.etree.ElementTree as ET
6
+
7
+ from adss.table import Table
8
+ from adss.variables import BASEURL
9
+
10
class ADSSManager:
    """Discovers and caches the tables exposed by the ADSS TAP service."""

    def __init__(self):
        # List of Table objects, populated by load_tables().
        # (Fixed: was initialized as a dict, but load_tables() assigns a list
        # and get_table() iterates it as a list.)
        self.tables = []

    def load_tables(self):
        """Fetch the VOSI /tables endpoint and parse it into Table objects.

        Populates self.tables; returns None.
        """
        # NOTE(review): os.path.join is used to build a URL; on Windows this
        # would produce backslashes — consider urllib.parse.urljoin.
        res = requests.get(os.path.join(BASEURL, 'tables'))

        # Parse the XML returned by the service.
        root = ET.fromstring(res.content)

        # In this XML, the root element is in the "vosi" namespace but the
        # child elements (schema, table, etc.) are unqualified, so we can
        # search for them without a namespace.
        tables = []
        for schema_elem in root.findall('schema'):
            schema_name = schema_elem.find('name').text if schema_elem.find('name') is not None else None

            for table_elem in schema_elem.findall('table'):
                table_name = table_elem.find('name').text if table_elem.find('name') is not None else None
                columns = [
                    col_elem.find('name').text if col_elem.find('name') is not None else None
                    for col_elem in table_elem.findall('column')
                ]

                # Tables in the default "public" schema are addressed without
                # a schema prefix; everything else gets "schema.table".
                if schema_name == "public":
                    name = table_name
                else:
                    name = f"{schema_name}.{table_name}"

                tables.append(Table(name, columns))

        self.tables = tables

    def print_tables(self):
        """Pretty-print the loaded Table objects."""
        pprint.pprint(self.tables)

    def get_table(self, name):
        """Return the first loaded table whose name contains *name*.

        Uses substring matching (so "stars" matches "public.stars");
        returns None when nothing matches.
        """
        for table in self.tables:
            if name in table.name:
                return table
        return None
@@ -2,7 +2,9 @@ from astropy.io.votable import from_table, writeto
2
2
  from xml.dom import minidom
3
3
  from astropy.table import Table
4
4
 
5
+
5
6
  from adss.variables import BASEURL
7
+ from adss.utils.format_table import format_result_table
6
8
  import requests
7
9
  import os
8
10
 
@@ -32,7 +34,7 @@ def execute_async(query, table_upload=None, refresh_rate=5):
32
34
  "query": query,
33
35
  "format": 'csv'
34
36
  }
35
-
37
+
36
38
  if str(type(table_upload)) != "<class 'NoneType'>":
37
39
  if 'astropy.table' in str(type(table_upload)):
38
40
  if len(table_upload) > 6000:
@@ -76,7 +78,6 @@ def execute_async(query, table_upload=None, refresh_rate=5):
76
78
  writeto(table_upload, IObytes)
77
79
  IObytes.seek(0)
78
80
 
79
-
80
81
  else:
81
82
  return 'Table type not supported'
82
83
 
@@ -109,7 +110,7 @@ def execute_async(query, table_upload=None, refresh_rate=5):
109
110
 
110
111
  res = requests.get(link)
111
112
 
112
- return Table.read(io.BytesIO(res.content), format="csv")
113
+ return format_result_table(Table.read(io.BytesIO(res.content), format="csv"))
113
114
 
114
115
  if process == 'ERROR':
115
116
  item = xmldoc.getElementsByTagName('message')[0]
@@ -1,4 +1,5 @@
1
1
  from adss.variables import BASEURL
2
+ from adss.utils.format_table import format_result_table
2
3
 
3
4
  from astropy.table import Table
4
5
  from xml.dom import minidom
@@ -6,9 +7,11 @@ import requests
6
7
  import os
7
8
  import io
8
9
 
10
+ from requests.exceptions import Timeout
11
+
9
12
  sync_url = os.path.join(BASEURL, "sync")
10
13
 
11
- def execute_sync(query):
14
+ def execute_sync(query, timeout=20):
12
15
  data = {
13
16
  "request": "doQuery",
14
17
  "version": "1.0",
@@ -19,7 +22,10 @@ def execute_sync(query):
19
22
  }
20
23
 
21
24
  # Make request to TAP server
22
- res = requests.post(sync_url, data=data)
25
+ try:
26
+ res = requests.post(sync_url, data=data, timeout=20)
27
+ except Timeout:
28
+ raise Exception("Request to TAP server timed out, for large queries use async query")
23
29
 
24
30
  # Handle errors from TAP response
25
31
  if res.status_code != 200:
@@ -31,6 +37,6 @@ def execute_sync(query):
31
37
  raise Exception(f"ADQL Query Error: {error_message}")
32
38
 
33
39
  # Convert CSV response to Astropy Table
34
- return Table.read(io.BytesIO(res.content), format="csv")
40
+ return format_result_table(Table.read(io.BytesIO(res.content), format="csv"))
35
41
 
36
42
 
adss/table.py ADDED
@@ -0,0 +1,295 @@
1
+ from adss.executors.sync_query import execute_sync
2
+ from adss.executors.async_query import execute_async
3
+ from adss.utils import format_table
4
+
5
+ import re
6
+
7
class Table:
    """A remote ADSS/TAP table; builds and executes ADQL queries against it.

    Attributes:
        name: Fully qualified table name (schema prefix omitted for "public").
        columns: List of available column names.
        selected_columns: Default column selection used when querying.
        constrains: Stored ADQL constraints — a string or a list of strings.
    """

    def __init__(self, name, columns):
        self.name = name
        self.columns = columns
        self.selected_columns = []
        self.constrains = []

    def __repr__(self):
        return f"Table(name={self.name}, columns={len(self.columns)})"

    def __str__(self):
        return f"Table: {self.name} ({len(self.columns)} columns)"

    def check_column(self, column):
        """Return True if *column* exists in this table."""
        return column in self.columns

    def format_columns(self, columns):
        """Join a list of column names into a comma-separated string."""
        return ','.join(columns)

    def set_columns(self, columns):
        """Set the default column selection, validating each name.

        Raises:
            ValueError: if any column is not in this table.
        """
        if not isinstance(columns, list):
            columns = [columns]
        for column in columns:
            if not self.check_column(column):
                raise ValueError(f"Column {column} not in table {self.name}, options are {self.columns}")
        self.selected_columns = columns

    def set_constrains(self, constrains):
        """Store ADQL constraints (a string or a list of strings)."""
        self.constrains = constrains

    @staticmethod
    def _apply_alias_to_constraint(constraint, alias, columns_list):
        """Qualify bare column names in *constraint* with *alias* (e.g. t1.ra).

        The negative look-behind (?<![\\w\\.]) ensures an already-qualified
        name such as "t1.ra" is not prefixed a second time.
        (Deduplicated: this helper was previously repeated inside both
        cross-match methods.)
        """
        for col in columns_list:
            pattern = r'(?<![\w\.])\b' + re.escape(col) + r'\b'
            constraint = re.sub(pattern, f"{alias}.{col}", constraint)
        return constraint

    @classmethod
    def _join_aliased_constraints(cls, constrains, alias, columns):
        """Alias-qualify one constraint or a list of them; join with AND.

        Returns "" when there are no constraints.
        """
        if not constrains:
            return ""
        if isinstance(constrains, str):
            constrains = [constrains]
        processed = [cls._apply_alias_to_constraint(c, alias, columns) for c in constrains]
        return " AND ".join(processed)

    @staticmethod
    def _aliased_select(cols, alias, suffix=None):
        """Build a SELECT fragment "alias.col[, ...]", optionally AS col+suffix."""
        if suffix:
            return ', '.join(f"{alias}.{col} AS {col}{suffix}" for col in cols)
        return ', '.join(f"{alias}.{col}" for col in cols)

    def cone_search(self, ra, dec, radius_arcsec, columns=None, method='sync'):
        """Run a cone search centered at (ra, dec) with radius radius_arcsec.

        Parameters:
            ra, dec: Cone center in degrees (ICRS).
            radius_arcsec: Search radius in arcseconds (0 < r <= 60).
            columns: Columns to select; falls back to selected_columns, then "*".
            method: 'sync' for synchronous execution, anything else runs async.

        Raises:
            ValueError: on a negative or > 60 arcsec radius.
        """
        if radius_arcsec < 0:
            raise ValueError("Radius must be positive")
        if radius_arcsec > 60:
            raise ValueError("Radius must be less than 60 arcsecs")

        if columns:
            columns_str = self.format_columns(columns)
        elif self.selected_columns:
            columns_str = self.format_columns(self.selected_columns)
        else:
            columns_str = "*"  # Select all columns

        # Fixed: previously this concatenated self.constrains directly, which
        # raised TypeError when set_constrains() had been given a list (lists
        # are supported by the cross-match methods).
        constraints_str = ""
        joined = self.constrains if isinstance(self.constrains, str) else " AND ".join(self.constrains)
        if joined:
            constraints_str = " AND (" + joined + ")"

        query = f"""SELECT {columns_str} FROM {self.name}
        WHERE 1 = CONTAINS(
            POINT('ICRS', ra, dec),
            CIRCLE('ICRS', {ra}, {dec}, {radius_arcsec}/3600.0)
        ){constraints_str}
        """

        print(query)
        if method == 'sync':
            return execute_sync(query)
        else:
            return execute_async(query)

    def cone_cross_match(
        self,
        other_table,
        match_arcsec,
        ra,
        dec,
        radius_arcsec,
        columns=None,
        other_columns=None,
        other_suffix=None,
        method='sync'
    ):
        """Cone search this table (t1), then cross-match against *other_table* (t2).

        The query restricts t1 to a cone centered at (ra, dec) with radius
        radius_arcsec, then finds t2 objects within match_arcsec of each
        t1 object.  If `other_suffix` is given, each selected t2 column is
        aliased with that suffix.  Stored constraints on either table are
        alias-qualified (t1/t2) automatically.

        Parameters:
            other_table (Table): The table to match against (t2).
            match_arcsec (float): Cross-match tolerance radius in arcseconds.
            ra, dec (float): Cone center for t1, in degrees.
            radius_arcsec (float): Cone radius in arcseconds for filtering t1.
            columns (list or None): Columns to select from t1.
            other_columns (list or None): Columns to select from t2.
            other_suffix (str or None): Optional suffix for t2 column aliases.
            method (str): 'sync' or 'async' execution.

        Returns:
            The result of execute_sync or execute_async.
        """
        if match_arcsec <= 0:
            raise ValueError("Match radius must be positive")
        if match_arcsec > 3:
            print("Match radius may be too large; consider a value less than 3 arcsecs")

        # Columns for t1: explicit argument > stored selection > everything.
        if columns:
            t1_list = columns if isinstance(columns, list) else [columns]
            t1_columns = self._aliased_select(t1_list, "t1")
        elif self.selected_columns:
            t1_columns = self._aliased_select(self.selected_columns, "t1")
        else:
            t1_columns = "t1.*"

        # Columns for t2, optionally suffixed.
        if other_columns:
            t2_list = other_columns if isinstance(other_columns, list) else [other_columns]
            t2_columns = self._aliased_select(t2_list, "t2", other_suffix)
        elif other_table.selected_columns:
            t2_columns = self._aliased_select(other_table.selected_columns, "t2", other_suffix)
        else:
            t2_columns = "t2.*"

        # Alias-qualify each table's stored constraints.
        constraints_t1 = ""
        joined_t1 = self._join_aliased_constraints(self.constrains, "t1", self.columns)
        if joined_t1:
            constraints_t1 = " AND (" + joined_t1 + ")"

        constraints_t2 = ""
        joined_t2 = self._join_aliased_constraints(other_table.constrains, "t2", other_table.columns)
        if joined_t2:
            constraints_t2 = " AND (" + joined_t2 + ")"

        # 1. First CONTAINS: cross-match t1 against t2 within match_arcsec.
        # 2. Second CONTAINS: restrict t1 to the requested cone.
        query = f"""SELECT {t1_columns}, {t2_columns}
        FROM {self.name} AS t1, {other_table.name} AS t2
        WHERE 1 = CONTAINS(
            POINT('ICRS', t2.ra, t2.dec),
            CIRCLE('ICRS', t1.ra, t1.dec, {match_arcsec}/3600.0)
        )
        AND 1 = CONTAINS(
            POINT('ICRS', t1.ra, t1.dec),
            CIRCLE('ICRS', {ra}, {dec}, {radius_arcsec}/3600.0)
        )
        {constraints_t1}
        {constraints_t2}
        """

        print(query)
        if method == 'async':
            return execute_async(query)
        else:
            return execute_sync(query)

    def table_cross_match(
        self,
        other_table,
        match_arcsec,
        columns=None,
        other_columns=None,
        other_suffix=None,
        method='async'
    ):
        """Cross-match this table (t1) against an uploaded table (t2).

        *other_table* is a local table (e.g. an astropy Table) uploaded as
        tap_upload.upload; it must contain 'ra' and 'dec' columns.  For each
        t1 object, matching t2 objects within match_arcsec are returned.
        If `other_suffix` is given, each selected t2 column is aliased with
        that suffix.  Stored constraints on t1 are alias-qualified.

        Parameters:
            other_table: The local table to upload and match against (t2).
            match_arcsec (float): Cross-match tolerance radius in arcseconds.
            columns (list or None): Columns to select from t1.
            other_columns (list): Columns to keep from the uploaded table (required).
            other_suffix (str or None): Optional suffix for t2 column aliases.
            method (str): Only 'async' is currently supported.

        Returns:
            The result of execute_async.

        Raises:
            ValueError: on a non-positive match radius, missing other_columns,
                missing ra/dec in other_columns, or method != 'async'.
        """
        if match_arcsec <= 0:
            raise ValueError("Match radius must be positive")
        if match_arcsec > 3:
            print("Match radius may be too large; consider a value less than 3 arcsecs")

        # Columns for t1: explicit argument > stored selection > everything.
        if columns:
            t1_list = columns if isinstance(columns, list) else [columns]
            t1_columns = self._aliased_select(t1_list, "t1")
        elif self.selected_columns:
            t1_columns = self._aliased_select(self.selected_columns, "t1")
        else:
            t1_columns = "t1.*"

        # The uploaded table must declare its columns and carry coordinates.
        if not other_columns:
            raise ValueError("Must provide columns for the input table (other_columns param)")
        if "ra" not in other_columns or "dec" not in other_columns:
            raise ValueError("Input table must have 'ra' and 'dec' columns")

        # Trim the upload to just the requested columns.
        other_table = other_table[other_columns]

        t2_list = other_columns if isinstance(other_columns, list) else [other_columns]
        t2_columns = self._aliased_select(t2_list, "t2", other_suffix)

        # Alias-qualify t1's stored constraints; here they form the WHERE
        # clause (the match condition lives in the JOIN ... ON).
        constraints_t1 = ""
        joined_t1 = self._join_aliased_constraints(self.constrains, "t1", self.columns)
        if joined_t1:
            constraints_t1 = "WHERE  (" + joined_t1 + ")"

        query = f"""SELECT {t1_columns}, {t2_columns}
        FROM {self.name} AS t1 JOIN tap_upload.upload AS t2 ON
        1 = CONTAINS(
            POINT('ICRS', t1.ra, t1.dec),
            CIRCLE('ICRS', t2.ra, t2.dec, {match_arcsec}/3600.0)
        )
        {constraints_t1}
        """

        print(query)
        if method == 'async':
            return execute_async(query, table_upload=other_table)
        else:
            raise ValueError("Synchronous execution not supported yet for table cross-match")
adss/utils/__init__.py ADDED
File without changes
@@ -0,0 +1,115 @@
1
+ import numpy as np
2
+ from astropy.table import Table
3
+
4
def vectorized_string_to_masked_array(column_data):
    """Convert brace-wrapped string arrays (e.g. "{1,2,3}") to a masked array.

    Every literal "NULL" token is masked.  The element type (int, float or
    string) is inferred from the first non-"NULL" token encountered.

    Parameters
    ----------
    column_data : numpy.ndarray
        1D array of strings, each a formatted array like "{1,2,3}".

    Returns
    -------
    numpy.ma.MaskedArray
        Masked array with "NULL" entries masked.
    """
    # Drop the surrounding braces (leaving "NULL" tokens intact), then split
    # every cell on commas.
    stripped = np.char.replace(column_data.astype(str), "{", "")
    stripped = np.char.replace(stripped, "}", "")
    rows = np.char.split(stripped, ",")

    # Scan for the first non-"NULL" token; it decides the element type.
    sample = None
    for tokens in rows:
        for token in tokens:
            if token.strip() != "NULL":
                sample = token.strip()
                break
        if sample is not None:
            break

    # Everything was NULL: return a fully-masked object array.
    if sample is None:
        return np.ma.masked_array(
            [np.array(tokens) for tokens in rows],
            mask=[np.full(len(tokens), True, dtype=bool) for tokens in rows],
        )

    # Type inference: all-digits -> int, float-parseable -> float, else str.
    if sample.isdigit():
        convert, dtype = int, np.int64
    else:
        try:
            float(sample)
            convert, dtype = float, np.float64
        except Exception:
            convert, dtype = (lambda s: s), object

    # Dummy value stored in masked slots (0 for numeric, "" for strings).
    fill = 0 if dtype in (np.int64, np.float64) else ""

    data_rows = []
    mask_rows = []
    for tokens in rows:
        values = []
        flags = []
        for token in tokens:
            token = token.strip()
            if token == "NULL":
                values.append(fill)
                flags.append(True)
            else:
                values.append(convert(token))
                flags.append(False)
        data_rows.append(np.array(values, dtype=dtype))
        mask_rows.append(np.array(flags, dtype=bool))

    return np.ma.masked_array(data_rows, mask=mask_rows)
101
+
102
def format_result_table(tab):
    """Post-process a query result table in place.

    Unicode string columns whose cells look like brace-wrapped arrays
    (e.g. "{1,2,3}") are converted to masked arrays via
    vectorized_string_to_masked_array.

    Parameters
    ----------
    tab : astropy.table.Table or None
        Raw result table; may be None or empty.

    Returns
    -------
    The same table with array-like string columns converted, or None when
    the input is None or has no rows.
    """
    if tab is None or len(tab) == 0:
        return None

    for col in tab.colnames:
        if len(tab[col]) == 0:
            continue
        # Only unicode string columns ("<U..." dtype) can hold the "{...}"
        # encoding; skip everything else.
        if "<U" not in str(tab[col].dtype):
            continue

        # NOTE(review): only the first row is inspected; assumes the server
        # formats a column uniformly — confirm if mixed content is possible.
        if "{" in tab[col][0]:
            tab[col] = vectorized_string_to_masked_array(tab[col])

    return tab
@@ -0,0 +1,11 @@
1
+ Copyright <YEAR> <COPYRIGHT HOLDER>
2
+
3
+ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4
+
5
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6
+
7
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8
+
9
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10
+
11
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,13 +1,13 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: adss
3
- Version: 0.1
3
+ Version: 1.0
4
4
  Summary: Astronomical Data Smart System
5
5
  Home-page: https://github.com/schwarzam/adss
6
6
  Author: Gustavo Schwarz
7
7
  Author-email: gustavo.b.schwarz@gmail.com
8
8
  Classifier: Programming Language :: Python :: 3
9
9
  Classifier: License :: OSI Approved :: Apache Software License
10
+ License-File: LICENSE
10
11
  Requires-Dist: requests
11
12
  Requires-Dist: astropy
12
- Requires-Dist: pyvo
13
13
 
@@ -0,0 +1,16 @@
1
+ adss/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ adss/adss_manager.py,sha256=vaS6y1IycVW8AjpAQeG58VwjCDLyMBUylJzYSeg6D9o,2068
3
+ adss/table.py,sha256=Ua663njPk2sg8BtQPo1wZ-V09YvnjrEyIb_SmBhdOYY,13383
4
+ adss/variables.py,sha256=kmbwxJBDC97yKakrnBvONRh1FVvSXU4YKqnjExAU2ZA,51
5
+ adss/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ adss/executors/async_query.py,sha256=qmSm-Ex8ZJSjn8lmm_F5BufVpDV-w6vOqXie7krCp7k,3977
7
+ adss/executors/sync_query.py,sha256=3e-ALG3GnA906o_oefci5XHNcdnoPWuc67ml-YATMKE,1243
8
+ adss/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9
+ adss/operations/cone_search.py,sha256=qfdFA2TGqnzuggz4nep21_y4LgmHP4ZMpVupxn87dB0,706
10
+ adss/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ adss/utils/format_table.py,sha256=UYCQ6Xum3dPHrh0cAh_KCj6vHShAvdHlV0rtIv7J09Q,3695
12
+ adss-1.0.dist-info/LICENSE,sha256=1aYqcyqjrdNXY9hqgZkCWprcoA112oKvdrfPyvMYPTc,1468
13
+ adss-1.0.dist-info/METADATA,sha256=mXmdv0NUDgCapF58GJOkJj5_bCU5rjHgCjYI1cCDY7Q,378
14
+ adss-1.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
15
+ adss-1.0.dist-info/top_level.txt,sha256=ebD44L3R0PEvEFoRCJ-RjTIsQ9Yjpo2aAYC1BMtueLg,5
16
+ adss-1.0.dist-info/RECORD,,
adss-0.1.dist-info/RECORD DELETED
@@ -1,11 +0,0 @@
1
- adss/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- adss/variables.py,sha256=kmbwxJBDC97yKakrnBvONRh1FVvSXU4YKqnjExAU2ZA,51
3
- adss/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- adss/executors/async_query.py,sha256=2xVnnVp-0-dFR8GbDzG2be3dT2s8DpPN1ja59o4d_bM,3904
5
- adss/executors/sync_query.py,sha256=s-t95UtFCdkyBTpnrDgCrVS9wSFXdgIvtDLfNJ21BUA,974
6
- adss/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- adss/operations/cone_search.py,sha256=qfdFA2TGqnzuggz4nep21_y4LgmHP4ZMpVupxn87dB0,706
8
- adss-0.1.dist-info/METADATA,sha256=ilpdvMwhIRDpROZuVhtJTKueu5KvQEfMEQVUKpwQvv8,376
9
- adss-0.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
10
- adss-0.1.dist-info/top_level.txt,sha256=ebD44L3R0PEvEFoRCJ-RjTIsQ9Yjpo2aAYC1BMtueLg,5
11
- adss-0.1.dist-info/RECORD,,
File without changes
File without changes