atlasai-dstoolkit-client 0.0.12__py3-none-any.whl → 0.0.15__py3-none-any.whl

This diff represents the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

atlasai/toolkit/__init__.py
@@ -33,5 +33,5 @@ del osp
 del os
 
 
-from . import fabric, feature, dataset, model
+from . import fabric, feature, dataset, model, workflows
 from .login import login, logout

atlasai/toolkit/feature.py
@@ -15,6 +15,7 @@
 import logging
 import uuid
 
+import geopandas as gpd
 import pandas as pd
 from shapely import wkb
 
@@ -60,7 +61,7 @@ class FeatureExport:
         self._export = self._details()
         return self.export
 
-    def results(self, limit=None) -> pd.DataFrame:
+    def results(self, limit=None, as_gdf=False) -> pd.DataFrame:
         if not self.export:
             return pd.DataFrame([])
 
@@ -84,6 +85,8 @@ class FeatureExport:
 
         try:
             df['shape'] = wkb.loads(df['shape'])
+            if as_gdf:
+                df = gpd.GeoDataFrame(df, geometry="shape")
         except Exception:
             logger.warning('Shape field is not in WKB format. Skipping conversion.')
 
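FeatureExport.results() now accepts an `as_gdf` flag: when the `shape` column decodes cleanly from WKB, the result is returned as a geopandas GeoDataFrame instead of a plain pandas DataFrame. A minimal sketch of the conversion the new branch performs; the sample data below is made up for illustration, since in the toolkit the frame comes from the export itself:

    import geopandas as gpd
    import pandas as pd
    from shapely import wkb
    from shapely.geometry import Point

    # Stand-in for an export payload whose `shape` column holds WKB bytes.
    df = pd.DataFrame({'id': [1, 2], 'shape': [Point(0, 0).wkb, Point(1, 1).wkb]})

    # Equivalent of results(as_gdf=True): decode WKB, then promote `shape` to the geometry column.
    df['shape'] = wkb.loads(df['shape'])
    gdf = gpd.GeoDataFrame(df, geometry='shape')
    print(type(gdf).__name__, gdf.geometry.name)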

atlasai/toolkit/output.py (new file)
@@ -0,0 +1,18 @@
+from rich.console import Console
+from rich.text import Text
+
+console = Console()
+
+def print_title(text):
+    text = Text(text, style="bold underline", justify="center")
+    console.print(text)
+
+def print_subtitle(text):
+    text = Text(text, style="italic cyan", justify="center")
+    console.print(text)
+
+def print_body(text):
+    if isinstance(text, bytes):
+        text = text.decode("utf-8")
+    text = Text(text, style="white", justify="left")
+    console.print(text)
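output.py is a thin wrapper around rich that the new workflows module (notably `Workflow.logs()`) uses to print titles, subtitles, and log bodies. A quick sketch of the helpers in use; the strings below are example values only:

    from atlasai.toolkit import output as o

    o.print_title('Workflow: example-run')    # bold, underlined, centered
    o.print_subtitle('Status: Running')       # italic cyan, centered
    o.print_body(b'byte payloads are decoded to utf-8 before rendering')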

atlasai/toolkit/workflows.py (new file)
@@ -0,0 +1,234 @@
+# Copyright 2025 AtlasAI PBC. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import ast
+import logging
+import os
+import time
+
+import pandas as pd
+from tqdm import tqdm
+
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import dataclass
+
+import requests
+
+from . import api, output as o
+
+logger = logging.getLogger('atlasai.toolkit')
+
+@dataclass()
+class WorkflowResponse:
+    id: str
+    uid: str
+    name: str
+    create_date: str
+    update_date: str
+    status: str
+    message: str
+
+
+class Workflow:
+    type = None
+    id = None
+
+    def __init__(self, id=None):
+        # Allow the selection of an existing workflow and check the logs.
+        self.id = id
+
+    @property
+    def status(self):
+        _result = self.get()
+        return _result['data']['status']
+
+    def results(self, *args, **kwargs):
+        return self._results(*args, **kwargs)
+
+    def _set(self, result):
+        self.id = result['data']['id']
+
+    def _config(self):
+        raise NotImplementedError()
+
+    def _validate(self):
+        raise NotImplementedError()
+
+    def _results(self, *args, **kwargs):
+        raise NotImplementedError
+
+    def get(self):
+        if not self.id:
+            raise Exception('No valid execution of the workflow. Please use the `run` method first.')
+
+        _, result = api._get(resource=f'workflow/{self.id}')
+        return result
+
+    def logs(self, only_errors=True):
+        def cleanup_logs(logs):
+            if not logs:
+                return "-"
+            return ast.literal_eval(logs).decode('utf-8')
+
+        if not self.id:
+            raise Exception('No valid execution of the workflow. Please use the `run` method first.')
+
+        workflow = self.get()
+
+        _, result = api._get(resource=f'workflow/{self.id}/logs', params={'only_errors': only_errors})
+
+        o.print_title(f'Workflow: {workflow["data"]["name"]}')
+        o.print_subtitle(f'Status: {workflow["data"]["status"]}')
+        o.print_subtitle(f'Message: {workflow["data"]["message"] or "-"}')
+
+        o.print_body('---------------------------------------------------')
+        if not result['data']:
+            if only_errors:
+                o.print_title('No errors detected.')
+            else:
+                o.print_title('No pod found to pull logs from.')
+
+        for pod, data in result['data'].items():
+            o.print_title(f'Pod: {pod}')
+            o.print_subtitle(f'Message: {data["message"] or "-"}')
+            o.print_subtitle(f'Logs: {cleanup_logs(data.get("logs"))}')
+            o.print_body('---------------------------------------------------')
+
+    def run(self, wait_until_complete=False):
+        self._validate()
+        status = 'Running'
+        message = ''
+        data = dict(
+            type=self.type,
+            config=self._config()
+        )
+        _, result = api._post(resource='workflow', data=data)
+        self._set(result)
+        if wait_until_complete:
+            while True:
+                _result = self.get()
+                if _result['data']['status'].lower() != 'running':
+                    status = _result['data']['status']
+                    message = _result['data']['message']
+                    break
+
+                time.sleep(5)
+        return WorkflowResponse(**result['data'], status=status, message=message)
+
+class Test(Workflow):
+    type = 'test'
+
+    def __repr__(self):
+        return 'Test'
+
+    def __str__(self):
+        return 'Test'
+
+    def configure(self):
+        pass
+
+    def _config(self):
+        return dict()
+
+    def _validate(self):
+        pass
+
+
+class Electrification(Workflow):
+    type = 'electrification'
+
+    def __init__(self, id=None):
+        self.aoi_name = None
+        self.aoi_path = None
+        self.aoi = None
+        self.output_path = None
+
+        super().__init__(id)
+
+    def __repr__(self):
+        return f'Electrification({self.aoi_name})'
+
+    def __str__(self):
+        return f'Electrification({self.aoi_name})'
+
+    def configure(self, aoi_name=None, aoi_path=None, aoi=None, output_path=None):
+        self.aoi_name = aoi_name or self.aoi_name
+        self.aoi_path = aoi_path or self.aoi_path
+        self.aoi = aoi or self.aoi
+        self.output_path = output_path or self.output_path
+
+    def _config(self):
+        cfg = {}
+        for name, value in (
+            ('aoi_name', self.aoi_name),
+            ('aoi_geojson', self.aoi),
+            ('aoi_geojson_uri', self.aoi_path),
+            ('output_bucket', self.output_path)
+        ):
+            if value:
+                cfg[name] = value
+        return cfg
+
+    def _validate(self):
+        if not self.aoi_name:
+            raise Exception('`aoi_name` must be specified.')
+
+        if not self.aoi_path and not self.aoi:
+            raise Exception('`aoi_path` or `aoi` must be specified.')
+
+    def _results(self, save_to=None):
+        results = []
+        if not self.id:
+            raise Exception('No valid execution of the workflow. Please use the `run` method first.')
+
+        workflow = self.get()
+        if not workflow['data']['status'] == 'Succeeded':
+            raise Exception(f'Workflow not in `Succeeded` state. Current state: {workflow["data"]["status"]}')
+
+        _, result = api._get(resource=f'workflow/{self.id}/results')
+
+        with ThreadPoolExecutor(max_workers=8) as ex, tqdm(total=len(result['data']), desc="Downloading", unit="file") as pbar:
+            fut_map = {ex.submit(self._download_file, res["name"], res["url"], save_to): i
+                       for i, res in enumerate(result['data'])}
+            for fut in as_completed(fut_map):
+                results.append(fut.result())
+                pbar.update(1)
+
+        return results
+
+    def _download_file(self, name, url, to=None):
+        local_path = os.path.join(to or os.getcwd(), name.split('/')[-1])
+
+        logger.debug(f'Downloading :{name}')
+
+        with requests.get(url, stream=True, timeout=120) as r:
+
+            logger.debug(f'Finished downloading: {name}')
+            r.raise_for_status()
+            with open(local_path, "wb") as f:
+                for chunk in r.iter_content(chunk_size=1024 * 1024):
+                    if chunk:
+                        f.write(chunk)
+
+        logger.debug(f'Finished saving: {name}')
+
+        return local_path
+
+
+def List(search=None, offset=0, limit=100):
+    pd.set_option("display.max_colwidth", None)
+    resource = 'workflows'
+    _, data = api._list(resource=resource, params={'search': search, 'offset': offset, 'limit': limit})
+    df = pd.DataFrame(data['data'])
+    df = df.drop(columns=['update_date', 'uid'])
+    return df
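The module gives the client a uniform workflow lifecycle: pick a workflow class, `configure()` it, `run()` it (optionally polling until it leaves the Running state), then inspect `status`, print `logs()`, and download `results()`. A hedged usage sketch, assuming the session is already authenticated via the toolkit's `login`; the AOI name, GeoJSON URI, output bucket, and local directory below are placeholders, not values from the package:

    from atlasai.toolkit import workflows

    # Browse recent workflow executions as a pandas DataFrame.
    print(workflows.List(search='electrification', limit=10))

    wf = workflows.Electrification()
    wf.configure(
        aoi_name='demo-aoi',                         # placeholder AOI name
        aoi_path='gs://example-bucket/aoi.geojson',  # placeholder aoi_geojson_uri
        output_path='gs://example-bucket/outputs',   # placeholder output_bucket
    )

    response = wf.run(wait_until_complete=True)  # polls every 5 seconds until status leaves Running
    print(response.status, response.message)

    wf.logs(only_errors=True)              # rich-formatted pod logs via the new output helpers
    paths = wf.results(save_to='results')  # parallel download; the directory must already exist
    print(paths)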

atlasai_dstoolkit_client-0.0.12.dist-info/METADATA → atlasai_dstoolkit_client-0.0.15.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: atlasai-dstoolkit-client
-Version: 0.0.12
+Version: 0.0.15
 Summary: UNKNOWN
 Home-page: UNKNOWN
 Author: AtlasAI SWE
@@ -12,12 +12,14 @@ License-File: LICENSE.txt
 Requires-Dist: arrow (<2.0.0,>=1.3.0)
 Requires-Dist: atlasai-mlhub-client
 Requires-Dist: furl (<3.0.0,>=2.1.2)
+Requires-Dist: geopandas (~=1.1.1)
 Requires-Dist: pandas (<3.0.0,>=2.2.3)
 Requires-Dist: pyarrow (>=19.0.1)
 Requires-Dist: python-dotenv (<2.0.0,>=1.0.1)
 Requires-Dist: requests (<=3.0.0,>=2.32.3)
 Requires-Dist: semver (<4.0.0,>=3.0.2)
 Requires-Dist: shapely (~=2.1.0)
+Requires-Dist: tqdm (~=4.66.5)
 Provides-Extra: dev
 Requires-Dist: coverage ; extra == 'dev'
 Requires-Dist: flake8 ; extra == 'dev'

atlasai_dstoolkit_client-0.0.12.dist-info/RECORD → atlasai_dstoolkit_client-0.0.15.dist-info/RECORD
@@ -1,16 +1,18 @@
-atlasai/toolkit/__init__.py,sha256=rjt3GfV7vA3RFMIzqPwBebJUXdQbeTLmEYmm7SOoOGw,1002
+atlasai/toolkit/__init__.py,sha256=0d2EYTgFf-RH2z1M83gGfZdrt_PBXbJk6o5ZVAKCPtA,1013
 atlasai/toolkit/api.py,sha256=BvO-gLRmbmkKduwbbADjcLlIkS9blzfM_cbMR4DhQmU,5269
 atlasai/toolkit/constants.py,sha256=Jxozn9tKOvAxyOYOZ6bzFtI9m1YETJF1GURzTl9NNC8,422
 atlasai/toolkit/dataset.py,sha256=fhzhxF9YMzIwEpaJZEPOK2SuLMJhVGI75eahxnH_T2c,3254
 atlasai/toolkit/fabric.py,sha256=6aFR2PGQc9P3Qa07WdBg9eKoUzU8n2y_-gGjYcyMrWY,1921
-atlasai/toolkit/feature.py,sha256=VKwX_yAwwMo1WuLZ_RbKVJEH4L1PAoYEPzWa0lH9ats,2828
+atlasai/toolkit/feature.py,sha256=Q5M9zOGafynYuKaELL1kZemYPfKAZh84TgH7jw9J3ZU,2949
 atlasai/toolkit/init.py,sha256=JkdJ6QGdYWrq65jgz2pn5RYXUeUe2Ez88_-eMf5CNi0,1100
 atlasai/toolkit/login.py,sha256=n4ydfo9qCsmbZq6er1xeljBD76vdTJGjbhYHMmOyDbQ,3061
 atlasai/toolkit/model.py,sha256=RUe0HbDpzvHOV9A4rzG3PgN9boMWDHQ2tR7IKHXzbx8,4126
+atlasai/toolkit/output.py,sha256=FyDjrpVlbrEyfHfwOpxp8H57jx_qXahDjO1qpHIeuYM,473
 atlasai/toolkit/requests.py,sha256=X86nIo07hAjUlilZcZ1lV8RB7KOsTKbTGtcY_SpFEXY,1223
 atlasai/toolkit/utils.py,sha256=lYh3P2XOshRgHCjFeXJ0FOJWQW64sddgx8c2kL6Wqwc,1566
-atlasai_dstoolkit_client-0.0.12.dist-info/LICENSE.txt,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-atlasai_dstoolkit_client-0.0.12.dist-info/METADATA,sha256=2vdA8hWTJEIvSm1OgPRt9wmsTtZO_5oR_znxLDNuDbI,1405
-atlasai_dstoolkit_client-0.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-atlasai_dstoolkit_client-0.0.12.dist-info/top_level.txt,sha256=HRTbErU8nmHFDaJJ5R_XYbwpt21dqdjDpSva8xyy_0k,8
-atlasai_dstoolkit_client-0.0.12.dist-info/RECORD,,
+atlasai/toolkit/workflows.py,sha256=9NTqPZqoT46tdjMuFApYsGUpbNNDP_Gp2gYMXgVLo_s,7158
+atlasai_dstoolkit_client-0.0.15.dist-info/LICENSE.txt,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+atlasai_dstoolkit_client-0.0.15.dist-info/METADATA,sha256=Ada-4ks6hKHOq1GmPEJJKnO9TlATP2XVRyAat70spbo,1471
+atlasai_dstoolkit_client-0.0.15.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+atlasai_dstoolkit_client-0.0.15.dist-info/top_level.txt,sha256=HRTbErU8nmHFDaJJ5R_XYbwpt21dqdjDpSva8xyy_0k,8
+atlasai_dstoolkit_client-0.0.15.dist-info/RECORD,,