dara-core 1.19.0__py3-none-any.whl → 1.20.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dara/core/__init__.py +1 -0
- dara/core/auth/basic.py +13 -7
- dara/core/auth/definitions.py +2 -2
- dara/core/auth/utils.py +1 -1
- dara/core/base_definitions.py +7 -42
- dara/core/data_utils.py +16 -17
- dara/core/definitions.py +8 -8
- dara/core/interactivity/__init__.py +6 -0
- dara/core/interactivity/actions.py +26 -22
- dara/core/interactivity/any_data_variable.py +7 -135
- dara/core/interactivity/any_variable.py +1 -1
- dara/core/interactivity/client_variable.py +71 -0
- dara/core/interactivity/data_variable.py +8 -266
- dara/core/interactivity/derived_data_variable.py +6 -290
- dara/core/interactivity/derived_variable.py +379 -199
- dara/core/interactivity/filtering.py +29 -2
- dara/core/interactivity/loop_variable.py +2 -2
- dara/core/interactivity/non_data_variable.py +5 -68
- dara/core/interactivity/plain_variable.py +87 -14
- dara/core/interactivity/server_variable.py +325 -0
- dara/core/interactivity/state_variable.py +69 -0
- dara/core/interactivity/switch_variable.py +15 -15
- dara/core/interactivity/tabular_variable.py +94 -0
- dara/core/interactivity/url_variable.py +10 -90
- dara/core/internal/cache_store/cache_store.py +5 -20
- dara/core/internal/dependency_resolution.py +27 -69
- dara/core/internal/devtools.py +10 -3
- dara/core/internal/execute_action.py +9 -3
- dara/core/internal/multi_resource_lock.py +70 -0
- dara/core/internal/normalization.py +0 -5
- dara/core/internal/pandas_utils.py +105 -3
- dara/core/internal/pool/definitions.py +1 -1
- dara/core/internal/pool/task_pool.py +9 -6
- dara/core/internal/pool/utils.py +19 -14
- dara/core/internal/registries.py +3 -2
- dara/core/internal/registry.py +1 -1
- dara/core/internal/registry_lookup.py +5 -3
- dara/core/internal/routing.py +52 -121
- dara/core/internal/store.py +2 -29
- dara/core/internal/tasks.py +372 -182
- dara/core/internal/utils.py +25 -3
- dara/core/internal/websocket.py +1 -1
- dara/core/js_tooling/js_utils.py +2 -0
- dara/core/logging.py +10 -6
- dara/core/persistence.py +26 -4
- dara/core/umd/dara.core.umd.js +1082 -1464
- dara/core/visual/dynamic_component.py +17 -13
- {dara_core-1.19.0.dist-info → dara_core-1.20.0a1.dist-info}/METADATA +11 -11
- {dara_core-1.19.0.dist-info → dara_core-1.20.0a1.dist-info}/RECORD +52 -47
- {dara_core-1.19.0.dist-info → dara_core-1.20.0a1.dist-info}/LICENSE +0 -0
- {dara_core-1.19.0.dist-info → dara_core-1.20.0a1.dist-info}/WHEEL +0 -0
- {dara_core-1.19.0.dist-info → dara_core-1.20.0a1.dist-info}/entry_points.txt +0 -0
dara/core/internal/multi_resource_lock.py ADDED

@@ -0,0 +1,70 @@
+from collections import Counter
+from contextlib import asynccontextmanager
+
+import anyio
+
+
+class MultiResourceLock:
+    """
+    A class that manages multiple named locks for concurrent access to shared resources.
+
+    This class allows for acquiring and releasing locks on named resources, ensuring
+    that only one task can access a specific resource at a time. It automatically
+    creates locks for new resources and cleans them up when they're no longer in use.
+
+    :reentrant:
+        If True a task can acquire the same resource more than once; every
+        subsequent acquire of an already-held lock is a no-op. If False the
+        second attempt raises ``RuntimeError``.
+    """
+
+    def __init__(self):
+        self._locks: dict[str, anyio.Lock] = {}
+        self._waiters = Counter[str]()
+        self._cleanup_lock = anyio.Lock()
+
+    def is_locked(self, resource_name: str) -> bool:
+        """
+        Check if a lock for the specified resource is currently held.
+
+        :param resource_name (str): The name of the resource to check.
+        :return: True if the lock is held, False otherwise.
+        """
+        return resource_name in self._locks and self._locks[resource_name].locked()
+
+    @asynccontextmanager
+    async def acquire(self, resource_name: str):
+        """
+        Acquire a lock for the specified resource.
+
+        This method is an async context manager that acquires a lock for the given
+        resource name. If the lock doesn't exist, it creates one. It also keeps
+        track of waiters to ensure proper cleanup when the resource is no longer in use.
+
+        :param resource_name (str): The name of the resource to lock.
+
+        Usage:
+        ```python
+        async with multi_lock.acquire_lock("resource_a"):
+            # Critical section for "resource_a"
+            ...
+        ```
+
+        Note:
+            The lock is automatically released when exiting the context manager.
+        """
+
+        async with self._cleanup_lock:
+            if resource_name not in self._locks:
+                self._locks[resource_name] = anyio.Lock()
+            self._waiters[resource_name] += 1
+
+        try:
+            async with self._locks[resource_name]:
+                yield
+        finally:
+            async with self._cleanup_lock:
+                self._waiters[resource_name] -= 1
+                if self._waiters[resource_name] <= 0:
+                    del self._waiters[resource_name]
+                    del self._locks[resource_name]
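A minimal usage sketch of the new `MultiResourceLock` (assuming only the module path and the `acquire` API shown in the hunk above; the resource names and task-group structure are illustrative, not taken from the package):

```python
import anyio

from dara.core.internal.multi_resource_lock import MultiResourceLock

multi_lock = MultiResourceLock()


async def recompute(cache_key: str) -> None:
    # Only one task may hold the lock for a given key at a time;
    # work on different keys proceeds concurrently.
    async with multi_lock.acquire(cache_key):
        await anyio.sleep(0.1)  # stand-in for the protected work


async def main() -> None:
    async with anyio.create_task_group() as tg:
        tg.start_soon(recompute, 'key-a')
        tg.start_soon(recompute, 'key-a')  # waits for the first task to release 'key-a'
        tg.start_soon(recompute, 'key-b')  # runs concurrently with 'key-a'


anyio.run(main)
```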
dara/core/internal/normalization.py CHANGED

@@ -31,7 +31,6 @@ from typing import (
 from typing_extensions import TypedDict, TypeGuard
 
 from dara.core.base_definitions import DaraBaseModel as BaseModel
-from dara.core.internal.hashing import hash_object
 
 JsonLike = Union[Mapping, List]
 

@@ -81,10 +80,6 @@ def _get_identifier(obj: Referrable) -> str:
         nested = ','.join(cast(List[str], obj['nested']))
         identifier = f'{identifier}:{nested}'
 
-    if _is_referrable_with_filters(obj):
-        filter_hash = hash_object(obj['filters'])
-        identifier = f'{identifier}:{filter_hash}'
-
     return identifier
 
 
dara/core/internal/pandas_utils.py CHANGED

@@ -15,13 +15,24 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-
+import json
+import uuid
+from typing import Any, Literal, Optional, TypeVar, Union, cast, overload
 
-from pandas import DataFrame, MultiIndex
+from pandas import DataFrame, MultiIndex, Series
+from typing_extensions import TypedDict, TypeGuard
 
 INDEX = '__index__'
 
 
+@overload
+def append_index(df: DataFrame) -> DataFrame: ...
+
+
+@overload
+def append_index(df: None) -> None: ...
+
+
 def append_index(df: Optional[DataFrame]) -> Optional[DataFrame]:
     """
     Add a numerical index column to the dataframe

@@ -65,6 +76,12 @@ def df_convert_to_internal(original_df: DataFrame) -> DataFrame:
     if any(isinstance(c, str) and c.startswith('__col__') for c in df.columns):
         return df
 
+    # Apply display transformations to the DataFrame
+    format_for_display(df)
+
+    # Append index to match the way we process the original DataFrame
+    df = cast(DataFrame, append_index(df))
+
     # Handle hierarchical columns: [(A, B), (A, C)] -> ['A_B', 'A_C']
     if isinstance(df.columns, MultiIndex):
         df.columns = ['_'.join(col).strip() if col[0] != INDEX else INDEX for col in df.columns.values]

@@ -90,4 +107,89 @@ def df_convert_to_internal(original_df: DataFrame) -> DataFrame:
 
 
 def df_to_json(df: DataFrame) -> str:
-    return df_convert_to_internal(df).to_json(orient='records') or ''
+    return df_convert_to_internal(df).to_json(orient='records', date_unit='ns') or ''
+
+
+def format_for_display(df: DataFrame) -> None:
+    """
+    Apply transformations to a DataFrame to make it suitable for display.
+    Not: this does NOT make a copy of the DataFrame
+    """
+    for col in df.columns:
+        column_data = df[col]
+        if isinstance(column_data, DataFrame):
+            # Handle duplicate column names - format each column in the sub-DataFrame
+            for sub_col in column_data.columns:
+                if isinstance(column_data[sub_col], Series) and column_data[sub_col].dtype == 'object':
+                    column_data.loc[:, sub_col] = column_data[sub_col].apply(str)
+        elif column_data.dtype == 'object':
+            # We need to convert all values to string to avoid issues with
+            # displaying data in the Table component, for example when
+            # displaying datetime and number objects in the same column
+            df.loc[:, col] = column_data.apply(str)
+
+
+class FieldType(TypedDict):
+    name: Union[str, tuple[str, ...]]
+    type: Literal['integer', 'number', 'boolean', 'datetime', 'duration', 'any', 'str']
+
+
+class DataFrameSchema(TypedDict):
+    fields: list[FieldType]
+    primaryKey: list[str]
+
+
+class DataResponse(TypedDict):
+    data: Optional[DataFrame]
+    count: int
+    schema: Optional[DataFrameSchema]
+
+
+def is_data_response(response: Any) -> TypeGuard[DataResponse]:
+    has_shape = isinstance(response, dict) and 'data' in response and 'count' in response
+    if not has_shape:
+        return False
+    return response['data'] is None or isinstance(response['data'], DataFrame)
+
+
+def data_response_to_json(response: DataResponse) -> str:
+    """
+    Serialize a DataResponse to JSON.
+
+    json.dumps() custom serializers only accept value->value mappings, whereas `to_json` on pandas returns a string directly.
+    To avoid double serialization, we first insert a placeholder string and then replace it with the actual serialized JSON.
+    """
+    placeholder = str(uuid.uuid4())
+
+    def _custom_serializer(obj: Any) -> Any:
+        if isinstance(obj, DataFrame):
+            return placeholder
+        raise TypeError(f'Object of type {type(obj)} is not JSON serializable')
+
+    result = json.dumps(response, default=_custom_serializer)
+    result = result.replace(
+        f'"{placeholder}"', df_to_json(response['data']) if response['data'] is not None else 'null'
+    )
+    return result
+
+
+def build_data_response(data: DataFrame, count: int) -> DataResponse:
+    data_internal = df_convert_to_internal(data)
+    schema = get_schema(data_internal)
+
+    return DataResponse(data=data, count=count, schema=schema)
+
+
+def get_schema(df: DataFrame):
+    from pandas.io.json._table_schema import build_table_schema
+
+    raw_schema = build_table_schema(df)
+
+    for field_data in cast(list, raw_schema['fields']):
+        if field_data.get('type') == 'datetime':
+            # for datetime fields we need to know the resolution, so we get the actual e.g. `datetime64[ns]` string
+            column_name = field_data.get('name')
+            dtype_str = str(df[column_name].dtype)
+            field_data['type'] = dtype_str
+
+    return cast(DataFrameSchema, raw_schema)
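The `data_response_to_json` docstring above describes a placeholder trick: serialize the surrounding dict with `json.dumps`, then splice in the string pandas already produced. A self-contained sketch of the same idea, independent of Dara's `DataResponse` type (the payload and column names below are made up):

```python
import json
import uuid

from pandas import DataFrame

df = DataFrame({'a': [1, 2], 'b': ['x', 'y']})
payload = {'data': df, 'count': len(df)}

# 1. Stand in for the DataFrame with a unique placeholder string, since
#    json.dumps default= hooks must return JSON-encodable values.
placeholder = str(uuid.uuid4())
encoded = json.dumps(payload, default=lambda o: placeholder if isinstance(o, DataFrame) else str(o))

# 2. Splice in the string pandas already produced, avoiding a decode/re-encode round trip.
encoded = encoded.replace(f'"{placeholder}"', df.to_json(orient='records'))

print(encoded)  # {"data": [{"a":1,"b":"x"},{"a":2,"b":"y"}], "count": 2}
```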
dara/core/internal/pool/definitions.py CHANGED

@@ -95,7 +95,7 @@ class TaskDefinition:
     def __await__(self):
         """Await the underlying event, then return or raise the result"""
         yield from self.event.wait().__await__()
-        if isinstance(self.result,
+        if isinstance(self.result, BaseException):
             raise self.result
         return self.result
 
dara/core/internal/pool/task_pool.py CHANGED

@@ -158,7 +158,7 @@ class TaskPool:
 
         task = self.tasks.pop(task_uid)
         if not task.event.is_set():
-            task.result =
+            task.result = anyio.get_cancelled_exc_class()()
             task.event.set()
 
         # Task in progress, stop the worker

@@ -499,11 +499,14 @@ class TaskPool:
            while self.status not in (PoolStatus.ERROR, PoolStatus.STOPPED):
                await anyio.sleep(0.1)
 
-
-
-
-
-
+                try:
+                    self._handle_excess_workers()
+                    self._handle_orphaned_workers()
+                    self._handle_dead_workers()
+                    self._create_workers()
+                    await self._process_next_worker_message()
+                except Exception as e:
+                    dev_logger.error('Error in task pool', e)
        finally:
            self.loop_stopped.set()
 
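The change to the pool's main loop above wraps each iteration's housekeeping in a try/except so a single failure is logged instead of killing the supervisor. A generic sketch of that pattern (the names here are invented and not part of dara-core):

```python
import anyio


async def supervise(should_stop, steps) -> None:
    """Poll-style supervisor: a failing step is logged and retried on the next tick."""
    while not should_stop():
        await anyio.sleep(0.1)
        try:
            for step in steps:
                await step()
        except Exception as exc:  # deliberately broad, mirroring the hunk above
            print(f'error in supervision loop: {exc!r}')
```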
dara/core/internal/pool/utils.py CHANGED

@@ -27,6 +27,8 @@ from typing import Any, Callable, Optional, Tuple
 import anyio
 from tblib import Traceback
 
+from dara.core.logging import dev_logger
+
 
 class SubprocessException:
     """

@@ -131,20 +133,23 @@ async def stop_process_async(process: BaseProcess, timeout: float = 3):
     # Terminate and wait for it to shutdown
     process.terminate()
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        # mimic process.join() in an async way to not block
+        await wait_while(process.is_alive, timeout)
+
+        # If it's still alive
+        if process.is_alive():
+            try:
+                os.kill(process.pid, signal.SIGKILL)
+                await wait_while(process.is_alive, timeout)
+            except OSError as e:
+                raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}') from e
+
+        # If it's still alive raise an exception
+        if process.is_alive():
+            raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}')
+    except Exception as e:
+        dev_logger.error('Error stopping process', e)
 
 
 def stop_process(process: BaseProcess, timeout: float = 3):
dara/core/internal/registries.py CHANGED

@@ -27,11 +27,11 @@ from dara.core.definitions import (
     EndpointConfiguration,
     Template,
 )
-from dara.core.interactivity.data_variable import DataVariableRegistryEntry
 from dara.core.interactivity.derived_variable import (
     DerivedVariableRegistryEntry,
     LatestValueRegistryEntry,
 )
+from dara.core.interactivity.server_variable import ServerVariableRegistryEntry
 from dara.core.internal.download import DownloadDataEntry
 from dara.core.internal.registry import Registry, RegistryType
 from dara.core.internal.websocket import CustomClientMessagePayload

@@ -44,7 +44,8 @@ upload_resolver_registry = Registry[UploadResolverDef](
 ) # functions for upload resolvers requiring backend calls
 component_registry = Registry[ComponentTypeAnnotation](RegistryType.COMPONENTS, CORE_COMPONENTS)
 config_registry = Registry[EndpointConfiguration](RegistryType.ENDPOINT_CONFIG)
-
+server_variable_registry = Registry[ServerVariableRegistryEntry](RegistryType.SERVER_VARIABLE, allow_duplicates=False)
+"""map of server variable uid -> server variable entry"""
 derived_variable_registry = Registry[DerivedVariableRegistryEntry](
     RegistryType.DERIVED_VARIABLE, allow_duplicates=False
 )
dara/core/internal/registry.py CHANGED

@@ -32,7 +32,7 @@ class RegistryType(str, Enum):
     DOWNLOAD = 'Download'
     COMPONENTS = 'Components'
     ENDPOINT_CONFIG = 'Endpoint Configuration'
-
+    SERVER_VARIABLE = 'ServerVariable'
     DERIVED_VARIABLE = 'DerivedVariable'
     LAST_VALUE = 'LatestValue'
     TEMPLATE = 'Template'
dara/core/internal/registry_lookup.py CHANGED

@@ -16,7 +16,7 @@ limitations under the License.
 """
 
 from collections.abc import Coroutine
-from typing import Callable, Dict, Literal, Union
+from typing import Callable, Dict, Literal, TypeVar, Union
 
 from dara.core.internal.registry import Registry, RegistryType
 from dara.core.internal.utils import async_dedupe

@@ -24,8 +24,8 @@ from dara.core.internal.utils import async_dedupe
 RegistryLookupKey = Literal[
     RegistryType.ACTION,
     RegistryType.COMPONENTS,
-    RegistryType.DATA_VARIABLE,
     RegistryType.DERIVED_VARIABLE,
+    RegistryType.SERVER_VARIABLE,
     RegistryType.STATIC_KWARGS,
     RegistryType.UPLOAD_RESOLVER,
     RegistryType.BACKEND_STORE,

@@ -33,6 +33,8 @@ RegistryLookupKey = Literal[
 ]
 CustomRegistryLookup = Dict[RegistryLookupKey, Callable[[str], Coroutine]]
 
+RegistryType = TypeVar('RegistryType')
+
 
 class RegistryLookup:
     """

@@ -45,7 +47,7 @@ class RegistryLookup:
         self.handlers = handlers
 
     @async_dedupe
-    async def get(self, registry: Registry, uid: str):
+    async def get(self, registry: Registry[RegistryType], uid: str) -> RegistryType:
         """
         Get the entry from registry by uid.
         If uid is not in registry and it has a external handler that defined, will execute the handler
dara/core/internal/routing.py CHANGED

@@ -16,7 +16,6 @@ limitations under the License.
 """
 
 import inspect
-import json
 import os
 from collections.abc import Mapping
 from functools import wraps

@@ -24,7 +23,6 @@ from importlib.metadata import version
 from typing import Any, Callable, Dict, List, Optional
 
 import anyio
-import pandas
 from fastapi import (
     APIRouter,
     Body,

@@ -40,26 +38,28 @@ from fastapi.responses import StreamingResponse
 from pandas import DataFrame
 from pydantic import BaseModel
 from starlette.background import BackgroundTask
+from starlette.status import HTTP_415_UNSUPPORTED_MEDIA_TYPE
 
 from dara.core.auth.routes import verify_session
-from dara.core.base_definitions import ActionResolverDef, BaseTask, UploadResolverDef
+from dara.core.base_definitions import ActionResolverDef, BaseTask, NonTabularDataError, UploadResolverDef
 from dara.core.configuration import Configuration
-from dara.core.interactivity.any_data_variable import
+from dara.core.interactivity.any_data_variable import upload
 from dara.core.interactivity.filtering import FilterQuery, Pagination
+from dara.core.interactivity.server_variable import ServerVariable
 from dara.core.internal.cache_store import CacheStore
 from dara.core.internal.download import DownloadRegistryEntry
 from dara.core.internal.execute_action import CURRENT_ACTION_ID
 from dara.core.internal.normalization import NormalizedPayload, denormalize, normalize
-from dara.core.internal.pandas_utils import df_to_json
+from dara.core.internal.pandas_utils import data_response_to_json, df_to_json, is_data_response
 from dara.core.internal.registries import (
     action_def_registry,
     action_registry,
     backend_store_registry,
     component_registry,
-    data_variable_registry,
     derived_variable_registry,
     download_code_registry,
     latest_value_registry,
+    server_variable_registry,
     static_kwargs_registry,
     template_registry,
     upload_resolver_registry,

@@ -307,135 +307,65 @@ def create_router(config: Configuration):
         except KeyError as err:
             raise ValueError(f'Could not find latest value for derived variable with uid: {uid}') from err
 
-    class
+    class TabularRequestBody(BaseModel):
         filters: Optional[FilterQuery] = None
-
-
+        ws_channel: str
+        dv_values: Optional[NormalizedPayload[List[Any]]] = None
+        """DerivedVariable values if variable is a DerivedVariable"""
+        force_key: Optional[str] = None
+        """Optional force key if variable is a DerivedVariable and a recalculation is forced"""
 
-    @core_api_router.post('/
-    async def
+    @core_api_router.post('/tabular-variable/{uid}', dependencies=[Depends(verify_session)])
+    async def get_tabular_variable(
         uid: str,
-        body:
+        body: TabularRequestBody,
         offset: Optional[int] = None,
         limit: Optional[int] = None,
         order_by: Optional[str] = None,
         index: Optional[str] = None,
     ):
+        """
+        Generic endpoint for getting tabular data from a variable.
+        Supports ServerVariables and DerivedVariables.
+        """
+        WS_CHANNEL.set(body.ws_channel)
+
         try:
-
-            task_mgr: TaskManager = utils_registry.get('TaskManager')
+            pagination = Pagination(offset=offset, limit=limit, orderBy=order_by, index=index)
             registry_mgr: RegistryLookup = utils_registry.get('RegistryLookup')
-            data_variable_entry: DataVariableRegistryEntry = await registry_mgr.get(data_variable_registry, uid)
-
-            data = None
-            WS_CHANNEL.set(body.ws_channel)
-
-            if data_variable_entry.type == 'derived':
-                if body.cache_key is None:
-                    raise HTTPException(
-                        status_code=400,
-                        detail='Cache key is required for derived data variables',
-                    )
-
-                if body.ws_channel is None:
-                    raise HTTPException(
-                        status_code=400,
-                        detail='Websocket channel is required for derived data variables',
-                    )
-
-                derived_variable_entry = await registry_mgr.get(derived_variable_registry, uid)
-
-                data = await data_variable_entry.get_data(
-                    derived_variable_entry,
-                    data_variable_entry,
-                    body.cache_key,
-                    store,
-                    body.filters,
-                    Pagination(offset=offset, limit=limit, orderBy=order_by, index=index),
-                    format_for_display=True,
-                )
-                if isinstance(data, BaseTask):
-                    await task_mgr.run_task(data, body.ws_channel)
-                    return {'task_id': data.task_id}
-            elif data_variable_entry.type == 'plain':
-                data = await data_variable_entry.get_data(
-                    data_variable_entry,
-                    store,
-                    body.filters,
-                    Pagination(offset=offset, limit=limit, orderBy=order_by, index=index),
-                    format_for_display=True,
-                )
 
-
-
-
-
-
-                    'uid': uid,
-                }, # type: ignore
-            )
-
-            if data is None:
-                return None
-
-            # Explicitly convert to JSON to avoid implicit serialization;
-            # return as records as that makes more sense in a JSON structure
-            return Response(
-                content=df_to_json(data) if isinstance(data, pandas.DataFrame) else data,
-                media_type='application/json',
-            ) # type: ignore
-        except ValueError as e:
-            raise HTTPException(status_code=400, detail=str(e)) from e
+            # ServerVariable
+            if body.dv_values is None:
+                server_variable_entry = await registry_mgr.get(server_variable_registry, uid)
+                data_response = await ServerVariable.get_tabular_data(server_variable_entry, body.filters, pagination)
+                return Response(data_response_to_json(data_response), media_type='application/json')
 
-
-        cache_key: Optional[str] = None
-        filters: Optional[FilterQuery] = None
-
-    @core_api_router.post('/data-variable/{uid}/count', dependencies=[Depends(verify_session)])
-    async def get_data_variable_count(uid: str, body: Optional[DataVariableCountRequestBody] = None):
-        try:
+            # DerivedVariable
             store: CacheStore = utils_registry.get('Store')
-
-            variable_def = await registry_mgr.get(
+            task_mgr: TaskManager = utils_registry.get('TaskManager')
+            variable_def = await registry_mgr.get(derived_variable_registry, uid)
+            values = denormalize(body.dv_values.data, body.dv_values.lookup)
 
-
-
-
-            )
+            result = await variable_def.get_tabular_data(
+                variable_def, store, task_mgr, values, body.force_key, pagination, body.filters
+            )
 
-            if
-
-
-                detail="Cache key is required when requesting DerivedDataVariable's count",
-            )
+            if isinstance(result, BaseTask):
+                await task_mgr.run_task(result, body.ws_channel)
+                return {'task_id': result.task_id}
 
-            return
-        except
-            raise HTTPException(status_code=
+            return Response(data_response_to_json(result), media_type='application/json')
+        except NonTabularDataError as e:
+            raise HTTPException(status_code=HTTP_415_UNSUPPORTED_MEDIA_TYPE, detail=str(e)) from e
 
-    @core_api_router.get('/
-    async def
-
-
-
-
-
-
-            return await data_def.get_schema(data_def, store)
-
-        if cache_key is None:
-            raise HTTPException(
-                status_code=400,
-                detail='Cache key is required when requesting DerivedDataVariable schema',
-            )
-
-        # Use the other registry for derived variables
-        derived_ref = await registry_mgr.get(derived_variable_registry, uid)
-        data = await data_def.get_schema(derived_ref, store, cache_key)
-        content = json.dumps(jsonable_encoder(data)) if isinstance(data, dict) else data
-        return Response(content=content, media_type='application/json')
-        except ValueError as e:
-            raise HTTPException(status_code=400, detail=str(e)) from e
+    @core_api_router.get('/server-variable/{uid}/sequence', dependencies=[Depends(verify_session)])
+    async def get_server_variable_sequence(
+        uid: str,
+    ):
+        registry_mgr: RegistryLookup = utils_registry.get('RegistryLookup')
+        server_variable_entry = await registry_mgr.get(server_variable_registry, uid)
+        seq_num = await ServerVariable.get_sequence_number(server_variable_entry)
+        return {'sequence_number': seq_num}
 
     @core_api_router.post('/data/upload', dependencies=[Depends(verify_session)])
     async def upload_data(

@@ -473,7 +403,6 @@ def create_router(config: Configuration):
         values: NormalizedPayload[List[Any]]
         force_key: Optional[str] = None
         ws_channel: str
-        is_data_variable: Optional[bool] = False
 
     @core_api_router.post('/derived-variable/{uid}', dependencies=[Depends(verify_session)])
    async def get_derived_variable(uid: str, body: DerivedStateRequestBody):

@@ -553,9 +482,11 @@
                 {'value': res},
             )
 
-            # Serialize dataframes correctly
+            # Serialize dataframes correctly, either direct or as a DataResponse
             if isinstance(res, DataFrame):
-                return Response(df_to_json(res))
+                return Response(df_to_json(res), media_type='application/json')
+            elif is_data_response(res):
+                return Response(data_response_to_json(res), media_type='application/json')
 
             return res
         except Exception as err: