dara-core 1.20.0a1-py3-none-any.whl → 1.20.1a1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
Files changed (82)
  1. dara/core/__init__.py +0 -3
  2. dara/core/actions.py +2 -1
  3. dara/core/auth/basic.py +16 -22
  4. dara/core/auth/definitions.py +2 -2
  5. dara/core/auth/routes.py +5 -5
  6. dara/core/auth/utils.py +5 -5
  7. dara/core/base_definitions.py +64 -22
  8. dara/core/cli.py +7 -8
  9. dara/core/configuration.py +2 -5
  10. dara/core/css.py +2 -1
  11. dara/core/data_utils.py +19 -18
  12. dara/core/defaults.py +7 -6
  13. dara/core/definitions.py +19 -50
  14. dara/core/http.py +3 -7
  15. dara/core/interactivity/__init__.py +0 -6
  16. dara/core/interactivity/actions.py +50 -52
  17. dara/core/interactivity/any_data_variable.py +134 -7
  18. dara/core/interactivity/any_variable.py +8 -5
  19. dara/core/interactivity/data_variable.py +266 -8
  20. dara/core/interactivity/derived_data_variable.py +290 -7
  21. dara/core/interactivity/derived_variable.py +174 -414
  22. dara/core/interactivity/filtering.py +27 -46
  23. dara/core/interactivity/loop_variable.py +2 -2
  24. dara/core/interactivity/non_data_variable.py +68 -5
  25. dara/core/interactivity/plain_variable.py +15 -89
  26. dara/core/interactivity/switch_variable.py +19 -19
  27. dara/core/interactivity/url_variable.py +90 -10
  28. dara/core/internal/cache_store/base_impl.py +1 -2
  29. dara/core/internal/cache_store/cache_store.py +25 -22
  30. dara/core/internal/cache_store/keep_all.py +1 -4
  31. dara/core/internal/cache_store/lru.py +1 -5
  32. dara/core/internal/cache_store/ttl.py +1 -4
  33. dara/core/internal/cgroup.py +1 -1
  34. dara/core/internal/dependency_resolution.py +66 -60
  35. dara/core/internal/devtools.py +5 -12
  36. dara/core/internal/download.py +4 -13
  37. dara/core/internal/encoder_registry.py +7 -7
  38. dara/core/internal/execute_action.py +13 -13
  39. dara/core/internal/hashing.py +3 -1
  40. dara/core/internal/import_discovery.py +4 -3
  41. dara/core/internal/normalization.py +18 -9
  42. dara/core/internal/pandas_utils.py +5 -107
  43. dara/core/internal/pool/definitions.py +1 -1
  44. dara/core/internal/pool/task_pool.py +16 -25
  45. dara/core/internal/pool/utils.py +18 -21
  46. dara/core/internal/pool/worker.py +2 -3
  47. dara/core/internal/port_utils.py +1 -1
  48. dara/core/internal/registries.py +6 -12
  49. dara/core/internal/registry.py +2 -4
  50. dara/core/internal/registry_lookup.py +5 -11
  51. dara/core/internal/routing.py +145 -109
  52. dara/core/internal/scheduler.py +8 -13
  53. dara/core/internal/settings.py +2 -2
  54. dara/core/internal/store.py +29 -2
  55. dara/core/internal/tasks.py +195 -379
  56. dara/core/internal/utils.py +13 -36
  57. dara/core/internal/websocket.py +20 -21
  58. dara/core/js_tooling/js_utils.py +26 -28
  59. dara/core/js_tooling/templates/vite.config.template.ts +3 -12
  60. dara/core/logging.py +12 -13
  61. dara/core/main.py +11 -14
  62. dara/core/metrics/cache.py +1 -1
  63. dara/core/metrics/utils.py +3 -3
  64. dara/core/persistence.py +5 -27
  65. dara/core/umd/dara.core.umd.js +55425 -59091
  66. dara/core/visual/components/__init__.py +2 -2
  67. dara/core/visual/components/fallback.py +4 -30
  68. dara/core/visual/components/for_cmp.py +1 -4
  69. dara/core/visual/css/__init__.py +31 -30
  70. dara/core/visual/dynamic_component.py +28 -31
  71. dara/core/visual/progress_updater.py +3 -4
  72. {dara_core-1.20.0a1.dist-info → dara_core-1.20.1a1.dist-info}/METADATA +11 -12
  73. dara_core-1.20.1a1.dist-info/RECORD +114 -0
  74. dara/core/interactivity/client_variable.py +0 -71
  75. dara/core/interactivity/server_variable.py +0 -325
  76. dara/core/interactivity/state_variable.py +0 -69
  77. dara/core/interactivity/tabular_variable.py +0 -94
  78. dara/core/internal/multi_resource_lock.py +0 -70
  79. dara_core-1.20.0a1.dist-info/RECORD +0 -119
  80. {dara_core-1.20.0a1.dist-info → dara_core-1.20.1a1.dist-info}/LICENSE +0 -0
  81. {dara_core-1.20.0a1.dist-info → dara_core-1.20.1a1.dist-info}/WHEEL +0 -0
  82. {dara_core-1.20.0a1.dist-info → dara_core-1.20.1a1.dist-info}/entry_points.txt +0 -0
dara/core/internal/normalization.py

@@ -15,11 +15,11 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-from collections.abc import Mapping
 from typing import (
     Any,
     Generic,
     List,
+    Mapping,
     Optional,
     Tuple,
     TypeVar,
@@ -31,6 +31,7 @@ from typing import (
 from typing_extensions import TypedDict, TypeGuard
 
 from dara.core.base_definitions import DaraBaseModel as BaseModel
+from dara.core.internal.hashing import hash_object
 
 JsonLike = Union[Mapping, List]
 
@@ -47,7 +48,7 @@ class Placeholder(TypedDict):
     Placeholder object 'Referrable' objects are replaced with
     """
 
-    __ref: str
+    __ref: str  # pylint: disable=unused-private-member
 
 
 class Referrable(TypedDict):
@@ -55,7 +56,7 @@ class Referrable(TypedDict):
     Describes an object which can be replaced by a Placeholder.
     """
 
-    __typename: str
+    __typename: str  # pylint: disable=unused-private-member
     uid: str
 
 
@@ -80,6 +81,10 @@ def _get_identifier(obj: Referrable) -> str:
         nested = ','.join(cast(List[str], obj['nested']))
         identifier = f'{identifier}:{nested}'
 
+    if _is_referrable_with_filters(obj):
+        filter_hash = hash_object(obj['filters'])
+        identifier = f'{identifier}:{filter_hash}'
+
     return identifier
 
 
@@ -128,11 +133,13 @@ def _loop(iterable: JsonLike):
 
 
 @overload
-def normalize(obj: Mapping, check_root: bool = True) -> Tuple[Mapping, Mapping]: ...
+def normalize(obj: Mapping, check_root: bool = True) -> Tuple[Mapping, Mapping]:
+    ...
 
 
 @overload
-def normalize(obj: List, check_root: bool = True) -> Tuple[List, Mapping]: ...
+def normalize(obj: List, check_root: bool = True) -> Tuple[List, Mapping]:
+    ...
 
 
 def normalize(obj: JsonLike, check_root: bool = True) -> Tuple[JsonLike, Mapping]:
@@ -162,7 +169,7 @@ def normalize(obj: JsonLike, check_root: bool = True) -> Tuple[JsonLike, Mapping
     for key, value in _loop(obj):
         # For iterables, recursively call normalize
         if isinstance(value, (dict, list)):
-            _normalized, _lookup = normalize(value) # type: ignore
+            _normalized, _lookup = normalize(value)  # type: ignore
             output[key] = _normalized  # type: ignore
             lookup.update(_lookup)
         else:
@@ -173,11 +180,13 @@ def normalize(obj: JsonLike, check_root: bool = True) -> Tuple[JsonLike, Mapping
 
 
 @overload
-def denormalize(normalized_obj: Mapping, lookup: Mapping) -> Mapping: ...
+def denormalize(normalized_obj: Mapping, lookup: Mapping) -> Mapping:
+    ...
 
 
 @overload
-def denormalize(normalized_obj: List, lookup: Mapping) -> List: ...
+def denormalize(normalized_obj: List, lookup: Mapping) -> List:
+    ...
 
 
 def denormalize(normalized_obj: JsonLike, lookup: Mapping) -> Optional[JsonLike]:
@@ -197,7 +206,7 @@ def denormalize(normalized_obj: JsonLike, lookup: Mapping) -> Optional[JsonLike]
     # Whole object is a placeholder
     if _is_placeholder(normalized_obj):
         ref = normalized_obj['__ref']
-        referrable = lookup.get(ref, None)
+        referrable = lookup[ref] if ref in lookup else None
 
         if isinstance(referrable, (list, dict)):
             return denormalize(referrable, lookup)
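
A note on the scheme above: normalize/denormalize implement reference deduplication. Any dict carrying `__typename` and `uid` (a Referrable) is replaced with a `{'__ref': <identifier>}` placeholder and stored once in a lookup table; the 1.20.1a1 side additionally mixes a hash of the entry's `filters` into that identifier, hence the new `hash_object` import. A minimal standalone sketch of the idea, with illustrative names rather than the package's exact code:

from typing import Any, Dict, Optional, Tuple


def _identifier(obj: dict) -> str:
    # Mirrors _get_identifier: '<typename>:<uid>' (Dara also appends nested keys / a filter hash)
    return f'{obj["__typename"]}:{obj["uid"]}'


def normalize(obj: Any, lookup: Optional[Dict[str, Any]] = None) -> Tuple[Any, Dict[str, Any]]:
    """Replace referrable dicts with placeholders, collecting each one once in `lookup`."""
    lookup = {} if lookup is None else lookup
    if isinstance(obj, dict):
        if '__typename' in obj and 'uid' in obj:
            ref = _identifier(obj)
            lookup[ref] = {k: normalize(v, lookup)[0] for k, v in obj.items()}
            return {'__ref': ref}, lookup
        return {k: normalize(v, lookup)[0] for k, v in obj.items()}, lookup
    if isinstance(obj, list):
        return [normalize(v, lookup)[0] for v in obj], lookup
    return obj, lookup


def denormalize(obj: Any, lookup: Dict[str, Any]) -> Any:
    """Inverse of normalize: resolve placeholders back into full objects."""
    if isinstance(obj, dict):
        if '__ref' in obj:
            return denormalize(lookup[obj['__ref']], lookup)
        return {k: denormalize(v, lookup) for k, v in obj.items()}
    if isinstance(obj, list):
        return [denormalize(v, lookup) for v in obj]
    return obj


variable = {'__typename': 'Variable', 'uid': 'abc', 'default': 1}
normalized, lookup = normalize({'a': variable, 'b': [variable]})
assert normalized == {'a': {'__ref': 'Variable:abc'}, 'b': [{'__ref': 'Variable:abc'}]}
assert denormalize(normalized, lookup) == {'a': variable, 'b': [variable]}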
dara/core/internal/pandas_utils.py

@@ -15,24 +15,13 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-import json
-import uuid
-from typing import Any, Literal, Optional, TypeVar, Union, cast, overload
+from typing import Optional, TypeVar
 
-from pandas import DataFrame, MultiIndex, Series
-from typing_extensions import TypedDict, TypeGuard
+from pandas import DataFrame, MultiIndex
 
 
 INDEX = '__index__'
 
-@overload
-def append_index(df: DataFrame) -> DataFrame: ...
-
-
-@overload
-def append_index(df: None) -> None: ...
-
-
 def append_index(df: Optional[DataFrame]) -> Optional[DataFrame]:
     """
     Add a numerical index column to the dataframe
@@ -42,7 +31,7 @@ def append_index(df: Optional[DataFrame]) -> Optional[DataFrame]:
 
     if INDEX not in df.columns:
         new_df = df.copy()
-        new_df.insert(0, INDEX, range(0, len(df.index)))  # type: ignore
+        new_df.insert(0, INDEX, range(0, len(df.index)))
        return new_df
 
     return df
@@ -58,7 +47,7 @@ def remove_index(value: value_type) -> value_type:
     Otherwise return same value untouched.
     """
     if isinstance(value, DataFrame):
-        return cast(value_type, value.drop(columns=['__index__'], inplace=False, errors='ignore'))
+        return value.drop(columns=['__index__'], inplace=False, errors='ignore')
 
     return value
 
@@ -76,12 +65,6 @@ def df_convert_to_internal(original_df: DataFrame) -> DataFrame:
     if any(isinstance(c, str) and c.startswith('__col__') for c in df.columns):
         return df
 
-    # Apply display transformations to the DataFrame
-    format_for_display(df)
-
-    # Append index to match the way we process the original DataFrame
-    df = cast(DataFrame, append_index(df))
-
     # Handle hierarchical columns: [(A, B), (A, C)] -> ['A_B', 'A_C']
     if isinstance(df.columns, MultiIndex):
         df.columns = ['_'.join(col).strip() if col[0] != INDEX else INDEX for col in df.columns.values]
@@ -107,89 +90,4 @@
 
 
 def df_to_json(df: DataFrame) -> str:
-    return df_convert_to_internal(df).to_json(orient='records', date_unit='ns') or ''
-
-
-def format_for_display(df: DataFrame) -> None:
-    """
-    Apply transformations to a DataFrame to make it suitable for display.
-    Not: this does NOT make a copy of the DataFrame
-    """
-    for col in df.columns:
-        column_data = df[col]
-        if isinstance(column_data, DataFrame):
-            # Handle duplicate column names - format each column in the sub-DataFrame
-            for sub_col in column_data.columns:
-                if isinstance(column_data[sub_col], Series) and column_data[sub_col].dtype == 'object':
-                    column_data.loc[:, sub_col] = column_data[sub_col].apply(str)
-        elif column_data.dtype == 'object':
-            # We need to convert all values to string to avoid issues with
-            # displaying data in the Table component, for example when
-            # displaying datetime and number objects in the same column
-            df.loc[:, col] = column_data.apply(str)
-
-
-class FieldType(TypedDict):
-    name: Union[str, tuple[str, ...]]
-    type: Literal['integer', 'number', 'boolean', 'datetime', 'duration', 'any', 'str']
-
-
-class DataFrameSchema(TypedDict):
-    fields: list[FieldType]
-    primaryKey: list[str]
-
-
-class DataResponse(TypedDict):
-    data: Optional[DataFrame]
-    count: int
-    schema: Optional[DataFrameSchema]
-
-
-def is_data_response(response: Any) -> TypeGuard[DataResponse]:
-    has_shape = isinstance(response, dict) and 'data' in response and 'count' in response
-    if not has_shape:
-        return False
-    return response['data'] is None or isinstance(response['data'], DataFrame)
-
-
-def data_response_to_json(response: DataResponse) -> str:
-    """
-    Serialize a DataResponse to JSON.
-
-    json.dumps() custom serializers only accept value->value mappings, whereas `to_json` on pandas returns a string directly.
-    To avoid double serialization, we first insert a placeholder string and then replace it with the actual serialized JSON.
-    """
-    placeholder = str(uuid.uuid4())
-
-    def _custom_serializer(obj: Any) -> Any:
-        if isinstance(obj, DataFrame):
-            return placeholder
-        raise TypeError(f'Object of type {type(obj)} is not JSON serializable')
-
-    result = json.dumps(response, default=_custom_serializer)
-    result = result.replace(
-        f'"{placeholder}"', df_to_json(response['data']) if response['data'] is not None else 'null'
-    )
-    return result
-
-
-def build_data_response(data: DataFrame, count: int) -> DataResponse:
-    data_internal = df_convert_to_internal(data)
-    schema = get_schema(data_internal)
-
-    return DataResponse(data=data, count=count, schema=schema)
-
-
-def get_schema(df: DataFrame):
-    from pandas.io.json._table_schema import build_table_schema
-
-    raw_schema = build_table_schema(df)
-
-    for field_data in cast(list, raw_schema['fields']):
-        if field_data.get('type') == 'datetime':
-            # for datetime fields we need to know the resolution, so we get the actual e.g. `datetime64[ns]` string
-            column_name = field_data.get('name')
-            dtype_str = str(df[column_name].dtype)
-            field_data['type'] = dtype_str
-
-    return cast(DataFrameSchema, raw_schema)
+    return df_convert_to_internal(df).to_json(orient='records') or ''
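
The removed `data_response_to_json` helper is worth a note: `json.dumps` custom serializers must return JSON-encodable values, while pandas' `to_json` already returns a serialized string, so naively embedding a DataFrame in a larger payload would serialize it twice. A self-contained sketch of the placeholder-splice trick it used (illustrative, not the exact removed code):

import json
import uuid

from pandas import DataFrame


def dict_with_df_to_json(payload: dict) -> str:
    """Serialize a dict that may contain a DataFrame under 'data'."""
    placeholder = str(uuid.uuid4())  # unlikely to collide with any real value

    def _default(obj):
        if isinstance(obj, DataFrame):
            return placeholder  # stand-in, spliced out below
        raise TypeError(f'Object of type {type(obj)} is not JSON serializable')

    result = json.dumps(payload, default=_default)
    df = payload.get('data')
    # Replace the quoted placeholder with the pre-serialized frame
    return result.replace(
        f'"{placeholder}"',
        df.to_json(orient='records') if isinstance(df, DataFrame) else 'null',
    )


print(dict_with_df_to_json({'data': DataFrame({'x': [1, 2]}), 'count': 2}))
# {"data": [{"x":1},{"x":2}], "count": 2}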
dara/core/internal/pool/definitions.py

@@ -95,7 +95,7 @@ class TaskDefinition:
     def __await__(self):
         """Await the underlying event, then return or raise the result"""
         yield from self.event.wait().__await__()
-        if isinstance(self.result, BaseException):
+        if isinstance(self.result, Exception):
             raise self.result
         return self.result
 
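The `__await__` above makes the task definition itself awaitable: a caller can `await task`, which blocks on the event until a worker stores a result, then either raises the stored exception or returns the value. A minimal reproduction of the pattern with anyio (a sketch, not the package's actual TaskDefinition):

import anyio


class AwaitableResult:
    """Awaitable holder: store a result (or exception) once, await it from anywhere."""

    def __init__(self):
        self.event = anyio.Event()
        self.result = None

    def set_result(self, result):
        self.result = result
        self.event.set()

    def __await__(self):
        # Delegate to the event's wait() coroutine, then surface the stored result
        yield from self.event.wait().__await__()
        if isinstance(self.result, Exception):
            raise self.result
        return self.result


async def main():
    holder = AwaitableResult()

    async def worker():
        await anyio.sleep(0.1)
        holder.set_result(42)

    async with anyio.create_task_group() as tg:
        tg.start_soon(worker)
        print(await holder)  # -> 42


anyio.run(main)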
dara/core/internal/pool/task_pool.py

@@ -16,11 +16,10 @@ limitations under the License.
 """
 
 import atexit
-from collections.abc import Coroutine
 from contextlib import contextmanager
 from datetime import datetime
 from multiprocessing import active_children
-from typing import Any, Callable, Dict, Optional, Union, cast
+from typing import Any, Callable, Coroutine, Dict, Optional, cast
 
 import anyio
 from anyio.abc import TaskGroup
@@ -103,18 +102,16 @@ class TaskPool:
         try:
             await wait_while(
                 lambda: self.status != PoolStatus.RUNNING
-                or len(self.workers) != self.desired_workers
+                or not len(self.workers) == self.desired_workers
                 or not all(w.status == WorkerStatus.IDLE for w in self.workers.values()),
                 timeout=timeout,
             )
-        except TimeoutError as e:
-            raise RuntimeError('Failed to start pool') from e
+        except TimeoutError:
+            raise RuntimeError('Failed to start pool')
         else:
             raise RuntimeError('Pool already started')
 
-    def submit(
-        self, task_uid: str, function_name: str, args: Union[tuple, None] = None, kwargs: Union[dict, None] = None
-    ) -> TaskDefinition:
+    def submit(self, task_uid: str, function_name: str, args: tuple = (), kwargs: dict = {}) -> TaskDefinition:
         """
         Submit a new task to the pool
 
@@ -123,10 +120,6 @@
         :param args: list of arguments to pass to the function
         :param kwargs: dict of kwargs to pass to the function
         """
-        if args is None:
-            args = ()
-        if kwargs is None:
-            kwargs = {}
         self._check_pool_state()
 
         # Create a task definition to keep track of its progress
@@ -158,7 +151,7 @@
 
         task = self.tasks.pop(task_uid)
         if not task.event.is_set():
-            task.result = anyio.get_cancelled_exc_class()()
+            task.result = Exception('Task cancelled')
             task.event.set()
 
         # Task in progress, stop the worker
@@ -470,8 +463,9 @@
             )
         elif is_log(worker_msg):
             dev_logger.info(f'Task: {worker_msg["task_uid"]}', {'logs': worker_msg['log']})
-        elif is_progress(worker_msg) and worker_msg['task_uid'] in self._progress_subscribers:
-            await self._progress_subscribers[worker_msg['task_uid']](worker_msg['progress'], worker_msg['message'])
+        elif is_progress(worker_msg):
+            if worker_msg['task_uid'] in self._progress_subscribers:
+                await self._progress_subscribers[worker_msg['task_uid']](worker_msg['progress'], worker_msg['message'])
 
     async def _wait_queue_depletion(self, timeout: Optional[float] = None):
         """
@@ -484,8 +478,8 @@
                 condition=lambda: self.status in (PoolStatus.CLOSED, PoolStatus.RUNNING) and len(self.tasks) > 0,
                 timeout=timeout,
             )
-        except TimeoutError as e:
-            raise TimeoutError('Tasks are still being executed') from e
+        except TimeoutError:
+            raise TimeoutError('Tasks are still being executed')
 
     async def _core_loop(self):
         """
@@ -499,14 +493,11 @@
             while self.status not in (PoolStatus.ERROR, PoolStatus.STOPPED):
                 await anyio.sleep(0.1)
 
-                try:
-                    self._handle_excess_workers()
-                    self._handle_orphaned_workers()
-                    self._handle_dead_workers()
-                    self._create_workers()
-                    await self._process_next_worker_message()
-                except Exception as e:
-                    dev_logger.error('Error in task pool', e)
+                self._handle_excess_workers()
+                self._handle_orphaned_workers()
+                self._handle_dead_workers()
+                self._create_workers()
+                await self._process_next_worker_message()
         finally:
             self.loop_stopped.set()
 
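One caution on the new `submit()` signature: the 1.20.0a1 side uses None sentinels for args/kwargs, the idiomatic guard against Python's shared-mutable-default pitfall, whereas `kwargs: dict = {}` creates a single dict at definition time that is shared by every call that omits the argument. The pitfall in isolation:

def submit_shared(task, queue=[]):  # one list, created once at def time
    queue.append(task)
    return queue


def submit_safe(task, queue=None):  # sentinel: fresh list per call
    if queue is None:
        queue = []
    queue.append(task)
    return queue


print(submit_shared('a'), submit_shared('b'))  # ['a', 'b'] ['a', 'b'] - state leaks across calls
print(submit_safe('a'), submit_safe('b'))      # ['a'] ['b']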
dara/core/internal/pool/utils.py

@@ -27,8 +27,6 @@ from typing import Any, Callable, Optional, Tuple
 import anyio
 from tblib import Traceback
 
-from dara.core.logging import dev_logger
-
 
 class SubprocessException:
     """
@@ -91,7 +89,9 @@ def read_from_shared_memory(pointer: SharedMemoryPointer) -> Any:
     data = shared_mem.buf[:data_size]
 
     # Unpickle and deepcopy
-    decoded_payload_shared = pickle.loads(shared_mem.buf)  # nosec B301 # we trust the shared memory pointer passed by the pool
+    decoded_payload_shared = pickle.loads(
+        shared_mem.buf
+    )  # nosec B301 # we trust the shared memory pointer passed by the pool
     decoded_payload = copy.deepcopy(decoded_payload_shared)
 
     # Cleanup
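
`read_from_shared_memory` deep-copies what it unpickles because objects deserialized from a SharedMemory buffer can keep views into memory that is about to be closed. A self-contained sketch of the write/read round trip using only the stdlib (illustrative; Dara's actual SharedMemoryPointer layout may differ):

import copy
import pickle
from multiprocessing import shared_memory

payload = {'result': [1, 2, 3]}

# Writer: pickle into a fresh shared memory block
data = pickle.dumps(payload)
shm = shared_memory.SharedMemory(create=True, size=len(data))
shm.buf[: len(data)] = data

# Reader: unpickle, then deepcopy so nothing references shm.buf after cleanup
reader = shared_memory.SharedMemory(name=shm.name)
decoded = pickle.loads(reader.buf)  # nosec - trusted data in this sketch
result = copy.deepcopy(decoded)
reader.close()

# Writer-side cleanup
shm.close()
shm.unlink()

assert result == payload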
@@ -133,23 +133,20 @@ async def stop_process_async(process: BaseProcess, timeout: float = 3):
     # Terminate and wait for it to shutdown
     process.terminate()
 
-    try:
-        # mimic process.join() in an async way to not block
-        await wait_while(process.is_alive, timeout)
-
-        # If it's still alive
-        if process.is_alive():
-            try:
-                os.kill(process.pid, signal.SIGKILL)
-                await wait_while(process.is_alive, timeout)
-            except OSError as e:
-                raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}') from e
-
-        # If it's still alive raise an exception
-        if process.is_alive():
+    # mimic process.join() in an async way to not block
+    await wait_while(process.is_alive, timeout)
+
+    # If it's still alive
+    if process.is_alive():
+        try:
+            os.kill(process.pid, signal.SIGKILL)
+            await wait_while(process.is_alive, timeout)
+        except OSError:
             raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}')
-    except Exception as e:
-        dev_logger.error('Error stopping process', e)
+
+    # If it's still alive raise an exception
+    if process.is_alive():
+        raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}')
 
 
 def stop_process(process: BaseProcess, timeout: float = 3):
@@ -174,8 +171,8 @@ def stop_process(process: BaseProcess, timeout: float = 3):
         try:
             os.kill(process.pid, signal.SIGKILL)
             process.join(timeout)
-        except OSError as e:
-            raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}') from e
+        except OSError:
+            raise RuntimeError(f'Unable to terminate subprocess with PID {process.pid}')
 
     # If it's still alive raise an exception
     if process.is_alive():
dara/core/internal/pool/worker.py

@@ -57,8 +57,7 @@ class StdoutLogger:
         self.channel.worker_api.log(self.task_uid, msg)
 
     def flush(self):
-        if sys.__stdout__:
-            sys.__stdout__.flush()
+        sys.__stdout__.flush()
 
 
 def execute_function(func: Callable, args: tuple, kwargs: dict):
@@ -165,7 +164,7 @@ def worker_loop(worker_params: WorkerParameters, channel: Channel):
 
     # Redirect logs via the channel
     stdout_logger = StdoutLogger(task_uid, channel)
-    sys.stdout = stdout_logger # type: ignore
+    sys.stdout = stdout_logger  # type: ignore
 
     try:
         payload_pointer = task['payload']
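
The worker redirects sys.stdout to StdoutLogger so that print() output inside tasks travels over the pool's channel; any object exposing write() and flush() can stand in for a file here. A minimal sketch of the same redirection with a plain callback (everything below is illustrative, not the package's Channel API):

import sys


class ForwardingStdout:
    """File-like shim: forward writes to a callback, mirror flush to the real stdout."""

    def __init__(self, send):
        self.send = send

    def write(self, msg: str) -> int:
        if msg.strip():  # skip the bare newlines print() emits separately
            self.send(msg)
        return len(msg)

    def flush(self):
        if sys.__stdout__:  # may be None in embedded/GUI interpreters
            sys.__stdout__.flush()


captured = []
old_stdout = sys.stdout
sys.stdout = ForwardingStdout(captured.append)
try:
    print('hello from the task')
finally:
    sys.stdout = old_stdout  # always restore

print(captured)  # ['hello from the task']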
dara/core/internal/port_utils.py

@@ -27,7 +27,7 @@ def is_available(host: str, port: int) -> bool:
     """
     try:
         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
-            sock.settimeout(2.0) # timeout in case port is blocked
+            sock.settimeout(2.0)  # timeout in case port is blocked
             return sock.connect_ex((host, port)) != 0
     except BaseException:
         return False
dara/core/internal/registries.py

@@ -15,9 +15,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-from collections.abc import Mapping
 from datetime import datetime
-from typing import Any, Callable, Set
+from typing import Any, Callable, Mapping, Set
 
 from dara.core.auth import BaseAuthConfig
 from dara.core.base_definitions import ActionDef, ActionResolverDef, UploadResolverDef
@@ -27,25 +26,23 @@ from dara.core.definitions import (
     EndpointConfiguration,
     Template,
 )
+from dara.core.interactivity.data_variable import DataVariableRegistryEntry
 from dara.core.interactivity.derived_variable import (
     DerivedVariableRegistryEntry,
     LatestValueRegistryEntry,
 )
-from dara.core.interactivity.server_variable import ServerVariableRegistryEntry
-from dara.core.internal.download import DownloadDataEntry
 from dara.core.internal.registry import Registry, RegistryType
 from dara.core.internal.websocket import CustomClientMessagePayload
 from dara.core.persistence import BackendStoreEntry
 
-action_def_registry = Registry[ActionDef](RegistryType.ACTION_DEF, CORE_ACTIONS) # all registered actions
-action_registry = Registry[ActionResolverDef](RegistryType.ACTION) # functions for actions requiring backend calls
+action_def_registry = Registry[ActionDef](RegistryType.ACTION_DEF, CORE_ACTIONS)  # all registered actions
+action_registry = Registry[ActionResolverDef](RegistryType.ACTION)  # functions for actions requiring backend calls
 upload_resolver_registry = Registry[UploadResolverDef](
     RegistryType.UPLOAD_RESOLVER
-) # functions for upload resolvers requiring backend calls
+)  # functions for upload resolvers requiring backend calls
 component_registry = Registry[ComponentTypeAnnotation](RegistryType.COMPONENTS, CORE_COMPONENTS)
 config_registry = Registry[EndpointConfiguration](RegistryType.ENDPOINT_CONFIG)
-server_variable_registry = Registry[ServerVariableRegistryEntry](RegistryType.SERVER_VARIABLE, allow_duplicates=False)
-"""map of server variable uid -> server variable entry"""
+data_variable_registry = Registry[DataVariableRegistryEntry](RegistryType.DATA_VARIABLE, allow_duplicates=False)
 derived_variable_registry = Registry[DerivedVariableRegistryEntry](
     RegistryType.DERIVED_VARIABLE, allow_duplicates=False
 )
@@ -72,6 +69,3 @@ custom_ws_handlers_registry = Registry[Callable[[str, CustomClientMessagePayload
 
 backend_store_registry = Registry[BackendStoreEntry](RegistryType.BACKEND_STORE, allow_duplicates=False)
 """map of store uid -> store instance"""
-
-download_code_registry = Registry[DownloadDataEntry](RegistryType.DOWNLOAD_CODE, allow_duplicates=False)
-"""map of download codes -> download data entry, used only to allow overriding download code behaviour via RegistryLookup"""
dara/core/internal/registry.py

@@ -16,9 +16,8 @@ limitations under the License.
 """
 
 import copy
-from collections.abc import MutableMapping
 from enum import Enum
-from typing import Generic, Optional, TypeVar
+from typing import Generic, MutableMapping, Optional, TypeVar
 
 from dara.core.metrics import CACHE_METRICS_TRACKER, total_size
 
@@ -32,7 +31,7 @@ class RegistryType(str, Enum):
     DOWNLOAD = 'Download'
     COMPONENTS = 'Components'
     ENDPOINT_CONFIG = 'Endpoint Configuration'
-    SERVER_VARIABLE = 'ServerVariable'
+    DATA_VARIABLE = 'DataVariable'
     DERIVED_VARIABLE = 'DerivedVariable'
     LAST_VALUE = 'LatestValue'
     TEMPLATE = 'Template'
@@ -44,7 +43,6 @@
     PENDING_TOKENS = 'Pending tokens'
     CUSTOM_WS_HANDLERS = 'Custom WS handlers'
     BACKEND_STORE = 'Backend Store'
-    DOWNLOAD_CODE = 'Download Code'
 
 
 class Registry(Generic[T]):
dara/core/internal/registry_lookup.py

@@ -15,8 +15,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-from collections.abc import Coroutine
-from typing import Callable, Dict, Literal, TypeVar, Union
+from typing import Callable, Coroutine, Dict, Literal
 
 from dara.core.internal.registry import Registry, RegistryType
 from dara.core.internal.utils import async_dedupe
@@ -24,30 +23,25 @@ from dara.core.internal.utils import async_dedupe
 RegistryLookupKey = Literal[
     RegistryType.ACTION,
     RegistryType.COMPONENTS,
+    RegistryType.DATA_VARIABLE,
     RegistryType.DERIVED_VARIABLE,
-    RegistryType.SERVER_VARIABLE,
     RegistryType.STATIC_KWARGS,
     RegistryType.UPLOAD_RESOLVER,
     RegistryType.BACKEND_STORE,
-    RegistryType.DOWNLOAD_CODE,
 ]
 CustomRegistryLookup = Dict[RegistryLookupKey, Callable[[str], Coroutine]]
 
-RegistryType = TypeVar('RegistryType')
-
 
 class RegistryLookup:
     """
     Manages registry Lookup.
     """
 
-    def __init__(self, handlers: Union[CustomRegistryLookup, None] = None):
-        if handlers is None:
-            handlers = {}
+    def __init__(self, handlers: CustomRegistryLookup = {}):
         self.handlers = handlers
 
     @async_dedupe
-    async def get(self, registry: Registry[RegistryType], uid: str) -> RegistryType:
+    async def get(self, registry: Registry, uid: str):
         """
         Get the entry from registry by uid.
         If uid is not in registry and it has a external handler that defined, will execute the handler
@@ -68,4 +62,4 @@
             return entry
         raise ValueError(
             f'Could not find uid {uid} in {registry.name} registry, did you register it before the app was initialized?'
-        ) from e
+        ).with_traceback(e.__traceback__)
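
For context on the class above: `handlers` is keyed by registry type, and `get()` falls back to the matching async handler when a uid misses the in-memory registry. Note that both versions keep the original KeyError visible, 1.20.0a1 by chaining with `from e` and 1.20.1a1 by grafting the old traceback onto the new error via `with_traceback()`. A simplified sketch of the fallback shape (the handler and fetch function are hypothetical):

from typing import Awaitable, Callable, Dict, Optional

import anyio


class SimpleLookup:
    """Dict-backed lookup with per-registry async fallbacks."""

    def __init__(self, handlers: Optional[Dict[str, Callable[[str], Awaitable]]] = None):
        self.handlers = handlers or {}  # None sentinel avoids a shared mutable default

    async def get(self, registry_name: str, registry: dict, uid: str):
        try:
            return registry[uid]
        except KeyError as e:
            if registry_name in self.handlers:
                entry = await self.handlers[registry_name](uid)
                registry[uid] = entry  # cache so the handler runs once per uid
                return entry
            raise ValueError(f'Could not find uid {uid} in {registry_name} registry') from e


async def fetch_derived_variable(uid: str) -> dict:
    # Hypothetical external fetch, e.g. rebuilding an entry from a database
    return {'uid': uid, 'source': 'db'}


async def main():
    lookup = SimpleLookup({'DerivedVariable': fetch_derived_variable})
    print(await lookup.get('DerivedVariable', {}, 'dv-1'))  # {'uid': 'dv-1', 'source': 'db'}


anyio.run(main)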