flyte 2.0.0b22__py3-none-any.whl → 2.0.0b24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic. Click here for more details.

Files changed (90)
  1. flyte/__init__.py +7 -1
  2. flyte/_bin/runtime.py +35 -5
  3. flyte/_cache/cache.py +4 -2
  4. flyte/_cache/local_cache.py +215 -0
  5. flyte/_code_bundle/bundle.py +10 -2
  6. flyte/_context.py +4 -1
  7. flyte/_debug/constants.py +0 -1
  8. flyte/_debug/vscode.py +6 -1
  9. flyte/_deploy.py +193 -52
  10. flyte/_environment.py +5 -0
  11. flyte/_excepthook.py +1 -1
  12. flyte/_image.py +104 -75
  13. flyte/_initialize.py +51 -0
  14. flyte/_internal/controllers/_local_controller.py +64 -24
  15. flyte/_internal/controllers/remote/_action.py +4 -1
  16. flyte/_internal/controllers/remote/_controller.py +5 -2
  17. flyte/_internal/controllers/remote/_core.py +6 -3
  18. flyte/_internal/controllers/remote/_informer.py +1 -1
  19. flyte/_internal/imagebuild/docker_builder.py +92 -28
  20. flyte/_internal/imagebuild/image_builder.py +7 -13
  21. flyte/_internal/imagebuild/remote_builder.py +6 -1
  22. flyte/_internal/runtime/io.py +13 -1
  23. flyte/_internal/runtime/rusty.py +17 -2
  24. flyte/_internal/runtime/task_serde.py +11 -21
  25. flyte/_internal/runtime/taskrunner.py +1 -1
  26. flyte/_internal/runtime/trigger_serde.py +153 -0
  27. flyte/_logging.py +1 -1
  28. flyte/_protos/common/identifier_pb2.py +19 -1
  29. flyte/_protos/common/identifier_pb2.pyi +22 -0
  30. flyte/_protos/workflow/common_pb2.py +14 -3
  31. flyte/_protos/workflow/common_pb2.pyi +49 -0
  32. flyte/_protos/workflow/queue_service_pb2.py +41 -35
  33. flyte/_protos/workflow/queue_service_pb2.pyi +26 -12
  34. flyte/_protos/workflow/queue_service_pb2_grpc.py +34 -0
  35. flyte/_protos/workflow/run_definition_pb2.py +38 -38
  36. flyte/_protos/workflow/run_definition_pb2.pyi +4 -2
  37. flyte/_protos/workflow/run_service_pb2.py +60 -50
  38. flyte/_protos/workflow/run_service_pb2.pyi +24 -6
  39. flyte/_protos/workflow/run_service_pb2_grpc.py +34 -0
  40. flyte/_protos/workflow/task_definition_pb2.py +15 -11
  41. flyte/_protos/workflow/task_definition_pb2.pyi +19 -2
  42. flyte/_protos/workflow/task_service_pb2.py +18 -17
  43. flyte/_protos/workflow/task_service_pb2.pyi +5 -2
  44. flyte/_protos/workflow/trigger_definition_pb2.py +66 -0
  45. flyte/_protos/workflow/trigger_definition_pb2.pyi +117 -0
  46. flyte/_protos/workflow/trigger_definition_pb2_grpc.py +4 -0
  47. flyte/_protos/workflow/trigger_service_pb2.py +96 -0
  48. flyte/_protos/workflow/trigger_service_pb2.pyi +110 -0
  49. flyte/_protos/workflow/trigger_service_pb2_grpc.py +281 -0
  50. flyte/_run.py +42 -15
  51. flyte/_task.py +36 -4
  52. flyte/_task_environment.py +62 -15
  53. flyte/_trigger.py +382 -0
  54. flyte/_version.py +3 -3
  55. flyte/cli/_abort.py +3 -3
  56. flyte/cli/_build.py +1 -3
  57. flyte/cli/_common.py +29 -2
  58. flyte/cli/_create.py +74 -0
  59. flyte/cli/_delete.py +23 -1
  60. flyte/cli/_deploy.py +13 -9
  61. flyte/cli/_get.py +75 -34
  62. flyte/cli/_params.py +4 -2
  63. flyte/cli/_run.py +27 -22
  64. flyte/cli/_update.py +36 -0
  65. flyte/cli/_user.py +17 -0
  66. flyte/cli/main.py +9 -1
  67. flyte/errors.py +9 -0
  68. flyte/extend.py +4 -0
  69. flyte/io/_dir.py +513 -115
  70. flyte/io/_file.py +495 -135
  71. flyte/models.py +32 -0
  72. flyte/remote/__init__.py +6 -1
  73. flyte/remote/_client/_protocols.py +36 -2
  74. flyte/remote/_client/controlplane.py +19 -3
  75. flyte/remote/_run.py +42 -2
  76. flyte/remote/_task.py +14 -1
  77. flyte/remote/_trigger.py +308 -0
  78. flyte/remote/_user.py +33 -0
  79. flyte/storage/__init__.py +6 -1
  80. flyte/storage/_storage.py +119 -101
  81. flyte/types/_pickle.py +16 -3
  82. {flyte-2.0.0b22.data → flyte-2.0.0b24.data}/scripts/runtime.py +35 -5
  83. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/METADATA +3 -1
  84. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/RECORD +89 -77
  85. flyte/_protos/secret/secret_pb2_grpc_grpc.py +0 -198
  86. {flyte-2.0.0b22.data → flyte-2.0.0b24.data}/scripts/debug.py +0 -0
  87. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/WHEEL +0 -0
  88. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/entry_points.txt +0 -0
  89. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/licenses/LICENSE +0 -0
  90. {flyte-2.0.0b22.dist-info → flyte-2.0.0b24.dist-info}/top_level.txt +0 -0
flyte/storage/_storage.py CHANGED
@@ -1,3 +1,5 @@
1
+ from __future__ import annotations
2
+
1
3
  import os
2
4
  import pathlib
3
5
  import random
@@ -7,6 +9,7 @@ from typing import AsyncGenerator, Optional
7
9
  from uuid import UUID
8
10
 
9
11
  import fsspec
12
+ import obstore
10
13
  from fsspec.asyn import AsyncFileSystem
11
14
  from fsspec.utils import get_protocol
12
15
  from obstore.exceptions import GenericError
@@ -14,7 +17,10 @@ from obstore.fsspec import register
14
17
 
15
18
  from flyte._initialize import get_storage
16
19
  from flyte._logging import logger
17
- from flyte.errors import InitializationError
20
+ from flyte.errors import InitializationError, OnlyAsyncIOSupportedError
21
+
22
+ if typing.TYPE_CHECKING:
23
+ from obstore import AsyncReadableFile, AsyncWritableFile
18
24
 
19
25
  _OBSTORE_SUPPORTED_PROTOCOLS = ["s3", "gs", "abfs", "abfss"]
20
26
 
@@ -204,34 +210,48 @@ async def put(from_path: str, to_path: Optional[str] = None, recursive: bool = F
204
210
  return to_path
205
211
 
206
212
 
207
- async def _put_stream_obstore_bypass(data_iterable: typing.AsyncIterable[bytes] | bytes, to_path: str, **kwargs) -> str:
213
+ async def _open_obstore_bypass(path: str, mode: str = "rb", **kwargs) -> AsyncReadableFile | AsyncWritableFile:
208
214
  """
209
- NOTE: This can break if obstore changes its API.
210
-
211
- This function is a workaround for obstore's fsspec implementation which does not support async file operations.
212
- It uses the synchronous methods directly to put a stream of data.
215
+ Simple obstore bypass for opening files. No fallbacks, obstore only.
213
216
  """
214
- import obstore
215
217
  from obstore.store import ObjectStore
216
218
 
217
- fs = get_underlying_filesystem(path=to_path)
218
- if not hasattr(fs, "_split_path") or not hasattr(fs, "_construct_store"):
219
- raise NotImplementedError(f"Obstore bypass not supported for {fs.protocol} protocol, methods missing.")
220
- bucket, path = fs._split_path(to_path) # pylint: disable=W0212
219
+ fs = get_underlying_filesystem(path=path)
220
+ bucket, file_path = fs._split_path(path) # pylint: disable=W0212
221
221
  store: ObjectStore = fs._construct_store(bucket)
222
- if "attributes" in kwargs:
223
- attributes = kwargs.pop("attributes")
224
- else:
225
- attributes = {}
226
- buf_file = obstore.open_writer_async(store, path, attributes=attributes)
227
- if isinstance(data_iterable, bytes):
228
- await buf_file.write(data_iterable)
229
- else:
230
- async for data in data_iterable:
231
- await buf_file.write(data)
232
- # await buf_file.flush()
233
- await buf_file.close()
234
- return to_path
222
+
223
+ file_handle: AsyncReadableFile | AsyncWritableFile
224
+
225
+ if "w" in mode:
226
+ attributes = kwargs.pop("attributes", {})
227
+ file_handle = obstore.open_writer_async(store, file_path, attributes=attributes)
228
+ else: # read mode
229
+ buffer_size = kwargs.pop("buffer_size", 10 * 2**20)
230
+ file_handle = await obstore.open_reader_async(store, file_path, buffer_size=buffer_size)
231
+
232
+ return file_handle
233
+
234
+
235
+ async def open(path: str, mode: str = "rb", **kwargs) -> AsyncReadableFile | AsyncWritableFile:
236
+ """
237
+ Asynchronously open a file and return an async context manager.
238
+ This function checks if the underlying filesystem supports obstore bypass.
239
+ If it does, it uses obstore to open the file. Otherwise, it falls back to
240
+ the standard _open function which uses AsyncFileSystem.
241
+
242
+ It will raise NotImplementedError if neither obstore nor AsyncFileSystem is supported.
243
+ """
244
+ fs = get_underlying_filesystem(path=path)
245
+
246
+ # Check if we should use obstore bypass
247
+ if _is_obstore_supported_protocol(fs.protocol) and hasattr(fs, "_split_path") and hasattr(fs, "_construct_store"):
248
+ return await _open_obstore_bypass(path, mode, **kwargs)
249
+
250
+ # Fallback to normal open
251
+ if isinstance(fs, AsyncFileSystem):
252
+ return await fs.open_async(path, mode, **kwargs)
253
+
254
+ raise OnlyAsyncIOSupportedError(f"Filesystem {fs} does not support async operations")
235
255
 
236
256
 
237
257
  async def put_stream(
@@ -259,60 +279,31 @@ async def put_stream(
259
279
 
260
280
  ctx = internal_ctx()
261
281
  to_path = ctx.raw_data.get_random_remote_path(file_name=name)
262
- fs = get_underlying_filesystem(path=to_path)
263
282
 
264
- file_handle = None
265
- if isinstance(fs, AsyncFileSystem):
266
- try:
267
- if _is_obstore_supported_protocol(fs.protocol):
268
- # If the protocol is supported by obstore, use the obstore bypass method
269
- return await _put_stream_obstore_bypass(data_iterable, to_path=to_path, **kwargs)
270
- file_handle = await fs.open_async(to_path, "wb", **kwargs)
271
- if isinstance(data_iterable, bytes):
272
- await file_handle.write(data_iterable)
273
- else:
274
- async for data in data_iterable:
275
- await file_handle.write(data)
276
- return str(to_path)
277
- except NotImplementedError as e:
278
- logger.debug(f"{fs} doesn't implement 'open_async', falling back to sync, {e}")
279
- finally:
280
- if file_handle is not None:
281
- await file_handle.close()
282
-
283
- with fs.open(to_path, "wb", **kwargs) as f:
283
+ # Check if we should use obstore bypass
284
+ fs = get_underlying_filesystem(path=to_path)
285
+ try:
286
+ file_handle = typing.cast("AsyncWritableFile", await open(to_path, "wb", **kwargs))
284
287
  if isinstance(data_iterable, bytes):
285
- f.write(data_iterable)
288
+ await file_handle.write(data_iterable)
286
289
  else:
287
- # If data_iterable is async iterable, iterate over it and write each chunk to the file
288
290
  async for data in data_iterable:
289
- f.write(data)
290
- return str(to_path)
291
-
292
-
293
- async def _get_stream_obstore_bypass(path: str, chunk_size, **kwargs) -> AsyncGenerator[bytes, None]:
294
- """
295
- NOTE: This can break if obstore changes its API.
296
- This function is a workaround for obstore's fsspec implementation which does not support async file operations.
297
- It uses the synchronous methods directly to get a stream of data.
298
- """
299
- import obstore
300
- from obstore.store import ObjectStore
291
+ await file_handle.write(data)
292
+ await file_handle.close()
293
+ return str(to_path)
294
+ except OnlyAsyncIOSupportedError:
295
+ pass
296
+
297
+ # Fallback to normal open
298
+ file_handle_io: typing.IO = fs.open(to_path, mode="wb", **kwargs)
299
+ if isinstance(data_iterable, bytes):
300
+ file_handle_io.write(data_iterable)
301
+ else:
302
+ async for data in data_iterable:
303
+ file_handle_io.write(data)
304
+ file_handle_io.close()
301
305
 
302
- fs = get_underlying_filesystem(path=path)
303
- if not hasattr(fs, "_split_path") or not hasattr(fs, "_construct_store"):
304
- raise NotImplementedError(f"Obstore bypass not supported for {fs.protocol} protocol, methods missing.")
305
- bucket, rem_path = fs._split_path(path) # pylint: disable=W0212
306
- store: ObjectStore = fs._construct_store(bucket)
307
- buf_file = await obstore.open_reader_async(store, rem_path, buffer_size=chunk_size)
308
- try:
309
- while True:
310
- chunk = await buf_file.read()
311
- if not chunk:
312
- break
313
- yield bytes(chunk)
314
- finally:
315
- buf_file.close()
306
+ return str(to_path)
316
307
 
317
308
 
318
309
  async def get_stream(path: str, chunk_size=10 * 2**20, **kwargs) -> AsyncGenerator[bytes, None]:
@@ -322,42 +313,41 @@ async def get_stream(path: str, chunk_size=10 * 2**20, **kwargs) -> AsyncGenerat
322
313
  Example usage:
323
314
  ```python
324
315
  import flyte.storage as storage
325
- obj = storage.get_stream(path="s3://my_bucket/my_file.txt")
316
+ async for chunk in storage.get_stream(path="s3://my_bucket/my_file.txt"):
317
+ process(chunk)
326
318
  ```
327
319
 
328
320
  :param path: Path to the remote location where the data will be downloaded.
329
321
  :param kwargs: Additional arguments to be passed to the underlying filesystem.
330
322
  :param chunk_size: Size of each chunk to be read from the file.
331
- :return: An async iterator that yields chunks of data.
323
+ :return: An async iterator that yields chunks of bytes.
332
324
  """
333
- fs = get_underlying_filesystem(path=path, **kwargs)
325
+ # Check if we should use obstore bypass
326
+ fs = get_underlying_filesystem(path=path)
327
+ if _is_obstore_supported_protocol(fs.protocol) and hasattr(fs, "_split_path") and hasattr(fs, "_construct_store"):
328
+ # Set buffer_size for obstore if chunk_size is provided
329
+ if "buffer_size" not in kwargs:
330
+ kwargs["buffer_size"] = chunk_size
331
+ file_handle = typing.cast("AsyncReadableFile", await _open_obstore_bypass(path, "rb", **kwargs))
332
+ while chunk := await file_handle.read():
333
+ yield bytes(chunk)
334
+ return
334
335
 
335
- file_size = fs.info(path)["size"]
336
- total_read = 0
337
- file_handle = None
338
- try:
339
- if _is_obstore_supported_protocol(fs.protocol):
340
- # If the protocol is supported by obstore, use the obstore bypass method
341
- async for x in _get_stream_obstore_bypass(path, chunk_size=chunk_size, **kwargs):
342
- yield x
343
- return
344
- if isinstance(fs, AsyncFileSystem):
345
- file_handle = await fs.open_async(path, "rb")
346
- while chunk := await file_handle.read(min(chunk_size, file_size - total_read)):
347
- total_read += len(chunk)
348
- yield chunk
349
- return
350
- except NotImplementedError as e:
351
- logger.debug(f"{fs} doesn't implement 'open_async', falling back to sync, error: {e}")
352
- finally:
353
- if file_handle is not None:
354
- file_handle.close()
355
-
356
- # Sync fallback
357
- with fs.open(path, "rb") as file_handle:
358
- while chunk := file_handle.read(min(chunk_size, file_size - total_read)):
359
- total_read += len(chunk)
336
+ # Fallback to normal open
337
+ if "block_size" not in kwargs:
338
+ kwargs["block_size"] = chunk_size
339
+
340
+ if isinstance(fs, AsyncFileSystem):
341
+ file_handle = await fs.open_async(path, "rb", **kwargs)
342
+ while chunk := await file_handle.read():
360
343
  yield chunk
344
+ await file_handle.close()
345
+ return
346
+
347
+ file_handle = fs.open(path, "rb", **kwargs)
348
+ while chunk := file_handle.read():
349
+ yield chunk
350
+ file_handle.close()
361
351
 
362
352
 
363
353
  def join(*paths: str) -> str:
@@ -370,4 +360,32 @@ def join(*paths: str) -> str:
370
360
  return str(os.path.join(*paths))
371
361
 
372
362
 
363
+ async def exists(path: str, **kwargs) -> bool:
364
+ """
365
+ Check if a path exists.
366
+
367
+ :param path: Path to be checked.
368
+ :param kwargs: Additional arguments to be passed to the underlying filesystem.
369
+ :return: True if the path exists, False otherwise.
370
+ """
371
+ try:
372
+ fs = get_underlying_filesystem(path=path, **kwargs)
373
+ if isinstance(fs, AsyncFileSystem):
374
+ _ = await fs._info(path)
375
+ return True
376
+ _ = fs.info(path)
377
+ return True
378
+ except FileNotFoundError:
379
+ return False
380
+
381
+
382
+ def exists_sync(path: str, **kwargs) -> bool:
383
+ try:
384
+ fs = get_underlying_filesystem(path=path, **kwargs)
385
+ _ = fs.info(path)
386
+ return True
387
+ except FileNotFoundError:
388
+ return False
389
+
390
+
373
391
  register(_OBSTORE_SUPPORTED_PROTOCOLS, asynchronous=True)
flyte/types/_pickle.py CHANGED
@@ -116,14 +116,27 @@ class FlytePickleTransformer(TypeTransformer[FlytePickle]):
116
116
  and literal_type.blob.format == FlytePickleTransformer.PYTHON_PICKLE_FORMAT
117
117
  ):
118
118
  return FlytePickle
119
+ if literal_type.simple == types_pb2.SimpleType.BINARY:
120
+ return FlytePickle
119
121
 
120
122
  raise ValueError(f"Transformer {self} cannot reverse {literal_type}")
121
123
 
122
124
  def get_literal_type(self, t: Type[T]) -> types_pb2.LiteralType:
123
125
  lt = types_pb2.LiteralType(
124
- blob=types_pb2.BlobType(
125
- format=self.PYTHON_PICKLE_FORMAT, dimensionality=types_pb2.BlobType.BlobDimensionality.SINGLE
126
- )
126
+ union_type=types_pb2.UnionType(
127
+ variants=[
128
+ types_pb2.LiteralType(
129
+ blob=types_pb2.BlobType(
130
+ format=self.PYTHON_PICKLE_FORMAT,
131
+ dimensionality=types_pb2.BlobType.BlobDimensionality.SINGLE,
132
+ ),
133
+ structure=types_pb2.TypeStructure(tag=self.name),
134
+ ),
135
+ types_pb2.LiteralType(
136
+ simple=types_pb2.SimpleType.BINARY, structure=types_pb2.TypeStructure(tag=self.name)
137
+ ),
138
+ ]
139
+ ),
127
140
  )
128
141
  lt.metadata = {"python_class_name": str(t)}
129
142
  return lt
@@ -12,6 +12,8 @@ from typing import Any, List
12
12
 
13
13
  import click
14
14
 
15
+ from flyte.models import PathRewrite
16
+
15
17
  # Todo: work with pvditt to make these the names
16
18
  # ACTION_NAME = "_U_ACTION_NAME"
17
19
  # RUN_NAME = "_U_RUN_NAME"
@@ -28,8 +30,8 @@ ENDPOINT_OVERRIDE = "_U_EP_OVERRIDE"
28
30
  RUN_OUTPUT_BASE_DIR = "_U_RUN_BASE"
29
31
  FLYTE_ENABLE_VSCODE_KEY = "_F_E_VS"
30
32
 
31
- # TODO: Remove this after proper auth is implemented
32
33
  _UNION_EAGER_API_KEY_ENV_VAR = "_UNION_EAGER_API_KEY"
34
+ _F_PATH_REWRITE = "_F_PATH_REWRITE"
33
35
 
34
36
 
35
37
  @click.group()
@@ -94,6 +96,7 @@ def main(
94
96
  import flyte
95
97
  import flyte._utils as utils
96
98
  import flyte.errors
99
+ import flyte.storage as storage
97
100
  from flyte._initialize import init
98
101
  from flyte._internal.controllers import create_controller
99
102
  from flyte._internal.imagebuild.image_builder import ImageCache
@@ -136,19 +139,34 @@ def main(
136
139
  controller_kwargs["insecure"] = True
137
140
  logger.debug(f"Using controller endpoint: {ep} with kwargs: {controller_kwargs}")
138
141
 
139
- bundle = CodeBundle(tgz=tgz, pkl=pkl, destination=dest, computed_version=version)
142
+ bundle = None
143
+ if tgz or pkl:
144
+ bundle = CodeBundle(tgz=tgz, pkl=pkl, destination=dest, computed_version=version)
140
145
  init(org=org, project=project, domain=domain, image_builder="remote", **controller_kwargs)
141
146
  # Controller is created with the same kwargs as init, so that it can be used to run tasks
142
147
  controller = create_controller(ct="remote", **controller_kwargs)
143
148
 
144
149
  ic = ImageCache.from_transport(image_cache) if image_cache else None
145
150
 
151
+ path_rewrite_cfg = os.getenv(_F_PATH_REWRITE, None)
152
+ path_rewrite = None
153
+ if path_rewrite_cfg:
154
+ potential_path_rewrite = PathRewrite.from_str(path_rewrite_cfg)
155
+ if storage.exists_sync(potential_path_rewrite.new_prefix):
156
+ path_rewrite = potential_path_rewrite
157
+ logger.info(f"Path rewrite configured for {path_rewrite.new_prefix}")
158
+ else:
159
+ logger.error(
160
+ f"Path rewrite failed for path {potential_path_rewrite.new_prefix}, "
161
+ f"not found, reverting to original path {potential_path_rewrite.old_prefix}"
162
+ )
163
+
146
164
  # Create a coroutine to load the task and run it
147
165
  task_coroutine = load_and_run_task(
148
166
  resolver=resolver,
149
167
  resolver_args=resolver_args,
150
168
  action=ActionID(name=name, run_name=run_name, project=project, domain=domain, org=org),
151
- raw_data_path=RawDataPath(path=raw_data_path),
169
+ raw_data_path=RawDataPath(path=raw_data_path, path_rewrite=path_rewrite),
152
170
  checkpoints=Checkpoints(checkpoint_path, prev_checkpoint),
153
171
  code_bundle=bundle,
154
172
  input_path=inputs,
@@ -166,8 +184,20 @@ def main(
166
184
  async def _run_and_stop():
167
185
  loop = asyncio.get_event_loop()
168
186
  loop.set_exception_handler(flyte.errors.silence_grpc_polling_error)
169
- await utils.run_coros(controller_failure, task_coroutine)
170
- await controller.stop()
187
+ try:
188
+ await utils.run_coros(controller_failure, task_coroutine)
189
+ await controller.stop()
190
+ except flyte.errors.RuntimeSystemError as e:
191
+ logger.error(f"Runtime system error: {e}")
192
+ from flyte._internal.runtime.convert import convert_from_native_to_error
193
+ from flyte._internal.runtime.io import upload_error
194
+
195
+ logger.error(f"Flyte runtime failed for action {name} with run name {run_name}, error: {e}")
196
+ err = convert_from_native_to_error(e)
197
+ path = await upload_error(err.err, outputs_path)
198
+ logger.error(f"Run {run_name} Action {name} failed with error: {err}. Uploaded error to {path}")
199
+ await controller.stop()
200
+ raise
171
201
 
172
202
  asyncio.run(_run_and_stop())
173
203
  logger.warning(f"Flyte runtime completed for action {name} with run name {run_name}")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: flyte
3
- Version: 2.0.0b22
3
+ Version: 2.0.0b24
4
4
  Summary: Add your description here
5
5
  Author-email: Ketan Umare <kumare3@users.noreply.github.com>
6
6
  Requires-Python: >=3.10
@@ -25,6 +25,8 @@ Requires-Dist: async-lru>=2.0.5
25
25
  Requires-Dist: mashumaro
26
26
  Requires-Dist: dataclasses_json
27
27
  Requires-Dist: aiolimiter>=1.2.1
28
+ Provides-Extra: aiosqlite
29
+ Requires-Dist: aiosqlite>=0.21.0; extra == "aiosqlite"
28
30
  Dynamic: license-file
29
31
 
30
32
  # Flyte 2 SDK 🚀