fractal-server 2.14.0a28__py3-none-any.whl → 2.14.0a30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __VERSION__ = "2.14.0a28"
1
+ __VERSION__ = "2.14.0a30"
@@ -27,6 +27,7 @@ class HistoryRun(SQLModel, table=True):
27
27
  default=None,
28
28
  ondelete="SET NULL",
29
29
  )
30
+ job_id: int = Field(foreign_key="jobv2.id")
30
31
 
31
32
  workflowtask_dump: dict[str, Any] = Field(
32
33
  sa_column=Column(JSONB, nullable=False),
@@ -10,6 +10,7 @@ from fractal_server.app.db import AsyncSession
10
10
  from fractal_server.app.db import get_async_db
11
11
  from fractal_server.app.models import UserOAuth
12
12
  from fractal_server.app.routes.auth import current_active_user
13
+ from fractal_server.images.tools import aggregate_types
13
14
  from fractal_server.images.tools import filter_image_list
14
15
 
15
16
  router = APIRouter()
@@ -42,12 +43,8 @@ async def verify_unique_types(
42
43
  type_filters=query.type_filters,
43
44
  )
44
45
 
45
- # NOTE: see issue 2486
46
- available_types = set(
47
- _type for _img in filtered_images for _type in _img["types"].keys()
48
- )
49
-
50
46
  # Get actual values for each available type
47
+ available_types = aggregate_types(filtered_images)
51
48
  values_per_type: dict[str, set] = {
52
49
  _type: set() for _type in available_types
53
50
  }
@@ -15,6 +15,7 @@ def process_workflow(
15
15
  workflow: WorkflowV2,
16
16
  dataset: DatasetV2,
17
17
  workflow_dir_local: Path,
18
+ job_id: int,
18
19
  workflow_dir_remote: Optional[Path] = None,
19
20
  first_task_index: Optional[int] = None,
20
21
  last_task_index: Optional[int] = None,
@@ -75,6 +76,7 @@ def process_workflow(
75
76
  first_task_index : (last_task_index + 1)
76
77
  ],
77
78
  dataset=dataset,
79
+ job_id=job_id,
78
80
  runner=runner,
79
81
  workflow_dir_local=workflow_dir_local,
80
82
  workflow_dir_remote=workflow_dir_local,
@@ -38,6 +38,7 @@ def process_workflow(
38
38
  workflow: WorkflowV2,
39
39
  dataset: DatasetV2,
40
40
  workflow_dir_local: Path,
41
+ job_id: int,
41
42
  workflow_dir_remote: Optional[Path] = None,
42
43
  first_task_index: Optional[int] = None,
43
44
  last_task_index: Optional[int] = None,
@@ -87,6 +88,7 @@ def process_workflow(
87
88
  first_task_index : (last_task_index + 1)
88
89
  ],
89
90
  dataset=dataset,
91
+ job_id=job_id,
90
92
  runner=runner,
91
93
  workflow_dir_local=workflow_dir_local,
92
94
  workflow_dir_remote=workflow_dir_remote,
@@ -33,6 +33,7 @@ def process_workflow(
33
33
  workflow: WorkflowV2,
34
34
  dataset: DatasetV2,
35
35
  workflow_dir_local: Path,
36
+ job_id: int,
36
37
  workflow_dir_remote: Optional[Path] = None,
37
38
  first_task_index: Optional[int] = None,
38
39
  last_task_index: Optional[int] = None,
@@ -79,6 +80,7 @@ def process_workflow(
79
80
  first_task_index : (last_task_index + 1)
80
81
  ],
81
82
  dataset=dataset,
83
+ job_id=job_id,
82
84
  runner=runner,
83
85
  workflow_dir_local=workflow_dir_local,
84
86
  workflow_dir_remote=workflow_dir_remote,
@@ -8,9 +8,13 @@ from fractal_server.app.models.v2 import HistoryImageCache
8
8
  from fractal_server.app.models.v2 import HistoryRun
9
9
  from fractal_server.app.models.v2 import HistoryUnit
10
10
  from fractal_server.app.schemas.v2 import HistoryUnitStatus
11
+ from fractal_server.logger import set_logger
12
+
11
13
 
12
14
  _CHUNK_SIZE = 2_000
13
15
 
16
+ logger = set_logger(__name__)
17
+
14
18
 
15
19
  def update_status_of_history_run(
16
20
  *,
@@ -46,7 +50,12 @@ def bulk_update_status_of_history_unit(
46
50
  status: HistoryUnitStatus,
47
51
  db_sync: Session,
48
52
  ) -> None:
49
- for ind in range(0, len(history_unit_ids), _CHUNK_SIZE):
53
+
54
+ len_history_unit_ids = len(history_unit_ids)
55
+ logger.debug(
56
+ f"[bulk_update_status_of_history_unit] {len_history_unit_ids=}."
57
+ )
58
+ for ind in range(0, len_history_unit_ids, _CHUNK_SIZE):
50
59
  db_sync.execute(
51
60
  update(HistoryUnit)
52
61
  .where(
@@ -85,10 +94,14 @@ def bulk_upsert_image_cache_fast(
85
94
  List of dictionaries for objects to be upsert-ed.
86
95
  db: A sync database session
87
96
  """
88
- if len(list_upsert_objects) == 0:
97
+ len_list_upsert_objects = len(list_upsert_objects)
98
+
99
+ logger.debug(f"[bulk_upsert_image_cache_fast] {len_list_upsert_objects=}.")
100
+
101
+ if len_list_upsert_objects == 0:
89
102
  return None
90
103
 
91
- for ind in range(0, len(list_upsert_objects), _CHUNK_SIZE):
104
+ for ind in range(0, len_list_upsert_objects, _CHUNK_SIZE):
92
105
  stmt = pg_insert(HistoryImageCache).values(
93
106
  list_upsert_objects[ind : ind + _CHUNK_SIZE]
94
107
  )
@@ -42,6 +42,7 @@ def execute_tasks_v2(
42
42
  runner: BaseRunner,
43
43
  user_id: int,
44
44
  workflow_dir_local: Path,
45
+ job_id: int,
45
46
  workflow_dir_remote: Optional[Path] = None,
46
47
  logger_name: Optional[str] = None,
47
48
  get_runner_config: Callable[
@@ -119,6 +120,7 @@ def execute_tasks_v2(
119
120
  history_run = HistoryRun(
120
121
  dataset_id=dataset.id,
121
122
  workflowtask_id=wftask.id,
123
+ job_id=job_id,
122
124
  workflowtask_dump=workflowtask_dump,
123
125
  task_group_dump=task_group_dump,
124
126
  num_available_images=num_available_images,
@@ -128,6 +130,10 @@ def execute_tasks_v2(
128
130
  db.commit()
129
131
  db.refresh(history_run)
130
132
  history_run_id = history_run.id
133
+ logger.debug(
134
+ "[execute_tasks_v2] Created `HistoryRun` with "
135
+ f"{history_run_id=}."
136
+ )
131
137
 
132
138
  # TASK EXECUTION (V2)
133
139
  if task.type in ["non_parallel", "converter_non_parallel"]:
@@ -187,6 +187,10 @@ def run_v2_task_non_parallel(
187
187
  db.add(history_unit)
188
188
  db.commit()
189
189
  db.refresh(history_unit)
190
+ logger.debug(
191
+ "[run_v2_task_non_parallel] Created `HistoryUnit` with "
192
+ f"{history_run_id=}."
193
+ )
190
194
  history_unit_id = history_unit.id
191
195
  bulk_upsert_image_cache_fast(
192
196
  db=db,
@@ -301,6 +305,10 @@ def run_v2_task_parallel(
301
305
  with next(get_sync_db()) as db:
302
306
  db.add_all(history_units)
303
307
  db.commit()
308
+ logger.debug(
309
+ f"[run_v2_task_non_parallel] Created {len(history_units)} "
310
+ "`HistoryUnit`s."
311
+ )
304
312
 
305
313
  for history_unit in history_units:
306
314
  db.refresh(history_unit)
@@ -419,6 +427,10 @@ def run_v2_task_compound(
419
427
  db.commit()
420
428
  db.refresh(history_unit)
421
429
  init_history_unit_id = history_unit.id
430
+ logger.debug(
431
+ "[run_v2_task_compound] Created `HistoryUnit` with "
432
+ f"{init_history_unit_id=}."
433
+ )
422
434
  # Create one `HistoryImageCache` for each input image
423
435
  bulk_upsert_image_cache_fast(
424
436
  db=db,
@@ -468,14 +480,6 @@ def run_v2_task_compound(
468
480
 
469
481
  num_tasks = 1 + len(parallelization_list)
470
482
 
471
- # Mark the init-task `HistoryUnit` as "done"
472
- with next(get_sync_db()) as db:
473
- update_status_of_history_unit(
474
- history_unit_id=init_history_unit_id,
475
- status=HistoryUnitStatus.DONE,
476
- db_sync=db,
477
- )
478
-
479
483
  # 3/B: parallel part of a compound task
480
484
  _check_parallelization_list_size(parallelization_list)
481
485
 
@@ -536,6 +540,10 @@ def run_v2_task_compound(
536
540
  db.commit()
537
541
  for history_unit in history_units:
538
542
  db.refresh(history_unit)
543
+ logger.debug(
544
+ f"[run_v2_task_compound] Created {len(history_units)} "
545
+ "`HistoryUnit`s."
546
+ )
539
547
  history_unit_ids = [history_unit.id for history_unit in history_units]
540
548
 
541
549
  results, exceptions = runner.multisubmit(
@@ -585,7 +593,7 @@ def run_v2_task_compound(
585
593
  else:
586
594
  bulk_update_status_of_history_unit(
587
- history_unit_ids=history_unit_ids,
595
+ history_unit_ids=history_unit_ids + [init_history_unit_id],
588
596
  status=HistoryUnitStatus.DONE,
589
597
  db_sync=db,
590
598
  )
591
599
 
@@ -282,6 +282,7 @@ def submit_workflow(
282
282
  process_workflow(
283
283
  workflow=workflow,
284
284
  dataset=dataset,
285
+ job_id=job_id,
285
286
  user_id=user_id,
286
287
  workflow_dir_local=WORKFLOW_DIR_LOCAL,
287
288
  workflow_dir_remote=WORKFLOW_DIR_REMOTE,
@@ -45,6 +45,7 @@ class HistoryRunRead(BaseModel):
45
45
  id: int
46
46
  dataset_id: int
47
47
  workflowtask_id: Optional[int] = None
48
+ job_id: int
48
49
  workflowtask_dump: dict[str, Any]
49
50
  task_group_dump: dict[str, Any]
50
51
  timestamp_started: AwareDatetime
@@ -0,0 +1,41 @@
1
+ """job id in history run
2
+
3
+ Revision ID: c90a7c76e996
4
+ Revises: f37aceb45062
5
+ Create Date: 2025-04-16 10:44:30.219309
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "c90a7c76e996"
14
+ down_revision = "f37aceb45062"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("historyrun", schema=None) as batch_op:
22
+ batch_op.add_column(sa.Column("job_id", sa.Integer(), nullable=False))
23
+ batch_op.create_foreign_key(
24
+ batch_op.f("fk_historyrun_job_id_jobv2"),
25
+ "jobv2",
26
+ ["job_id"],
27
+ ["id"],
28
+ )
29
+
30
+ # ### end Alembic commands ###
31
+
32
+
33
+ def downgrade() -> None:
34
+ # ### commands auto generated by Alembic - please adjust! ###
35
+ with op.batch_alter_table("historyrun", schema=None) as batch_op:
36
+ batch_op.drop_constraint(
37
+ batch_op.f("fk_historyrun_job_id_jobv2"), type_="foreignkey"
38
+ )
39
+ batch_op.drop_column("job_id")
40
+
41
+ # ### end Alembic commands ###
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: fractal-server
3
- Version: 2.14.0a28
3
+ Version: 2.14.0a30
4
4
  Summary: Backend component of the Fractal analytics platform
5
5
  License: BSD-3-Clause
6
6
  Author: Tommaso Comparin
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=7UElp_oL26Xzwy0fazO-_dZcH0gdX29TdHAqVARp7O8,26
1
+ fractal_server/__init__.py,sha256=SFFDWPMmFk-6eyFIzPxZ-xU9S6oQw1VgiT5XGEJQlWA,26
2
2
  fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -12,7 +12,7 @@ fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZD
12
12
  fractal_server/app/models/v2/__init__.py,sha256=vjHwek7-IXmaZZL9VF0nD30YL9ca4wNc8P4RXJK_kDc,832
13
13
  fractal_server/app/models/v2/accounting.py,sha256=f2ALxfKKBNxFLJTtC2-YqRepVK253x68y7zkD2V_Nls,1115
14
14
  fractal_server/app/models/v2/dataset.py,sha256=Xa3YLmqvSChBJoqlSsjmt-5x0zC-6rSx2eafFnMukfo,1240
15
- fractal_server/app/models/v2/history.py,sha256=u4i0NZko8eX5YKAk3MvVIIxU3owJ7D9tEPS_uJT9rrQ,2034
15
+ fractal_server/app/models/v2/history.py,sha256=6yuYhsXgahHxv5FmDdv__aFndT228_rBFjTtkS-3Ohg,2082
16
16
  fractal_server/app/models/v2/job.py,sha256=JWrEjX_E4iRFr5MbmtV_aY28J-5D469awLr0rfa5Kig,2052
17
17
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
18
18
  fractal_server/app/models/v2/task.py,sha256=8KEROaadgccXRZIP7EriBp2j1FgzYkgiirOi5_fG79M,1494
@@ -47,7 +47,7 @@ fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-
47
47
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=totsl0DOC2DFLw8vgqOFivvftpEk3KbFDeOHT0UVQUs,5997
48
48
  fractal_server/app/routes/api/v2/task_group.py,sha256=62zcVTdheXM5V3WmFuqisIqgETjXmZaRpNMcDX5bXS0,7408
49
49
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
50
- fractal_server/app/routes/api/v2/verify_image_types.py,sha256=zGT1el58P-E7dVttyuo6MdCC0DtsxiP-NqMawl6EpGE,1950
50
+ fractal_server/app/routes/api/v2/verify_image_types.py,sha256=RBi6-3Sp1wYm_obDPRcEBtLvRfsRknufbZyhGGHVo6I,1924
51
51
  fractal_server/app/routes/api/v2/workflow.py,sha256=sW6Nm7dfzUY354hawyEkpQHy7rUvV2FCV8DPorH-TDU,10270
52
52
  fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
53
53
  fractal_server/app/routes/api/v2/workflowtask.py,sha256=7_syX2EO7ibF6Xkm7HBPhsUYq6aYnKNeC5iSaafQhG4,11342
@@ -95,16 +95,16 @@ fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2X
95
95
  fractal_server/app/runner/shutdown.py,sha256=9pfSKHDNdIcm0eY-opgRTi7y0HmvfPmYiu9JR6Idark,2082
96
96
  fractal_server/app/runner/task_files.py,sha256=27xFuPzSJc1Pw912CfSMPOhOIpvNwpkyLCnycqdo9lw,4365
97
97
  fractal_server/app/runner/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
98
- fractal_server/app/runner/v2/_local.py,sha256=DK8yagbvd6HHjcDVhUzTy0f7MURlTkQha-NM6OZKgJc,3044
99
- fractal_server/app/runner/v2/_slurm_ssh.py,sha256=_bytOf8z9sdrhI03D6eqg-aQPnJ7V2-qnqpcHAYizns,3278
100
- fractal_server/app/runner/v2/_slurm_sudo.py,sha256=DBCNxifXmMkpu71Wnk5u9-wKT7PV1WROQuY_4DYoZRI,2993
101
- fractal_server/app/runner/v2/db_tools.py,sha256=BfwDhIDssBmEu6HDRj1RSvDYLaoLSWFByro1Ca70aA8,2966
98
+ fractal_server/app/runner/v2/_local.py,sha256=Ggdxx_XOlMya3bgXn_vGd2WMNVmLQaO3w9ZPaxYlRQk,3088
99
+ fractal_server/app/runner/v2/_slurm_ssh.py,sha256=CEaJLajwdDjdpxY1_7aTLb9wqgzeOuxLlSewScMEx_Y,3322
100
+ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=TVihkQKMX6YWEWxXJjQo0WEQOjVy7FVVLmbM3MCulR0,3037
101
+ fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
102
102
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
103
103
  fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
104
- fractal_server/app/runner/v2/runner.py,sha256=sbBOH5gCErxK0fCPPGBWtLtqsSwtmrhTth5OLUGMeZQ,15658
105
- fractal_server/app/runner/v2/runner_functions.py,sha256=2im4gskJRLN4SQ6jhfgbU-U0-nEz5r7YtrpC10S2aWg,18209
104
+ fractal_server/app/runner/v2/runner.py,sha256=aNMPABdTS9kJADL2JUeRNI6Ir-gDFTOnRI2tFRohjOU,15848
105
+ fractal_server/app/runner/v2/runner_functions.py,sha256=a7pmlFrtQ9f42NVwHUwiS_3rVwtaLeR0l3zastvn2jk,18518
106
106
  fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=_h_OOffq3d7V0uHa8Uvs0mj31y1GSZBUXjDDF3WjVjY,3620
107
- fractal_server/app/runner/v2/submit_workflow.py,sha256=EDUyUuIPwZHb2zm7SCRRoFsGq2cN-b5OKw6CYkZ8kWk,13048
107
+ fractal_server/app/runner/v2/submit_workflow.py,sha256=QywUGIoHAHnrWgfnyX8W9kVqKY-RvVyNLpzrbsXZOZ4,13075
108
108
  fractal_server/app/runner/v2/task_interface.py,sha256=IXdQTI8rXFgXv1Ez0js4CjKFf3QwO2GCHRTuwiFtiTQ,2891
109
109
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
110
110
  fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
@@ -117,7 +117,7 @@ fractal_server/app/schemas/v2/__init__.py,sha256=wXS4ZEzobWx5dh-XLjMZWpd-JMwWFPO
117
117
  fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
118
118
  fractal_server/app/schemas/v2/dataset.py,sha256=xNWdOW8hhL5Wx-iwyUPrZfWcC8fFuGDgdOHvZLbGVME,2782
119
119
  fractal_server/app/schemas/v2/dumps.py,sha256=uc9itXekO5IFfR6UucpQ5BX9NZZ8erE4hRR6S6aXlOc,2284
120
- fractal_server/app/schemas/v2/history.py,sha256=EhfTBYNVVWeWTj5QFrnuaD4zB73fThK1lyhxB2OBFoc,1659
120
+ fractal_server/app/schemas/v2/history.py,sha256=Y3rc96DOPGQGZWJtBYVHiBjMQEhFtMq4WGkV4vs1oDE,1675
121
121
  fractal_server/app/schemas/v2/job.py,sha256=OXPB4oPiMVWYgZu0lGzM_LGACvhWBavsW7c3MmivdDM,4556
122
122
  fractal_server/app/schemas/v2/manifest.py,sha256=8mmB0QwxEgAeGgwKD_fT-o-wFy7lb6HxNXbp17IJqNY,7281
123
123
  fractal_server/app/schemas/v2/project.py,sha256=ulgCmUnX0w-0jrSjVYIT7sxeK95CSNGh2msXydhsgYI,885
@@ -166,6 +166,7 @@ fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.p
166
166
  fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
167
167
  fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py,sha256=BftudWuSGvKGBzIL5AMb3yWkgTAuaKPBGsYcOzp_gLQ,1899
168
168
  fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha256=9sLd0F7nO5chHHm7RZ4wBA-9bvWomS-av_odKwODADM,1551
169
+ fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py,sha256=Y1cPwmFOZ4mx3v2XZM6adgu8u0L0VD_R4ADURyMb2ro,1102
169
170
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
170
171
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
171
172
  fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=yGWSA2HIHUybcVy66xBITk08opV2DFYSCIIrulaUZhI,901
@@ -208,8 +209,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
208
209
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
209
210
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
210
211
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
211
- fractal_server-2.14.0a28.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
212
- fractal_server-2.14.0a28.dist-info/METADATA,sha256=bSrvv3snGeMW_3fQ2zdtKvoGU2SZm5J0-EZPxEcy43c,4563
213
- fractal_server-2.14.0a28.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
214
- fractal_server-2.14.0a28.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
215
- fractal_server-2.14.0a28.dist-info/RECORD,,
212
+ fractal_server-2.14.0a30.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
213
+ fractal_server-2.14.0a30.dist-info/METADATA,sha256=0N6oyTMnvT6iCt7t3wApMtB6XIZ8-aGQnvE3bS584bY,4563
214
+ fractal_server-2.14.0a30.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
215
+ fractal_server-2.14.0a30.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
216
+ fractal_server-2.14.0a30.dist-info/RECORD,,