fractal-server 1.2.2__py3-none-any.whl → 1.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "1.2.2"
+ __VERSION__ = "1.2.3"

fractal_server/app/api/v1/job.py
@@ -52,6 +52,7 @@ async def get_job(
      except (KeyError, FileNotFoundError):
          pass

+     await db.close()
      return job_read
@@ -85,6 +86,8 @@ async def download_job_logs(
      for fpath in working_dir_path.glob("*"):
          zipfile.write(filename=str(fpath), arcname=str(fpath.name))

+     await db.close()
+
      return StreamingResponse(
          iter([byte_stream.getvalue()]),
          media_type="application/x-zip-compressed",
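
The two hunks above show the change this release applies across the whole API surface: every handler now calls await db.close() explicitly before returning, rather than leaving cleanup to the get_db dependency teardown, which only runs after the response has gone out. Closing eagerly hands the pooled connection back as soon as the handler is done with it. A minimal sketch of the pattern, assuming a hypothetical Item model, an aiosqlite URL, and a yield-style get_db dependency in place of fractal-server's actual models and engine:

from typing import AsyncGenerator

from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Item(Base):  # hypothetical model, standing in for Project/Task/etc.
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_async_engine("sqlite+aiosqlite:///./example.db")
async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
app = FastAPI()


@app.on_event("startup")
async def create_tables() -> None:
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    # Yield-style dependency: its teardown (and implicit close) only runs
    # after the response has been sent.
    async with async_session_maker() as session:
        yield session


@app.get("/items/{item_id}")
async def get_item(item_id: int, db: AsyncSession = Depends(get_db)) -> dict:
    item = await db.get(Item, item_id)
    await db.close()  # hand the connection back to the pool right away
    if item is None:
        raise HTTPException(status_code=404, detail="Item not found")
    return {"id": item.id, "name": item.name}

Closing a session that the dependency later closes again is harmless; calling Session.close() on an already-closed session is effectively a no-op.
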

fractal_server/app/api/v1/project.py
@@ -143,6 +143,7 @@ async def get_list_project(
      )
      res = await db.execute(stm)
      project_list = res.scalars().all()
+     await db.close()
      return project_list
@@ -177,6 +178,7 @@ async def create_project(
          db.add(db_project)
          await db.commit()
          await db.refresh(db_project)
+         await db.close()
      except IntegrityError as e:
          await db.rollback()
          logger = set_logger("create_project")
@@ -200,7 +202,9 @@ async def apply_workflow(
      background_tasks: BackgroundTasks,
      user: User = Depends(current_active_user),
      db: AsyncSession = Depends(get_db),
-     db_sync: DBSyncSession = Depends(get_sync_db),
+     db_sync: DBSyncSession = Depends(
+         get_sync_db
+     ),  # FIXME: why both sync and async? # noqa
  ) -> Optional[ApplyWorkflowRead]:
      output = await _get_dataset_check_owner(
          project_id=apply_workflow.project_id,
@@ -293,15 +297,18 @@ async def apply_workflow(

      background_tasks.add_task(
          submit_workflow,
-         workflow=workflow,
-         input_dataset=input_dataset,
-         output_dataset=output_dataset,
+         workflow_id=workflow.id,
+         input_dataset_id=input_dataset.id,
+         output_dataset_id=output_dataset.id,
          job_id=job.id,
          worker_init=apply_workflow.worker_init,
          slurm_user=user.slurm_user,
          user_cache_dir=user.cache_dir,
      )

+     await db.close()
+     db_sync.close()
+
      return job
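
The second change above is that apply_workflow no longer hands live ORM objects to the background task: it passes primary keys, and submit_workflow re-loads the rows in a session it owns (see the runner hunks further down). Once the request-scoped session is closed, a detached instance can no longer lazy-load or refresh, so primitives are the safer currency between a request and its background work. A schematic of the handoff, with a hypothetical run_job worker and hard-coded ids:

from fastapi import BackgroundTasks, FastAPI

app = FastAPI()


async def run_job(*, workflow_id: int, dataset_id: int) -> None:
    # A real worker would open its own DB session here and re-fetch the
    # rows by primary key (see the runner sketch further down).
    print(f"running workflow {workflow_id} on dataset {dataset_id}")


@app.post("/apply")
async def apply(background_tasks: BackgroundTasks) -> dict:
    workflow_id, dataset_id = 1, 2  # normally read off freshly committed rows
    background_tasks.add_task(run_job, workflow_id=workflow_id, dataset_id=dataset_id)
    return {"status": "submitted"}  # the task runs after the response is sent
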
@@ -320,6 +327,7 @@ async def get_project(
      project = await _get_project_check_owner(
          project_id=project_id, user_id=user.id, db=db
      )
+     await db.close()
      return project
@@ -337,6 +345,7 @@ async def delete_project(
      )
      await db.delete(project)
      await db.commit()
+     await db.close()
      return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -361,6 +370,8 @@ async def add_dataset(
      db.add(db_dataset)
      await db.commit()
      await db.refresh(db_dataset)
+     await db.close()
+
      return db_dataset
@@ -379,6 +390,7 @@ async def get_workflow_list(
      stm = select(Workflow).where(Workflow.project_id == project_id)
      res = await db.execute(stm)
      workflow_list = res.scalars().all()
+     await db.close()
      return workflow_list
@@ -397,6 +409,7 @@ async def get_job_list(
      stm = select(ApplyWorkflow).where(ApplyWorkflow.project_id == project_id)
      res = await db.execute(stm)
      job_list = res.scalars().all()
+     await db.close()
      return job_list
@@ -415,6 +428,7 @@ async def edit_project(

      await db.commit()
      await db.refresh(project)
+     await db.close()
      return project
@@ -435,6 +449,7 @@ async def get_dataset(
          project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
      )
      dataset = output["dataset"]
+     await db.close()
      return dataset
@@ -462,6 +477,7 @@ async def patch_dataset(

      await db.commit()
      await db.refresh(db_dataset)
+     await db.close()
      return db_dataset
@@ -488,6 +504,7 @@ async def delete_dataset(
      dataset = res.scalar()
      await db.delete(dataset)
      await db.commit()
+     await db.close()
      return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -521,6 +538,7 @@ async def add_resource(
      db.add(db_resource)
      await db.commit()
      await db.refresh(db_resource)
+     await db.close()
      return db_resource
@@ -543,6 +561,7 @@ async def get_resource(
      stm = select(Resource).where(Resource.dataset_id == dataset_id)
      res = await db.execute(stm)
      resource_list = res.scalars().all()
+     await db.close()
      return resource_list
@@ -573,6 +592,7 @@ async def delete_resource(
      )
      await db.delete(resource)
      await db.commit()
+     await db.close()
      return Response(status_code=status.HTTP_204_NO_CONTENT)
@@ -614,6 +634,7 @@ async def edit_resource(
          setattr(orig_resource, key, value)
      await db.commit()
      await db.refresh(orig_resource)
+     await db.close()
      return orig_resource
@@ -715,4 +736,5 @@ async def import_workflow_into_project(
          db=db,
      )

+     await db.close()
      return db_workflow

fractal_server/app/api/v1/task.py
@@ -35,9 +35,7 @@ from ....tasks.collection import get_collection_path
  from ....tasks.collection import get_log_path
  from ....tasks.collection import inspect_package
  from ...db import AsyncSession
- from ...db import DBSyncSession
  from ...db import get_db
- from ...db import get_sync_db
  from ...models import State
  from ...models import Task
  from ...security import current_active_superuser
@@ -48,7 +46,9 @@ router = APIRouter()


  async def _background_collect_pip(
-     state: State, venv_path: Path, task_pkg: _TaskCollectPip, db: AsyncSession
+     state_id: int,
+     venv_path: Path,
+     task_pkg: _TaskCollectPip,
  ) -> None:
      """
      Install package and collect tasks
@@ -59,6 +59,12 @@ async def _background_collect_pip(
      In case of error, copy the log into the state and delete the package
      directory.
      """
+
+     # Note: anext(get_db()) is only available for python>=3.10
+     db = await get_db().__anext__()
+
+     state: State = await db.get(State, state_id)
+
      logger_name = task_pkg.package.replace("/", "_")
      logger = set_logger(
          logger_name=logger_name,
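
_background_collect_pip now builds its own session instead of borrowing the request's: it steps the get_db async generator by hand. As the inline comment notes, the anext() builtin only arrived in Python 3.10, hence the explicit __anext__() call. A self-contained sketch of the same trick, with an aiosqlite URL and a placeholder job body standing in for the real task collection:

from typing import AsyncGenerator

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

engine = create_async_engine("sqlite+aiosqlite:///./example.db")
async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    async with async_session_maker() as session:
        yield session


async def background_job(state_id: int) -> None:
    # Step the dependency generator by hand to get a session outside of a
    # request; equivalent to `db = await anext(get_db())` on Python >= 3.10.
    db = await get_db().__anext__()
    try:
        ...  # load the State row by state_id, do the work, commit
    finally:
        await db.close()

The generator itself is left suspended at its yield; like the code above, the sketch closes the session directly rather than exhausting the generator.
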
@@ -110,6 +116,7 @@ async def _background_collect_pip(
          logger.debug("Task-collection status: OK")
          logger.info("Background task collection completed successfully")
          close_logger(logger)
+         await db.close()

      except Exception as e:
          # Write last logs to file
@@ -124,6 +131,7 @@ async def _background_collect_pip(
          state.data = data.sanitised_dict()
          await db.merge(state)
          await db.commit()
+         await db.close()

          # Delete corrupted package dir
          shell_rmtree(venv_path)
@@ -140,6 +148,7 @@ async def _insert_tasks(
      db.add_all(task_db_list)
      await db.commit()
      await asyncio.gather(*[db.refresh(t) for t in task_db_list])
+     await db.close()
      return task_db_list
@@ -208,6 +217,7 @@ async def collect_tasks_pip(
          try:
              task_collect_status = get_collection_data(venv_path)
          except FileNotFoundError as e:
+             await db.close()
              raise HTTPException(
                  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                  detail=(
@@ -219,6 +229,7 @@ async def collect_tasks_pip(
          task_collect_status.info = "Already installed"
          state = State(data=task_collect_status.sanitised_dict())
          response.status_code == status.HTTP_200_OK
+         await db.close()
          return state
      settings = Inject(get_settings)
@@ -237,10 +248,9 @@ async def collect_tasks_pip(

      background_tasks.add_task(
          _background_collect_pip,
-         state=state,
+         state_id=state.id,
          venv_path=venv_path,
          task_pkg=task_pkg,
-         db=db,
      )
      logger.debug(
          "Task-collection endpoint: start background collection "
@@ -253,6 +263,7 @@ async def collect_tasks_pip(
      )
      state.data["info"] = info
      response.status_code = status.HTTP_201_CREATED
+     await db.close()
      return state
@@ -270,6 +281,7 @@ async def check_collection_status(
      logger.debug(f"Querying state for state.id={state_id}")
      state = await db.get(State, state_id)
      if not state:
+         await db.close()
          raise HTTPException(
              status_code=status.HTTP_404_NOT_FOUND,
              detail=f"No task collection info with id={state_id}",
@@ -282,6 +294,7 @@ async def check_collection_status(
          data.log = get_collection_log(data.venv_path)
      state.data = data.sanitised_dict()
      close_logger(logger)
+     await db.close()
      return state
@@ -297,19 +310,21 @@ async def get_list_task(
      res = await db.execute(stm)
      task_list = res.scalars().unique().fetchall()
      await asyncio.gather(*[db.refresh(t) for t in task_list])
+     await db.close()
      return task_list


  @router.get("/{task_id}", response_model=TaskRead)
- def get_task(
+ async def get_task(
      task_id: int,
      user: User = Depends(current_active_user),
-     db_sync: DBSyncSession = Depends(get_sync_db),
+     db: AsyncSession = Depends(get_db),
  ) -> TaskRead:
      """
      Get info on a specific task
      """
-     task = db_sync.get(Task, task_id)
+     task = await db.get(Task, task_id)
+     await db.close()
      return task
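
get_task also flips from the synchronous session to the async one, which means the handler itself becomes async: the sync variant blocks a threadpool worker while it waits on the database, while the async variant awaits the driver on the event loop. A side-by-side sketch under the same assumptions as before (hypothetical Task model, SQLite engines, local get_db/get_sync_db dependencies):

from typing import AsyncGenerator, Generator

from fastapi import APIRouter, Depends
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import Session, declarative_base, sessionmaker

Base = declarative_base()


class Task(Base):  # hypothetical minimal model
    __tablename__ = "task"
    id = Column(Integer, primary_key=True)
    name = Column(String)


sync_engine = create_engine("sqlite:///./example.db")
async_engine = create_async_engine("sqlite+aiosqlite:///./example.db")
Base.metadata.create_all(sync_engine)  # the same file backs both engines
sync_session_maker = sessionmaker(sync_engine)
async_session_maker = sessionmaker(async_engine, class_=AsyncSession)
router = APIRouter()


def get_sync_db() -> Generator[Session, None, None]:
    with sync_session_maker() as session:
        yield session


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    async with async_session_maker() as session:
        yield session


@router.get("/old/{task_id}")
def get_task_sync(task_id: int, db_sync: Session = Depends(get_sync_db)) -> dict:
    task = db_sync.get(Task, task_id)  # blocks a threadpool worker
    return {"id": task.id, "name": task.name}


@router.get("/new/{task_id}")
async def get_task_async(task_id: int, db: AsyncSession = Depends(get_db)) -> dict:
    task = await db.get(Task, task_id)  # awaited on the event loop
    await db.close()
    return {"id": task.id, "name": task.name}
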
@@ -346,6 +361,7 @@ async def patch_task(

      await db.commit()
      await db.refresh(db_task)
+     await db.close()
      return db_task
@@ -369,4 +385,5 @@ async def create_task(
      db.add(db_task)
      await db.commit()
      await db.refresh(db_task)
+     await db.close()
      return db_task

fractal_server/app/api/v1/workflow.py
@@ -184,6 +184,7 @@ async def create_workflow(
      db.add(db_workflow)
      await db.commit()
      await db.refresh(db_workflow)
+     await db.close()
      return db_workflow
@@ -247,6 +248,7 @@ async def patch_workflow(
          setattr(workflow, key, value)
      await db.commit()
      await db.refresh(workflow)
+     await db.close()

      return workflow
@@ -292,6 +294,7 @@ async def add_task_to_workflow(
          db=db,
      )

+     await db.close()
      return workflow_task
@@ -337,6 +340,7 @@ async def patch_workflow_task(

      await db.commit()
      await db.refresh(db_workflow_task)
+     await db.close()

      return db_workflow_task
@@ -387,4 +391,5 @@ async def export_worfklow(
      workflow = await _get_workflow_check_owner(
          workflow_id=workflow_id, user_id=user.id, db=db
      )
+     await db.close()
      return workflow

fractal_server/app/runner/__init__.py
@@ -68,9 +68,9 @@ def get_process_workflow():

  async def submit_workflow(
      *,
-     workflow: Workflow,
-     input_dataset: Dataset,
-     output_dataset: Dataset,
+     workflow_id: int,
+     input_dataset_id: int,
+     output_dataset_id: int,
      job_id: int,
      worker_init: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -83,16 +83,14 @@ async def submit_workflow(
      backend (e.g. local or slurm backend).

      Args:
-         workflow:
-             Workflow being applied
-         input_dataset:
-             Input dataset
-         output_dataset:
-             the destination dataset of the workflow. If not provided,
+         workflow_id:
+             ID of the workflow being applied
+         input_dataset_id
+             Input dataset ID
+         output_dataset_id:
+             ID of the destination dataset of the workflow. If not provided,
              overwriting of the input dataset is implied and an error is raised
-             if the dataset is in read only mode. If a string is passed and the
-             dataset does not exist, a new dataset with that name is created and
-             within it a new resource with the same name.
+             if the dataset is in read only mode.
          job_id:
              Id of the job record which stores the state for the current
              workflow application.
@@ -107,10 +105,21 @@ async def submit_workflow(
          The username to impersonate for the workflow execution, for the
          slurm backend.
      """
+
      db_sync = next(DB.get_sync_db())
-     job: ApplyWorkflow = db_sync.get(ApplyWorkflow, job_id)  # type: ignore
+
+     job: ApplyWorkflow = db_sync.get(ApplyWorkflow, job_id)
      if not job:
          raise ValueError("Cannot fetch job from database")
+     input_dataset: Dataset = db_sync.get(Dataset, input_dataset_id)
+     if not input_dataset:
+         raise ValueError("Cannot fetch input_dataset from database")
+     output_dataset: Dataset = db_sync.get(Dataset, output_dataset_id)
+     if not output_dataset:
+         raise ValueError("Cannot fetch output_dataset from database")
+     workflow: Workflow = db_sync.get(Workflow, workflow_id)
+     if not workflow:
+         raise ValueError("Cannot fetch workflow from database")

      # Select backend
      settings = Inject(get_settings)
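
This is the worker side of the id-passing change: each id is resolved against the worker's own sync session, with a ValueError if the row vanished between submission and execution, since there is no HTTP response to attach an error to. The repeated get-and-check could be factored into a helper; a sketch of that shape, where fetch_or_raise and the toy Workflow model are hypothetical, not fractal-server code:

from typing import Type, TypeVar

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()
T = TypeVar("T")


class Workflow(Base):  # toy model for the demonstration
    __tablename__ = "workflow"
    id = Column(Integer, primary_key=True)


def fetch_or_raise(db: Session, model: Type[T], object_id: int) -> T:
    # Mirror the guard used above: in a background worker a missing row is a
    # hard error, since there is no HTTP response to attach it to.
    obj = db.get(model, object_id)
    if obj is None:
        raise ValueError(f"Cannot fetch {model.__name__} id={object_id} from database")
    return obj


if __name__ == "__main__":
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as db:
        db.add(Workflow(id=1))
        db.commit()
        workflow = fetch_or_raise(db, Workflow, 1)  # returns the row
        try:
            fetch_or_raise(db, Workflow, 99)
        except ValueError as e:
            print(e)  # Cannot fetch Workflow id=99 from database
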
@@ -178,7 +187,13 @@ async def submit_workflow(
      logger.debug(f"job.workflow_dir_user: {str(WORKFLOW_DIR_USER)}")
      logger.debug(f'START workflow "{workflow.name}"')

+     # Note: from the docs, "The Session.close() method does not prevent the
+     # Session from being used again"
+     # (https://docs.sqlalchemy.org/en/20/orm/session_api.html#sqlalchemy.orm.Session.close)
+     db_sync.close()
+
      try:
+
          output_dataset.meta = await process_workflow(
              workflow=workflow,
              input_paths=input_paths,
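
The comment added above quotes the SQLAlchemy documentation: Session.close() ends the transaction and releases the connection, but the Session object remains usable, and the next operation simply checks a fresh connection out of the pool. That is what lets submit_workflow close db_sync before the long process_workflow await and still merge and commit results afterwards. A short demonstration:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

session = Session(create_engine("sqlite://"))
print(session.execute(text("SELECT 1")).scalar())  # 1
session.close()  # releases the connection; the Session object stays usable
print(session.execute(text("SELECT 2")).scalar())  # 2: a new connection is acquired
session.close()
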
@@ -192,6 +207,10 @@ async def submit_workflow(
              worker_init=worker_init,
          )

+         logger.info(
+             f'End execution of workflow "{workflow.name}"; '
+             f"more logs at {str(log_file_path)}"
+         )
          logger.debug(f'END workflow "{workflow.name}"')

          db_sync.merge(output_dataset)
@@ -240,3 +259,4 @@ async def submit_workflow(
      finally:
          close_job_logger(logger)
          db_sync.commit()
+         db_sync.close()
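
With the final hunk, the finally block both commits and closes db_sync, so the worker's session is released whether the workflow run succeeds or fails. Reduced to a sketch, with placeholder bodies for the work and the failure handling:

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine("sqlite://")


def run_job_with_cleanup() -> None:
    db_sync = Session(engine)
    try:
        ...  # run the workflow, merge its outputs into db_sync
    except Exception:
        ...  # record the failure on the job row before re-raising
        raise
    finally:
        db_sync.commit()  # persist whatever state was recorded
        db_sync.close()   # always hand the connection back
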

fractal_server/app/security/__init__.py
@@ -173,6 +173,7 @@ async def list_users(
      stm = select(User)
      res = await db.execute(stm)
      user_list = res.scalars().all()
+     await db.close()
      return user_list

fractal_server-1.2.3.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 1.2.2
+ Version: 1.2.3
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause

fractal_server-1.2.3.dist-info/RECORD
@@ -1,14 +1,14 @@
  fractal_server/.gitignore,sha256=9swdobOQ_Celjt2OgRUpy9oEpNqTBn-1NRUFfJU4rps,69
- fractal_server/__init__.py,sha256=OOlw46faCY9wzYwazGLg7oNccHXfFeZp3CIso18O8SU,22
+ fractal_server/__init__.py,sha256=t3mOgNMHEo0S33akuGM5qsLlIvf6r557V5R6caoFy5w,22
  fractal_server/__main__.py,sha256=znijcImbcEC4P26ICOhEJ9VY3_5vWdMwQcl-WP25sYA,2202
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/api/__init__.py,sha256=A_RS6hUnnFef2dcWbvG0kLmQdNs26g9W47fAbWc_WXo,887
  fractal_server/app/api/v1/__init__.py,sha256=2HMymr1YkUk39V8iof7KENyLnre4ghouOSvNZ_kF1ec,24
- fractal_server/app/api/v1/job.py,sha256=X6jwUL3tNZCR8qXCOBNUEfKd7osFlvT81UqC8zYZeNY,2740
- fractal_server/app/api/v1/project.py,sha256=-sBt9jgwW2DnUuFMs1wV7UBhqBZu68Wb8DjKjV9MpM8,21983
- fractal_server/app/api/v1/task.py,sha256=raoRHkMqJgjfm4ApMX1R-e5sk3a8hC2Sl3_2XlVMw54,11794
- fractal_server/app/api/v1/workflow.py,sha256=elm9mbfcHlis_VUAAnAzxn9FxRMbeSkl-F3Qan3Sdjc,11398
+ fractal_server/app/api/v1/job.py,sha256=UldY2tt904rF95JXaMf7yIkEK0BIVI1k7YchJVr847o,2783
+ fractal_server/app/api/v1/project.py,sha256=WxSnUaY6Xhn1sHlNVKih7iUp6uEJeHzrucfka4heAl8,22441
+ fractal_server/app/api/v1/task.py,sha256=9S62HkQuj8Co1EzBd-vbjxx-fKipbJSwifov0ftNYdo,12138
+ fractal_server/app/api/v1/workflow.py,sha256=DPmDgS3Thf1HS46FZCEni9vQEDkA7wPj0BmBa-T2wU0,11503
  fractal_server/app/db/__init__.py,sha256=Ont7FOvNEEx8-_GFsJ5Lv_5nbxySs_bEpLTdKEzOqDM,2724
  fractal_server/app/models/__init__.py,sha256=MyHn6KDfdPvBpfvcR2QrhWuP1qle3St3e_YzNYcu9y8,372
  fractal_server/app/models/job.py,sha256=qBLchAcwUSXwCUfji5OJhsFVU2pDph8CdJu_mqCVtUE,3242
@@ -18,7 +18,7 @@ fractal_server/app/models/state.py,sha256=uVMwkpalARAni7DNbOQ44v9LikRHJqR7MWlsGy
  fractal_server/app/models/task.py,sha256=3lTZMmHcQK0CgWLjcceyOANdqRt-89n1_z1AMEL2L2Q,1080
  fractal_server/app/models/workflow.py,sha256=Tf4sT4kciDMhGRCTUFWDtEfTvNj3mdWhCx6fURJp9Vc,5371
  fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
- fractal_server/app/runner/__init__.py,sha256=AMOa4XqmxSYISl3z_S0DzgACkSADGeY0gg_nB3mjf4Y,8282
+ fractal_server/app/runner/__init__.py,sha256=dvv6hAQwMGDuJ1S8AiuiCayIX1BDwQC95ywY8VGTmjc,8991
  fractal_server/app/runner/_common.py,sha256=PoiBIaCclo1DqjqrIY4qbEAvnRgxQ4MgN3x7xfqXUNw,19294
  fractal_server/app/runner/_local/__init__.py,sha256=JuwhEinF-Yu5C7kkXKqs5cl3OYzCWSSl8uz1HK2eIYY,5460
  fractal_server/app/runner/_local/_local_config.py,sha256=tz93dzmG2NtgQiCO3k81QgDzNbC1x38_tOjNKNyqYEY,3273
@@ -34,7 +34,7 @@ fractal_server/app/runner/_slurm/_subprocess_run_as_user.py,sha256=k7JWKJ5oSM-WE
  fractal_server/app/runner/_slurm/executor.py,sha256=AgmDD3qXVyyI8FWU-udQLS3PAuc1V84oZC7HoYM6q3o,40272
  fractal_server/app/runner/_slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
  fractal_server/app/runner/common.py,sha256=JvE0MICApdnuE3b0UeCr0A-GBk8XB1IpZM1tG_lUqko,9749
- fractal_server/app/security/__init__.py,sha256=zK1SyIdJHq6n48cOOsd3TK4wFx09EwioyDNv5HhWGug,7379
+ fractal_server/app/security/__init__.py,sha256=MEOl7CN5nMQE-DhFclvSRtpzRVoY-sCqjkaQsIZNJ1c,7400
  fractal_server/common/.git,sha256=Tc99_jnBzoLdeHxTTuFRz3i7bxoCBKM-WemKlGVThi8,49
  fractal_server/common/.github/workflows/ci.yml,sha256=48COJi9_H9cyRV1kWyp54dtS64PoN9Bt9f8IWEYuZTE,717
  fractal_server/common/.github/workflows/project-management.yml,sha256=Qe6bkHmZVGAIL1LP56mL8Aro0vpMElFY85Ozn6DtysY,364
@@ -75,8 +75,8 @@ fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,278
  fractal_server/tasks/__init__.py,sha256=Wzuxf5EoH1v0fYzRpAZHG_S-Z9f6DmbIsuSvllBCGvc,72
  fractal_server/tasks/collection.py,sha256=hvZeOHenJwvjyFgxIxU7OqOwG8SdmCA9MTBuRAt_Onc,12782
  fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
- fractal_server-1.2.2.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-1.2.2.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
- fractal_server-1.2.2.dist-info/METADATA,sha256=nLMyfOnpOBc3PyDqmmwllHc2sCuR1tSO_JQMJQJeaac,3589
- fractal_server-1.2.2.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-1.2.2.dist-info/RECORD,,
+ fractal_server-1.2.3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-1.2.3.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
+ fractal_server-1.2.3.dist-info/METADATA,sha256=OsUYHA4w9Br2-QyFi1ZSg47FgnGFxi-JXiq4rfMeOvE,3589
+ fractal_server-1.2.3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-1.2.3.dist-info/RECORD,,