fractal-server 1.4.3a2__py3-none-any.whl → 1.4.4__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
fractal_server/__init__.py CHANGED
@@ -1 +1 @@
-__VERSION__ = "1.4.3a2"
+__VERSION__ = "1.4.4"
fractal_server/app/routes/admin.py CHANGED
@@ -209,6 +209,7 @@ async def view_job(
     start_timestamp_max: Optional[datetime] = None,
     end_timestamp_min: Optional[datetime] = None,
     end_timestamp_max: Optional[datetime] = None,
+    log: bool = True,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
@@ -232,6 +233,8 @@ async def view_job(
             after `end_timestamp_min`.
         end_timestamp_max: If not `None`, select rows with `end_timestamp`
             before `end_timestamp_max`.
+        log: If `True`, include `job.log`; if `False`,
+            `job.log` is set to `None`.
     """
     stm = select(ApplyWorkflow)

@@ -267,6 +270,9 @@ async def view_job(
     res = await db.execute(stm)
     job_list = res.scalars().all()
     await db.close()
+    if not log:
+        for job in job_list:
+            setattr(job, "log", None)

     return job_list
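
Note: the new `log` query parameter defaults to `True`, so existing clients are unaffected; passing `log=false` strips the potentially large `job.log` field from every row. A minimal client-side sketch (the `/admin/job/` path, base URL, and token handling are assumptions for illustration, not shown in this diff):

    import requests

    BASE_URL = "http://localhost:8000"  # hypothetical deployment
    HEADERS = {"Authorization": "Bearer <superuser-token>"}  # placeholder

    # log=false asks the server to replace each job's `log` with None,
    # keeping the response small when only job metadata is needed.
    res = requests.get(
        f"{BASE_URL}/admin/job/", params={"log": "false"}, headers=HEADERS
    )
    res.raise_for_status()
    assert all(job["log"] is None for job in res.json())

The same trimming pattern recurs below for dataset `history` and task `args_schema`.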
fractal_server/app/routes/api/v1/dataset.py CHANGED
@@ -69,6 +69,7 @@ async def create_dataset(
 )
 async def read_dataset_list(
     project_id: int,
+    history: bool = True,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[DatasetRead]]:
@@ -84,7 +85,12 @@ async def read_dataset_list(
     # `project.dataset_list` - ref
     # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
     stm = select(Dataset).where(Dataset.project_id == project.id)
-    dataset_list = (await db.execute(stm)).scalars().all()
+    res = await db.execute(stm)
+    dataset_list = res.scalars().all()
+    await db.close()
+    if not history:
+        for ds in dataset_list:
+            setattr(ds, "history", [])
     return dataset_list


@@ -516,6 +522,7 @@ async def get_workflowtask_status(

 @router.get("/dataset/", response_model=list[DatasetRead])
 async def get_user_datasets(
+    history: bool = True,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetRead]:
@@ -526,4 +533,8 @@ async def get_user_datasets(
     stm = stm.join(Project).where(Project.user_list.any(User.id == user.id))
     res = await db.execute(stm)
     dataset_list = res.scalars().all()
+    await db.close()
+    if not history:
+        for ds in dataset_list:
+            setattr(ds, "history", [])
     return dataset_list
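
As with job logs above, the `history` flag lets clients skip dataset histories, which can grow large for long-running projects. A sketch of the effect (path and auth details are assumptions mirroring the v1 API layout, not shown in this diff):

    import requests

    BASE_URL = "http://localhost:8000"  # hypothetical deployment
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder
    project_id = 1  # illustrative id

    # history=false replaces each dataset's `history` list with []
    # before serialization; the stored datasets are unchanged.
    res = requests.get(
        f"{BASE_URL}/api/v1/project/{project_id}/dataset/",
        params={"history": "false"},
        headers=HEADERS,
    )
    assert all(ds["history"] == [] for ds in res.json())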
fractal_server/app/routes/api/v1/job.py CHANGED
@@ -11,7 +11,9 @@ from sqlmodel import select
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models import ApplyWorkflow
+from ....models import JobStatusType
 from ....models import Project
+from ....runner._common import WORKFLOW_LOG_FILENAME
 from ....schemas import ApplyWorkflowRead
 from ....security import current_active_user
 from ....security import User
@@ -22,23 +24,26 @@ from ._aux_functions import _get_job_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_workflow_check_owner

-
 router = APIRouter()


 @router.get("/job/", response_model=list[ApplyWorkflowRead])
 async def get_user_jobs(
     user: User = Depends(current_active_user),
+    log: bool = True,
     db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Returns all the jobs of the current user
     """
-
     stm = select(ApplyWorkflow)
     stm = stm.join(Project).where(Project.user_list.any(User.id == user.id))
     res = await db.execute(stm)
     job_list = res.scalars().all()
+    await db.close()
+    if not log:
+        for job in job_list:
+            setattr(job, "log", None)

     return job_list

@@ -72,6 +77,7 @@ async def get_workflow_jobs(
 async def read_job(
     project_id: int,
     job_id: int,
+    show_tmp_logs: bool = False,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
@@ -86,8 +92,12 @@ async def read_job(
         db=db,
     )
     job = output["job"]
-
     await db.close()
+
+    if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
+        with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
+            job.log = f.read()
+
     return job


@@ -132,6 +142,7 @@ async def download_job_logs(
 async def get_job_list(
     project_id: int,
     user: User = Depends(current_active_user),
+    log: bool = True,
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ApplyWorkflowRead]]:
     """
@@ -144,6 +155,10 @@ async def get_job_list(
     stm = select(ApplyWorkflow).where(ApplyWorkflow.project_id == project.id)
     res = await db.execute(stm)
     job_list = res.scalars().all()
+    await db.close()
+    if not log:
+        for job in job_list:
+            setattr(job, "log", None)

     return job_list
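
`show_tmp_logs` makes a running job's log observable before the job finishes: for a job still in the SUBMITTED state, the server reads the current `workflow.log` from the job's working directory into `job.log`. A polling sketch (the endpoint path, ids, and the `"submitted"` status string are assumptions based on the v1 API layout, not shown in this diff):

    import time
    import requests

    BASE_URL = "http://localhost:8000"  # hypothetical deployment
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder
    project_id, job_id = 1, 1  # illustrative ids

    # Poll the job, printing the temporary log while it is still running.
    while True:
        res = requests.get(
            f"{BASE_URL}/api/v1/project/{project_id}/job/{job_id}/",
            params={"show_tmp_logs": "true"},
            headers=HEADERS,
        )
        job = res.json()
        print(job.get("log") or "")
        if job["status"] != "submitted":
            break
        time.sleep(5)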
fractal_server/app/routes/api/v1/project.py CHANGED
@@ -1,4 +1,5 @@
 from datetime import datetime
+from datetime import timedelta
 from datetime import timezone
 from typing import Optional

@@ -416,13 +417,6 @@ async def apply_workflow(
         workflow_dump=dict(
             **workflow.model_dump(exclude={"task_list", "timestamp_created"}),
             timestamp_created=_encode_as_utc(workflow.timestamp_created),
-            task_list=[
-                dict(
-                    **wf_task.model_dump(exclude={"task"}),
-                    task=wf_task.task.model_dump(),
-                )
-                for wf_task in workflow.task_list
-            ],
         ),
         project_dump=dict(
             **project.model_dump(exclude={"user_list", "timestamp_created"}),
@@ -430,6 +424,39 @@ async def apply_workflow(
         ),
         **apply_workflow.dict(),
     )
+
+    # Rate Limiting:
+    # raise `429 TOO MANY REQUESTS` if this endpoint has been called with the
+    # same database keys (Project, Workflow and Datasets) during the last
+    # `settings.FRACTAL_API_SUBMIT_RATE_LIMIT` seconds.
+    stm = (
+        select(ApplyWorkflow)
+        .where(ApplyWorkflow.project_id == project_id)
+        .where(ApplyWorkflow.workflow_id == workflow_id)
+        .where(ApplyWorkflow.input_dataset_id == input_dataset_id)
+        .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
+    )
+    res = await db.execute(stm)
+    db_jobs = res.scalars().all()
+    if db_jobs and any(
+        abs(
+            job.start_timestamp
+            - db_job.start_timestamp.replace(tzinfo=timezone.utc)
+        )
+        < timedelta(seconds=settings.FRACTAL_API_SUBMIT_RATE_LIMIT)
+        for db_job in db_jobs
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+            detail=(
+                f"The endpoint 'POST /{project_id}/workflow/{workflow_id}/"
+                "apply/' was called several times with an interval of less "
+                f"than {settings.FRACTAL_API_SUBMIT_RATE_LIMIT} seconds, using"
+                " the same foreign keys. If it was intentional, please wait "
+                "and try again."
+            ),
+        )
+
     db.add(job)
     await db.commit()
     await db.refresh(job)
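
Because the check compares `start_timestamp` values of existing jobs with the same four foreign keys, an immediate identical resubmission now fails fast instead of creating a duplicate job. A hedged client-side sketch of handling the 429 (URL, body, and parameter placement are illustrative; only the status code and the 2-second default of `FRACTAL_API_SUBMIT_RATE_LIMIT` come from this diff):

    import time
    import requests

    BASE_URL = "http://localhost:8000"  # hypothetical deployment
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder
    url = f"{BASE_URL}/api/v1/project/1/workflow/1/apply/"
    params = {"input_dataset_id": 1, "output_dataset_id": 2}

    res = requests.post(url, params=params, json={}, headers=HEADERS)
    if res.status_code == 429:
        # Same (project, workflow, datasets) keys were submitted within the
        # rate-limit window; wait longer than the default 2 s and retry once.
        time.sleep(3)
        res = requests.post(url, params=params, json={}, headers=HEADERS)
    res.raise_for_status()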
fractal_server/app/routes/api/v1/task.py CHANGED
@@ -29,6 +29,7 @@ logger = set_logger(__name__)
 @router.get("/", response_model=list[TaskRead])
 async def get_list_task(
     user: User = Depends(current_active_user),
+    args_schema: bool = True,
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskRead]:
     """
@@ -38,6 +39,10 @@ async def get_list_task(
     res = await db.execute(stm)
     task_list = res.scalars().all()
     await db.close()
+    if not args_schema:
+        for task in task_list:
+            setattr(task, "args_schema", None)
+
     return task_list
fractal_server/app/runner/__init__.py CHANGED
@@ -33,6 +33,7 @@ from ..models import Dataset
 from ..models import Workflow
 from ..models import WorkflowTask
 from ..schemas import JobStatusType
+from ._common import WORKFLOW_LOG_FILENAME
 from ._local import process_workflow as local_process_workflow
 from .common import close_job_logger
 from .common import JobExecutionError
@@ -204,7 +205,7 @@ async def submit_workflow(

     # Write logs
     logger_name = f"WF{workflow_id}_job{job_id}"
-    log_file_path = WORKFLOW_DIR / "workflow.log"
+    log_file_path = WORKFLOW_DIR / WORKFLOW_LOG_FILENAME
     logger = set_logger(
         logger_name=logger_name,
         log_file_path=log_file_path,
@@ -308,9 +309,9 @@ async def submit_workflow(

         exception_args_string = "\n".join(e.args)
         job.log = (
-            f"TASK ERROR:"
-            f"Task id: {e.workflow_task_id} ({e.task_name}), "
-            f"{e.workflow_task_order=}\n"
+            f"TASK ERROR: "
+            f"Task name: {e.task_name}, "
+            f"position in Workflow: {e.workflow_task_order=}\n"
             f"TRACEBACK:\n{exception_args_string}"
         )
         db_sync.merge(job)
@@ -339,7 +340,7 @@ async def submit_workflow(
         job.status = JobStatusType.FAILED
         job.end_timestamp = get_timestamp()
         error = e.assemble_error()
-        job.log = f"JOB ERROR:\nTRACEBACK:\n{error}"
+        job.log = f"JOB ERROR in Fractal job {job.id}:\nTRACEBACK:\n{error}"
         db_sync.merge(job)
         close_job_logger(logger)
         db_sync.commit()
@@ -367,7 +368,10 @@ async def submit_workflow(

         job.status = JobStatusType.FAILED
         job.end_timestamp = get_timestamp()
-        job.log = f"UNKNOWN ERROR\nOriginal error: {current_traceback}"
+        job.log = (
+            f"UNKNOWN ERROR in Fractal job {job.id}\n"
+            f"TRACEBACK:\n{current_traceback}"
+        )
         db_sync.merge(job)
         close_job_logger(logger)
         db_sync.commit()
fractal_server/app/runner/_common.py CHANGED
@@ -29,6 +29,7 @@ from .common import write_args_file
 HISTORY_FILENAME = "history.json"
 METADATA_FILENAME = "metadata.json"
 SHUTDOWN_FILENAME = "shutdown"
+WORKFLOW_LOG_FILENAME = "workflow.log"


 def no_op_submit_setup_call(
fractal_server/app/schemas/dumps.py CHANGED
@@ -45,7 +45,6 @@ class WorkflowDump(BaseModel):
     id: int
     name: str
     project_id: int
-    task_list: list[WorkflowTaskDump]
     timestamp_created: str
fractal_server/config.py CHANGED
@@ -348,6 +348,11 @@ class Settings(BaseSettings):
     Default values correspond to `vite` defaults.
     """

+    FRACTAL_API_SUBMIT_RATE_LIMIT: int = 2
+    """
+    TBD
+    """
+
     ###########################################################################
     # BUSINESS LOGIC
     ###########################################################################
@@ -423,7 +428,7 @@ class Settings(BaseSettings):

     def check(self):
         """
-        Make sure that mandatory variables are set
+        Make sure that required variables are set

         This method must be called before the server starts
         """
fractal_server/logger.py CHANGED
@@ -12,11 +12,8 @@
 """
 This module provides logging utilities
 """
-import functools
 import logging
-import time
 from pathlib import Path
-from typing import Callable
 from typing import Optional
 from typing import Union

@@ -128,26 +125,3 @@ def close_logger(logger: logging.Logger) -> None:
     """
     for handle in logger.handlers:
         handle.close()
-
-
-def wrap_with_timing_logs(func: Callable):
-    """
-    Wrap a function with start/end logs, including the elapsed time
-    """
-
-    @functools.wraps(func)
-    def wrapped(*args, **kwargs):
-        name = func.__name__
-        logger = set_logger(name)
-        logger.debug(f'START execution of "{name}"')
-
-        t_start = time.perf_counter()
-        res = func(*args, **kwargs)
-        elapsed = time.perf_counter() - t_start
-
-        logger.debug(
-            f'END execution of "{name}"; ' f"elapsed: {elapsed:.3f} seconds"
-        )
-        return res
-
-    return wrapped
fractal_server/main.py CHANGED
@@ -46,7 +46,7 @@ def check_settings() -> None:
     """
     Check and register the settings

-    Verify the consistency of the settings, in particular that mandatory
+    Verify the consistency of the settings, in particular that required
     variables are set.

     Raises:
fractal_server-1.4.3a2.dist-info/METADATA → fractal_server-1.4.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 1.4.3a2
+Version: 1.4.4
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -16,22 +16,22 @@ Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: gunicorn
 Provides-Extra: postgres
 Provides-Extra: slurm
-Requires-Dist: aiosqlite (>=0.17.0,<0.18.0)
+Requires-Dist: aiosqlite (>=0.19.0,<0.20.0)
 Requires-Dist: alembic (>=1.9.1,<2.0.0)
 Requires-Dist: asyncpg (>=0.29.0,<0.30.0) ; extra == "postgres"
 Requires-Dist: bcrypt (==4.0.1)
 Requires-Dist: cloudpickle (>=2.2.1,<2.3.0) ; extra == "slurm"
 Requires-Dist: clusterfutures (>=0.5,<0.6) ; extra == "slurm"
-Requires-Dist: fastapi (>=0.103.0,<0.104.0)
+Requires-Dist: fastapi (>=0.109.0,<0.110.0)
 Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
-Requires-Dist: gunicorn (>=20.1.0,<21.0.0) ; extra == "gunicorn"
+Requires-Dist: gunicorn (>=21.2.0,<22.0.0) ; extra == "gunicorn"
 Requires-Dist: packaging (>=23.2,<24.0)
 Requires-Dist: psycopg2 (>=2.9.5,<3.0.0) ; extra == "postgres"
 Requires-Dist: pydantic (>=1.10.8,<2)
 Requires-Dist: python-dotenv (>=0.21.0,<0.22.0)
 Requires-Dist: sqlalchemy[asyncio] (>=2.0.23,<2.1)
 Requires-Dist: sqlmodel (>=0.0.14,<0.0.15)
-Requires-Dist: uvicorn (>=0.20.0,<0.21.0)
+Requires-Dist: uvicorn (>=0.27.0,<0.28.0)
 Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
fractal_server-1.4.3a2.dist-info/RECORD → fractal_server-1.4.4.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=_ECkBLUQDi6re0WR880xGqfjLkGsSTX-sEdOxAv1efw,24
+fractal_server/__init__.py,sha256=8obn0-T0C9HlycjxZhjVrus2eShmFRKPUdShUa8gkRs,22
 fractal_server/__main__.py,sha256=tI_hCeR1l0Q7PVQilFDssEYYEziPgJO7jLJHp6sm-04,4942
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -13,14 +13,14 @@ fractal_server/app/models/state.py,sha256=rSTjYPfPZntEfdQudKp6yu5vsdyfHA7nMYNRIB
 fractal_server/app/models/task.py,sha256=APndtea9A7EF7TtpVK8kWapBM01a6nk3FFCrQbbioI8,2632
 fractal_server/app/models/workflow.py,sha256=B6v3qqNDb6hvAyDN63n5vkemNueR2aH6zpwSGLlcRNE,3933
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/routes/admin.py,sha256=0fh67CvfNb75GUKnVi7s6EJMDUGk9rlStdAe1-33zRk,12863
+fractal_server/app/routes/admin.py,sha256=oAu5aMQnEibttwpZ1Yw4OznA96Ozln4zEG1v-mBKp4E,13062
 fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
 fractal_server/app/routes/api/v1/__init__.py,sha256=V4nhYyMIqhlJxbotLTYikq_ghb6KID0ZKOOYaOq7C-g,944
 fractal_server/app/routes/api/v1/_aux_functions.py,sha256=wcrYf29PrCrRHAH_8CIOfMge17RGU8iTro4jKvjajDM,11948
-fractal_server/app/routes/api/v1/dataset.py,sha256=4zKZKgX4cAdP_yTuOvFHXvdX9OiayHlrBZlXdOAOi8c,16265
-fractal_server/app/routes/api/v1/job.py,sha256=6AZ-dRQK4roocZZntHGHPNSKtK-WL9xX3apmruVTTj4,4782
-fractal_server/app/routes/api/v1/project.py,sha256=3VbaOgeSYfuizycculuTwyvyDmyzMinvml3oPKD1mEY,14525
-fractal_server/app/routes/api/v1/task.py,sha256=FtGfqhapIOuGj5gxHYXabm2jU4L71h5_5-0VnuzZQ0g,5612
+fractal_server/app/routes/api/v1/dataset.py,sha256=_MRUS4_0kADkuKG0VciBQNFUDFj8PCF9GV9896W4eJc,16553
+fractal_server/app/routes/api/v1/job.py,sha256=voHYyYph5-8XncFrRabf1Kzct1rWMsXtb4l0rDZOajc,5328
+fractal_server/app/routes/api/v1/project.py,sha256=uOrX135mzd9BZr6_j2XEafthRNQEUxBoFErrmhRJJcM,15636
+fractal_server/app/routes/api/v1/task.py,sha256=CwGbmlJYoKlX_PuoV273tALAb0WCNuuc9DxqLkDlUtA,5745
 fractal_server/app/routes/api/v1/task_collection.py,sha256=zKkKd-3hne16hYCaopySvkj1l8HOfWozgjHsQaceGN8,8340
 fractal_server/app/routes/api/v1/workflow.py,sha256=3dfFBUh0qJ_h4zMEsRgPit7g2Nu7v0CczeyfVA_Q4Fw,10864
 fractal_server/app/routes/api/v1/workflowtask.py,sha256=9QrsnZatai4PXvRgD7gfT-8QGRu787-2wenN_6gfYuo,5550
@@ -29,8 +29,8 @@ fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 fractal_server/app/routes/aux/_job.py,sha256=whx2G9PCCt-Hw_lgsZa1ECQlhDKNq4eHvwqgpgvBgwg,1246
 fractal_server/app/routes/aux/_runner.py,sha256=psW6fsoo_VrAHrD5UQPbqFYikCp0m16VRymC-U1yUTk,675
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
-fractal_server/app/runner/__init__.py,sha256=ByYDW7C2qw7s7oxsFdBT1b5wO_qFkH_6EPyDPHxJ5DM,13659
-fractal_server/app/runner/_common.py,sha256=WwFyhqAEe9SHNCrlJq8is-1RF_LZOF9RqawK0JXMy4g,22672
+fractal_server/app/runner/__init__.py,sha256=aqJlx3OWKe2Be4aFuDm8Ul2xTbrfJ2_M-taQw1zPxuM,13795
+fractal_server/app/runner/_common.py,sha256=h2vOrHiJXA5Lmcd58FBRR9LYMWqv2bhrDbZ75V0YHLs,22711
 fractal_server/app/runner/_local/__init__.py,sha256=gHsilCnT9VkqVbKpnEIZCnx4BuDydWcKneeWHWb2410,6799
 fractal_server/app/runner/_local/_local_config.py,sha256=-oNTsjEUmytHlsYpWfw2CrPvSxDFeEhZSdQvI_wf3Mk,3245
 fractal_server/app/runner/_local/_submit_setup.py,sha256=cP4gjQ_3TFgqglscQacp9dB3aqeXup5tVqqWE7TZl9Q,1631
@@ -50,7 +50,7 @@ fractal_server/app/schemas/__init__.py,sha256=vjGKGMM45ywNClHV5KZ2u9eGLCa4p7i6ue
 fractal_server/app/schemas/_validators.py,sha256=s9a6AX4-3Vfoy1Y_HMQA3lXm4FLdmnODYUD4lfsJr6w,2549
 fractal_server/app/schemas/applyworkflow.py,sha256=hDYB5Oao1uq1RURUBSxFJH7L3AO5YTXCqTxnvICkeZA,4264
 fractal_server/app/schemas/dataset.py,sha256=e5rM5vyrxWsuWn-rb0BUaGLYS5BtE_Ksq4Vpi8FjDGM,3375
-fractal_server/app/schemas/dumps.py,sha256=GPeTeg2yoQCPgaQoj_jHA8Lnt1fL_j4QBOb0IKH0lv8,1296
+fractal_server/app/schemas/dumps.py,sha256=ovxbPB6Zfq1t2R8exBHj-jl0clvI-BcVyGfamU25qtY,1258
 fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
 fractal_server/app/schemas/manifest.py,sha256=xxTd39dAXMK9Ox1y-p3gbyg0zd5udW99pV4JngCUGwM,3819
 fractal_server/app/schemas/project.py,sha256=NSileJqsKdvRd7wg5nUC-uAJhyAKuTLIZU1FU_w1HqY,1196
@@ -60,11 +60,11 @@ fractal_server/app/schemas/task_collection.py,sha256=nkbW076pB0wWYyWkFpplyLBBEWu
 fractal_server/app/schemas/user.py,sha256=rE8WgBz-ceVUs0Sz2ZwcjUrSTZTnS0ys5SBtD2XD9r8,3113
 fractal_server/app/schemas/workflow.py,sha256=sbao4_hWHzby5w7syKB045XLLEwsYv-GHczBSMHM8QU,4525
 fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
-fractal_server/config.py,sha256=CpKlEfY5sl7snBJGXvTFoFnfMst2hPQtPlJccrGzm8w,15021
+fractal_server/config.py,sha256=MwlEW9AIr9-MecmilpYkd1v3QvvY-0nfq5g4nh1R8ZE,15088
 fractal_server/data_migrations/1_4_3.py,sha256=UHmyxo97Hv7sG70bGcpqskkhVGd-bdVA8R5F4ybvfPY,10998
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
-fractal_server/logger.py,sha256=keri8i960WHT8Zz9Rm2MwfnrA2dw9TsrfCmojqtGDLs,4562
-fractal_server/main.py,sha256=dyU9jGCJCAGiTF0veggAbcMivs3I6U98341tkSxdcpg,3303
+fractal_server/logger.py,sha256=95duXY8eSxf1HWg0CVn8SUGNzgJw9ZR0FlapDDF6WAY,3924
+fractal_server/main.py,sha256=qtpLWfXXzhdg4TceM7vsrLvdPVAX2LbFmVvdfzkmyD0,3302
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=05EoWw0p43ojTNiz7UVG4lsl057B4ImSgXiHmiU-M80,2690
 fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhRWiYdS5YgNbI,526
@@ -91,8 +91,8 @@ fractal_server/tasks/background_operations.py,sha256=GiDIE4s3tVkjJbUle7rSzQsldiF
 fractal_server/tasks/endpoint_operations.py,sha256=PC94y_sNajyGxNFsgxNGB8FDZF8MuCxquL6l63FJeY4,5549
 fractal_server/tasks/utils.py,sha256=-j8T1VBbjTt5fjP2XdIcs0nBwSkYyuv_yLI1troBg9Q,2274
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-1.4.3a2.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-1.4.3a2.dist-info/METADATA,sha256=hjEtDfvodvt06mC4QvyuucekoVmZ1Xz1h7CS6cdutFk,4265
-fractal_server-1.4.3a2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-fractal_server-1.4.3a2.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-1.4.3a2.dist-info/RECORD,,
+fractal_server-1.4.4.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-1.4.4.dist-info/METADATA,sha256=j0o06oZZfm9Qnfs93U5nQSM-pxe9uduwfdbjedgunqg,4263
+fractal_server-1.4.4.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+fractal_server-1.4.4.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-1.4.4.dist-info/RECORD,,