fractal-server 1.3.14a0__py3-none-any.whl → 1.4.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

fractal_server/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "1.3.14a0"
+ __VERSION__ = "1.4.0a1"

fractal_server/app/api/__init__.py CHANGED
@@ -7,6 +7,7 @@ from ...config import get_settings
  from ...syringe import Inject
  from .v1.dataset import router as dataset_router
  from .v1.job import router as job_router
+ from .v1.monitoring import router as router_monitoring # noqa
  from .v1.project import router as project_router
  from .v1.task import router as task_router
  from .v1.task_collection import router as taskcollection_router

fractal_server/app/api/v1/dataset.py CHANGED
@@ -7,20 +7,19 @@ from fastapi import Depends
  from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
- from sqlmodel import or_
  from sqlmodel import select

  from ...db import AsyncSession
  from ...db import get_db
  from ...models import ApplyWorkflow
  from ...models import Dataset
- from ...models import JobStatusType
  from ...models import Resource
  from ...runner._common import HISTORY_FILENAME
  from ...schemas import DatasetCreate
  from ...schemas import DatasetRead
  from ...schemas import DatasetStatusRead
  from ...schemas import DatasetUpdate
+ from ...schemas import JobStatusType
  from ...schemas import ResourceCreate
  from ...schemas import ResourceRead
  from ...schemas import ResourceUpdate
@@ -145,24 +144,6 @@ async def delete_dataset(
      )
      dataset = output["dataset"]

-     # Check that no ApplyWorkflow is in relationship with the current Dataset
-     stm = select(ApplyWorkflow).filter(
-         or_(
-             ApplyWorkflow.input_dataset_id == dataset_id,
-             ApplyWorkflow.output_dataset_id == dataset_id,
-         )
-     )
-     res = await db.execute(stm)
-     job = res.scalars().first()
-     if job:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Cannot remove dataset {dataset_id}: "
-                 f"it's still linked to job {job.id}."
-             ),
-         )
-
      await db.delete(dataset)
      await db.commit()
      await db.close()

fractal_server/app/api/v1/job.py CHANGED
@@ -7,6 +7,7 @@ from zipfile import ZipFile
  from fastapi import APIRouter
  from fastapi import Depends
  from fastapi import HTTPException
+ from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse
  from sqlmodel import select
@@ -22,11 +23,50 @@ from ...security import current_active_user
  from ...security import User
  from ._aux_functions import _get_job_check_owner
  from ._aux_functions import _get_project_check_owner
+ from ._aux_functions import _get_workflow_check_owner


  router = APIRouter()


+ @router.get("/project/job/", response_model=list[ApplyWorkflowRead])
+ async def get_user_jobs(
+     user: User = Depends(current_active_user),
+ ) -> list[ApplyWorkflowRead]:
+     """
+     Returns all the jobs of the current user
+     """
+
+     job_list = [
+         job for project in user.project_list for job in project.job_list
+     ]
+
+     return job_list
+
+
+ @router.get(
+     "/project/{project_id}/workflow/{workflow_id}/job/",
+     response_model=list[ApplyWorkflowRead],
+ )
+ async def get_workflow_jobs(
+     project_id: int,
+     workflow_id: int,
+     user: User = Depends(current_active_user),
+     db: AsyncSession = Depends(get_db),
+ ) -> Optional[list[ApplyWorkflowRead]]:
+     """
+     Returns all the jobs related to a specific workflow
+     """
+
+     workflow = await _get_workflow_check_owner(
+         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+     )
+     job_list = workflow.job_list
+     await db.close()
+
+     return job_list
+
+
  @router.get(
      "/project/{project_id}/job/{job_id}",
      response_model=ApplyWorkflowRead,
@@ -119,14 +159,14 @@ async def get_job_list(

  @router.get(
      "/project/{project_id}/job/{job_id}/stop/",
-     status_code=200,
+     status_code=204,
  )
  async def stop_job(
      project_id: int,
      job_id: int,
      user: User = Depends(current_active_user),
      db: AsyncSession = Depends(get_db),
- ) -> Optional[ApplyWorkflow]:
+ ) -> Response:
      """
      Stop execution of a workflow job (only available for slurm backend)
      """
@@ -161,4 +201,4 @@ async def stop_job(
      with shutdown_file.open("w") as f:
          f.write(f"Trigger executor shutdown for {job.id=}, {project_id=}.")

-     return job
+     return Response(status_code=status.HTTP_204_NO_CONTENT)
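
A minimal sketch of how a client might exercise the new and changed job endpoints shown above; the /api/v1 prefix comes from collect_routers in fractal_server/main.py (further down in this diff), while the base URL and bearer token are placeholders rather than part of the release:

    import httpx

    BASE = "http://localhost:8000"  # hypothetical deployment URL
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credentials

    # New: all jobs across the current user's projects
    jobs = httpx.get(f"{BASE}/api/v1/project/job/", headers=HEADERS).json()

    # New: all jobs of workflow 2 inside project 1
    wf_jobs = httpx.get(
        f"{BASE}/api/v1/project/1/workflow/2/job/", headers=HEADERS
    ).json()

    # Changed: stopping a job now returns 204 No Content instead of the job object
    resp = httpx.get(f"{BASE}/api/v1/project/1/job/3/stop/", headers=HEADERS)
    assert resp.status_code == 204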

fractal_server/app/api/v1/monitoring.py ADDED
@@ -0,0 +1,150 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from sqlalchemy import func
+ from sqlmodel import select
+
+ from ...db import AsyncSession
+ from ...db import get_db
+ from ...models import ApplyWorkflow
+ from ...models import Dataset
+ from ...models import JobStatusType
+ from ...models import Project
+ from ...models import Workflow
+ from ...schemas import ApplyWorkflowRead
+ from ...schemas import DatasetRead
+ from ...schemas import ProjectRead
+ from ...schemas import WorkflowRead
+ from ...security import current_active_superuser
+ from ...security import User
+
+
+ router = APIRouter()
+
+
+ @router.get("/project/", response_model=list[ProjectRead])
+ async def monitor_project(
+     id: Optional[int] = None,
+     user_id: Optional[int] = None,
+     user: User = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_db),
+ ) -> list[ProjectRead]:
+
+     stm = select(Project)
+
+     if id is not None:
+         stm = stm.where(Project.id == id)
+
+     if user_id is not None:
+         stm = stm.where(Project.user_list.any(User.id == user_id))
+
+     res = await db.execute(stm)
+     project_list = res.scalars().all()
+     await db.close()
+
+     return project_list
+
+
+ @router.get("/workflow/", response_model=list[WorkflowRead])
+ async def monitor_workflow(
+     id: Optional[int] = None,
+     project_id: Optional[int] = None,
+     name_contains: Optional[str] = None,
+     user: User = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_db),
+ ) -> list[WorkflowRead]:
+     stm = select(Workflow)
+
+     if id is not None:
+         stm = stm.where(Workflow.id == id)
+     if project_id is not None:
+         stm = stm.where(Workflow.project_id == project_id)
+     if name_contains is not None:
+         # SQLAlchemy2: use icontains
+         stm = stm.where(
+             func.lower(Workflow.name).contains(name_contains.lower())
+         )
+
+     res = await db.execute(stm)
+     workflow_list = res.scalars().all()
+     await db.close()
+
+     return workflow_list
+
+
+ @router.get("/dataset/", response_model=list[DatasetRead])
+ async def monitor_dataset(
+     id: Optional[int] = None,
+     project_id: Optional[int] = None,
+     name_contains: Optional[str] = None,
+     type: Optional[str] = None,
+     user: User = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_db),
+ ) -> list[DatasetRead]:
+     stm = select(Dataset)
+
+     if id is not None:
+         stm = stm.where(Dataset.id == id)
+     if project_id is not None:
+         stm = stm.where(Dataset.project_id == project_id)
+     if name_contains is not None:
+         # SQLAlchemy2: use icontains
+         stm = stm.where(
+             func.lower(Dataset.name).contains(name_contains.lower())
+         )
+     if type is not None:
+         stm = stm.where(Dataset.type == type)
+
+     res = await db.execute(stm)
+     dataset_list = res.scalars().all()
+     await db.close()
+
+     return dataset_list
+
+
+ @router.get("/job/", response_model=list[ApplyWorkflowRead])
+ async def monitor_job(
+     id: Optional[int] = None,
+     project_id: Optional[int] = None,
+     input_dataset_id: Optional[int] = None,
+     output_dataset_id: Optional[int] = None,
+     workflow_id: Optional[int] = None,
+     status: Optional[JobStatusType] = None,
+     start_timestamp_min: Optional[datetime] = None,
+     start_timestamp_max: Optional[datetime] = None,
+     end_timestamp_min: Optional[datetime] = None,
+     end_timestamp_max: Optional[datetime] = None,
+     user: User = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_db),
+ ) -> list[ApplyWorkflowRead]:
+
+     stm = select(ApplyWorkflow)
+
+     if id is not None:
+         stm = stm.where(ApplyWorkflow.id == id)
+     if project_id is not None:
+         stm = stm.where(ApplyWorkflow.project_id == project_id)
+     if input_dataset_id is not None:
+         stm = stm.where(ApplyWorkflow.input_dataset_id == input_dataset_id)
+     if output_dataset_id is not None:
+         stm = stm.where(ApplyWorkflow.output_dataset_id == output_dataset_id)
+     if workflow_id is not None:
+         stm = stm.where(ApplyWorkflow.workflow_id == workflow_id)
+     if status is not None:
+         stm = stm.where(ApplyWorkflow.status == status)
+     if start_timestamp_min is not None:
+         stm = stm.where(ApplyWorkflow.start_timestamp >= start_timestamp_min)
+     if start_timestamp_max is not None:
+         stm = stm.where(ApplyWorkflow.start_timestamp <= start_timestamp_max)
+     if end_timestamp_min is not None:
+         stm = stm.where(ApplyWorkflow.end_timestamp >= end_timestamp_min)
+     if end_timestamp_max is not None:
+         stm = stm.where(ApplyWorkflow.end_timestamp <= end_timestamp_max)
+
+     res = await db.execute(stm)
+     job_list = res.scalars().all()
+     await db.close()
+
+     return job_list
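
The monitoring router above is mounted under the /monitoring prefix and restricted to superusers (see the fractal_server/main.py hunk below). A sketch of filtered queries against it, again with placeholder URL and token:

    import httpx

    BASE = "http://localhost:8000"  # hypothetical deployment URL
    HEADERS = {"Authorization": "Bearer <superuser-token>"}  # placeholder

    # Projects that user 1 belongs to
    projects = httpx.get(
        f"{BASE}/monitoring/project/", params={"user_id": 1}, headers=HEADERS
    ).json()

    # Failed jobs of workflow 2 that started after 2023-10-01
    failed_jobs = httpx.get(
        f"{BASE}/monitoring/job/",
        params={
            "workflow_id": 2,
            "status": "failed",
            "start_timestamp_min": "2023-10-01T00:00:00",
        },
        headers=HEADERS,
    ).json()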

fractal_server/app/api/v1/project.py CHANGED
@@ -18,7 +18,6 @@ from ...db import DBSyncSession
  from ...db import get_db
  from ...db import get_sync_db
  from ...models import ApplyWorkflow
- from ...models import JobStatusType
  from ...models import LinkUserProject
  from ...models import Project
  from ...runner import submit_workflow
@@ -26,6 +25,7 @@ from ...runner import validate_workflow_compatibility
  from ...runner.common import set_start_and_last_task_index
  from ...schemas import ApplyWorkflowCreate
  from ...schemas import ApplyWorkflowRead
+ from ...schemas import JobStatusType
  from ...schemas import ProjectCreate
  from ...schemas import ProjectRead
  from ...schemas import ProjectUpdate
@@ -302,10 +302,13 @@ async def apply_workflow(
          input_dataset_id=input_dataset_id,
          output_dataset_id=output_dataset_id,
          workflow_id=workflow_id,
+         user_email=user.email,
+         input_dataset_dump=input_dataset.dict(),
+         output_dataset_dump=output_dataset.dict(),
          workflow_dump=dict(
              workflow.dict(exclude={"task_list"}),
              task_list=[
-                 dict(wf_task.task.dict(exclude={"task"}), task=wf_task.dict())
+                 dict(wf_task.dict(exclude={"task"}), task=wf_task.task.dict())
                  for wf_task in workflow.task_list
              ],
          ),

fractal_server/app/api/v1/workflow.py CHANGED
@@ -23,7 +23,6 @@ from ....logger import close_logger
  from ....logger import set_logger
  from ...db import AsyncSession
  from ...db import get_db
- from ...models import ApplyWorkflow
  from ...models import Task
  from ...models import Workflow
  from ...schemas import WorkflowCreate
@@ -186,19 +185,6 @@ async def delete_workflow(
          project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
      )

-     # Check that no ApplyWorkflow is in relationship with the current Workflow
-     stm = select(ApplyWorkflow).where(ApplyWorkflow.workflow_id == workflow_id)
-     res = await db.execute(stm)
-     job = res.scalars().first()
-     if job:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Cannot remove workflow {workflow_id}: "
-                 f"it's still linked to job {job.id}."
-             ),
-         )
-
      await db.delete(workflow)
      await db.commit()


fractal_server/app/models/dataset.py CHANGED
@@ -10,6 +10,7 @@ from sqlmodel import SQLModel

  from ..schemas.dataset import _DatasetBase
  from ..schemas.dataset import _ResourceBase
+ from .job import ApplyWorkflow


  class Resource(_ResourceBase, SQLModel, table=True):
@@ -37,6 +38,19 @@ class Dataset(_DatasetBase, SQLModel, table=True):

      id: Optional[int] = Field(default=None, primary_key=True)
      project_id: int = Field(foreign_key="project.id")
+
+     list_jobs_input: list[ApplyWorkflow] = Relationship( # noqa: F821
+         sa_relationship_kwargs=dict(
+             lazy="selectin",
+             primaryjoin="ApplyWorkflow.input_dataset_id==Dataset.id",
+         )
+     )
+     list_jobs_output: list[ApplyWorkflow] = Relationship( # noqa: F821
+         sa_relationship_kwargs=dict(
+             lazy="selectin",
+             primaryjoin="ApplyWorkflow.output_dataset_id==Dataset.id",
+         )
+     )
      resource_list: list[Resource] = Relationship(
          sa_relationship_kwargs={
              "lazy": "selectin",
@@ -45,6 +59,7 @@ class Dataset(_DatasetBase, SQLModel, table=True):
              "cascade": "all, delete-orphan",
          }
      )
+
      meta: dict[str, Any] = Field(sa_column=Column(JSON), default={})
      history: list[dict[str, Any]] = Field(
          sa_column=Column(JSON, server_default="[]", nullable=False)

fractal_server/app/models/job.py CHANGED
@@ -1,5 +1,4 @@
  from datetime import datetime
- from enum import Enum
  from typing import Any
  from typing import Optional

@@ -7,38 +6,11 @@ from sqlalchemy import Column
  from sqlalchemy.types import DateTime
  from sqlalchemy.types import JSON
  from sqlmodel import Field
- from sqlmodel import Relationship
  from sqlmodel import SQLModel

  from ...utils import get_timestamp
+ from ..schemas import JobStatusType
  from ..schemas.applyworkflow import _ApplyWorkflowBase
- from .dataset import Dataset
- from .workflow import Workflow
-
-
- class JobStatusType(str, Enum):
-     """
-     Define the job status available
-
-     Attributes:
-         SUBMITTED:
-             The workflow has been applied but not yet scheduled with an
-             executor. In this phase, due diligence takes place, such as
-             creating working directory, assemblying arguments, etc.
-         RUNNING:
-             The workflow was scheduled with an executor. Note that it might not
-             yet be running within the executor, e.g., jobs could still be
-             pending within a SLURM executor.
-         DONE:
-             The workflow was applied successfully
-         FAILED:
-             The workflow terminated with an error.
-     """
-
-     SUBMITTED = "submitted"
-     RUNNING = "running"
-     DONE = "done"
-     FAILED = "failed"


  class ApplyWorkflow(_ApplyWorkflowBase, SQLModel, table=True):
@@ -87,37 +59,36 @@ class ApplyWorkflow(_ApplyWorkflowBase, SQLModel, table=True):
          arbitrary_types_allowed = True

      id: Optional[int] = Field(default=None, primary_key=True)
-     project_id: int = Field(foreign_key="project.id")
-     input_dataset_id: int = Field(foreign_key="dataset.id")
-     output_dataset_id: int = Field(foreign_key="dataset.id")
-     workflow_id: int = Field(foreign_key="workflow.id")
-     working_dir: Optional[str]
-     working_dir_user: Optional[str]
-     first_task_index: int
-     last_task_index: int

-     input_dataset: Dataset = Relationship(
-         sa_relationship_kwargs=dict(
-             lazy="selectin",
-             primaryjoin="ApplyWorkflow.input_dataset_id==Dataset.id",
-         )
+     project_id: Optional[int] = Field(foreign_key="project.id")
+
+     workflow_id: Optional[int] = Field(foreign_key="workflow.id")
+
+     input_dataset_id: Optional[int] = Field(foreign_key="dataset.id")
+     output_dataset_id: Optional[int] = Field(foreign_key="dataset.id")
+
+     user_email: str = Field(nullable=False)
+     input_dataset_dump: dict[str, Any] = Field(
+         sa_column=Column(JSON, nullable=False)
+     )
+     output_dataset_dump: dict[str, Any] = Field(
+         sa_column=Column(JSON, nullable=False)
      )
-     output_dataset: Dataset = Relationship(
-         sa_relationship_kwargs=dict(
-             lazy="selectin",
-             primaryjoin="ApplyWorkflow.output_dataset_id==Dataset.id",
-         )
+     workflow_dump: Optional[dict[str, Any]] = Field(
+         sa_column=Column(JSON, nullable=True)
      )
-     workflow: Workflow = Relationship()

-     workflow_dump: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
+     working_dir: Optional[str]
+     working_dir_user: Optional[str]
+     first_task_index: int
+     last_task_index: int

      start_timestamp: datetime = Field(
          default_factory=get_timestamp,
-         sa_column=Column(DateTime(timezone=True)),
+         sa_column=Column(DateTime(timezone=True), nullable=False),
      )
      end_timestamp: Optional[datetime] = Field(
          default=None, sa_column=Column(DateTime(timezone=True))
      )
-     status: JobStatusType = JobStatusType.SUBMITTED
+     status: str = JobStatusType.SUBMITTED
      log: Optional[str] = None

fractal_server/app/models/project.py CHANGED
@@ -37,9 +37,8 @@ class Project(_ProjectBase, SQLModel, table=True):
          },
      )

-     job_list: list[ApplyWorkflow] = Relationship( # noqa
+     job_list: list[ApplyWorkflow] = Relationship(
          sa_relationship_kwargs={
              "lazy": "selectin",
-             "cascade": "all, delete-orphan",
-         },
+         }
      )

fractal_server/app/models/workflow.py CHANGED
@@ -13,6 +13,7 @@ from sqlmodel import SQLModel
  from ..db import AsyncSession
  from ..schemas.workflow import _WorkflowBase
  from ..schemas.workflow import _WorkflowTaskBase
+ from .job import ApplyWorkflow
  from .task import Task


@@ -106,7 +107,7 @@ class Workflow(_WorkflowBase, SQLModel, table=True):
      id: Optional[int] = Field(default=None, primary_key=True)
      project_id: int = Field(foreign_key="project.id")

-     task_list: list["WorkflowTask"] = Relationship(
+     task_list: list[WorkflowTask] = Relationship(
          sa_relationship_kwargs=dict(
              lazy="selectin",
              order_by="WorkflowTask.order",
@@ -114,6 +115,9 @@ class Workflow(_WorkflowBase, SQLModel, table=True):
              cascade="all, delete-orphan",
          ),
      )
+     job_list: list[ApplyWorkflow] = Relationship(
+         sa_relationship_kwargs={"lazy": "selectin"}
+     )

      async def insert_task(
          self,

fractal_server/app/runner/__init__.py CHANGED
@@ -30,9 +30,9 @@ from ...utils import get_timestamp
  from ..db import DB
  from ..models import ApplyWorkflow
  from ..models import Dataset
- from ..models import JobStatusType
  from ..models import Workflow
  from ..models import WorkflowTask
+ from ..schemas import JobStatusType
  from ._local import process_workflow as local_process_workflow
  from .common import close_job_logger
  from .common import JobExecutionError
@@ -187,6 +187,22 @@ async def submit_workflow(
      job.status = JobStatusType.RUNNING
      db_sync.merge(job)
      db_sync.commit()
+
+     # After Session.commit() is called, either explicitly or when using a
+     # context manager, all objects associated with the Session are expired.
+     # https://docs.sqlalchemy.org/en/14/orm/
+     # session_basics.html#opening-and-closing-a-session
+     # https://docs.sqlalchemy.org/en/14/orm/
+     # session_state_management.html#refreshing-expiring
+
+     # See issue #928:
+     # https://github.com/fractal-analytics-platform/
+     # fractal-server/issues/928
+
+     db_sync.refresh(input_dataset)
+     db_sync.refresh(output_dataset)
+     db_sync.refresh(workflow)
+
      # Write logs
      logger_name = f"WF{workflow_id}_job{job_id}"
      log_file_path = WORKFLOW_DIR / "workflow.log"

fractal_server/app/schemas/__init__.py CHANGED
@@ -3,6 +3,7 @@ Schemas for API request/response bodies
  """
  from .applyworkflow import ApplyWorkflowCreate # noqa: F401
  from .applyworkflow import ApplyWorkflowRead # noqa: F401
+ from .applyworkflow import JobStatusType # noqa: F401
  from .dataset import DatasetCreate # noqa: F401
  from .dataset import DatasetRead # noqa: F401
  from .dataset import DatasetStatusRead # noqa: F401
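
JobStatusType now lives in the schemas package and is re-exported here, replacing the old definition in app/models/job.py (see the model hunks above). A sketch of the import that downstream code can use, with the full module path assumed from the package layout listed in the RECORD below:

    from fractal_server.app.schemas import JobStatusType

    # The enum values themselves are unchanged
    assert JobStatusType.DONE.value == "done"
    assert JobStatusType.FAILED.value == "failed"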

fractal_server/app/schemas/applyworkflow.py CHANGED
@@ -1,4 +1,5 @@
  from datetime import datetime
+ from enum import Enum
  from typing import Any
  from typing import Optional

@@ -14,6 +15,31 @@ __all__ = (
  )


+ class JobStatusType(str, Enum):
+     """
+     Define the available job statuses
+
+     Attributes:
+         SUBMITTED:
+             The workflow has been applied but not yet scheduled with an
+             executor. In this phase, due diligence takes place, such as
+             creating working directory, assemblying arguments, etc.
+         RUNNING:
+             The workflow was scheduled with an executor. Note that it might not
+             yet be running within the executor, e.g., jobs could still be
+             pending within a SLURM executor.
+         DONE:
+             The workflow was applied successfully
+         FAILED:
+             The workflow terminated with an error.
+     """
+
+     SUBMITTED = "submitted"
+     RUNNING = "running"
+     DONE = "done"
+     FAILED = "failed"
+
+
  class _ApplyWorkflowBase(BaseModel):
      """
      Base class for `ApplyWorkflow`.
@@ -99,14 +125,17 @@ class ApplyWorkflowRead(_ApplyWorkflowBase):

      id: int
      project_id: int
+     user_email: str
      workflow_id: int
+     workflow_dump: Optional[dict[str, Any]]
      input_dataset_id: int
+     input_dataset_dump: Optional[dict[str, Any]]
      output_dataset_id: int
+     output_dataset_dump: Optional[dict[str, Any]]
      start_timestamp: datetime
      end_timestamp: Optional[datetime]
      status: str
      log: Optional[str]
-     workflow_dump: Optional[dict[str, Any]]
      working_dir: Optional[str]
      working_dir_user: Optional[str]
      first_task_index: Optional[int]

fractal_server/app/schemas/project.py CHANGED
@@ -4,7 +4,9 @@ from pydantic import BaseModel
  from pydantic import validator

  from ._validators import valstr
+ from .applyworkflow import ApplyWorkflowRead
  from .dataset import DatasetRead
+ from .workflow import WorkflowRead


  __all__ = (
@@ -47,6 +49,8 @@ class ProjectRead(_ProjectBase):

      id: int
      dataset_list: list[DatasetRead] = []
+     workflow_list: list[WorkflowRead] = []
+     job_list: list[ApplyWorkflowRead] = []


  class ProjectUpdate(_ProjectBase):
fractal_server/main.py CHANGED
@@ -48,10 +48,15 @@ def collect_routers(app: FastAPI) -> None:
      """
      from .app.api import router_default
      from .app.api import router_v1
+     from .app.api import router_monitoring
+
      from .app.security import auth_router

      app.include_router(router_default, prefix="/api")
      app.include_router(router_v1, prefix="/api/v1")
+     app.include_router(
+         router_monitoring, prefix="/monitoring", tags=["Monitoring"]
+     )
      app.include_router(auth_router, prefix="/auth", tags=["auth"])



fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py ADDED
@@ -0,0 +1,86 @@
+ """Add dumps to ApplyWorkflow
+
+ Revision ID: 84bf0fffde30
+ Revises: 99ea79d9e5d2
+ Create Date: 2023-10-26 16:11:44.061971
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "84bf0fffde30"
+ down_revision = "99ea79d9e5d2"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("applyworkflow", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "user_email",
+                 sa.String(),
+                 server_default="__UNDEFINED__",
+                 nullable=False,
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "input_dataset_dump",
+                 sa.JSON(),
+                 server_default="{}",
+                 nullable=False,
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "output_dataset_dump",
+                 sa.JSON(),
+                 server_default="{}",
+                 nullable=False,
+             )
+         )
+
+         batch_op.alter_column(
+             "project_id", existing_type=sa.INTEGER(), nullable=True
+         )
+         batch_op.alter_column(
+             "workflow_id", existing_type=sa.INTEGER(), nullable=True
+         )
+         batch_op.alter_column(
+             "input_dataset_id", existing_type=sa.INTEGER(), nullable=True
+         )
+         batch_op.alter_column(
+             "output_dataset_id", existing_type=sa.INTEGER(), nullable=True
+         )
+
+     with op.batch_alter_table("applyworkflow", schema=None) as batch_op:
+         batch_op.alter_column("user_email", server_default=None)
+         batch_op.alter_column("input_dataset_dump", server_default=None)
+         batch_op.alter_column("output_dataset_dump", server_default=None)
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("applyworkflow", schema=None) as batch_op:
+         batch_op.alter_column(
+             "output_dataset_id", existing_type=sa.INTEGER(), nullable=False
+         )
+         batch_op.alter_column(
+             "input_dataset_id", existing_type=sa.INTEGER(), nullable=False
+         )
+         batch_op.alter_column(
+             "workflow_id", existing_type=sa.INTEGER(), nullable=False
+         )
+         batch_op.alter_column(
+             "project_id", existing_type=sa.INTEGER(), nullable=False
+         )
+         batch_op.drop_column("output_dataset_dump")
+         batch_op.drop_column("input_dataset_dump")
+         batch_op.drop_column("user_email")
+
+     # ### end Alembic commands ###

fractal_server/migrations/versions/e75cac726012_make_applyworkflow_start_timestamp_not_.py ADDED
@@ -0,0 +1,35 @@
+ """Make ApplyWorkflow.start_timestamp not nullable
+
+ Revision ID: e75cac726012
+ Revises: 84bf0fffde30
+ Create Date: 2023-10-30 15:51:18.808789
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "e75cac726012"
+ down_revision = "84bf0fffde30"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("applyworkflow", schema=None) as batch_op:
+         batch_op.alter_column(
+             "start_timestamp", existing_type=sa.DATETIME(), nullable=False
+         )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("applyworkflow", schema=None) as batch_op:
+         batch_op.alter_column(
+             "start_timestamp", existing_type=sa.DATETIME(), nullable=True
+         )
+
+     # ### end Alembic commands ###
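
The two migrations above chain onto revision 99ea79d9e5d2 (84bf0fffde30 first, then e75cac726012). A sketch of applying them programmatically with Alembic, assuming the database URL is configured as in the packaged alembic.ini and env.py:

    # Sketch only: bring an existing fractal-server database up to the new head.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")  # path to the packaged Alembic config (assumption)
    command.upgrade(cfg, "head")  # applies 84bf0fffde30, then e75cac726012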

fractal_server-1.4.0a1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 1.3.14a0
+ Version: 1.4.0a1
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause

fractal_server-1.4.0a1.dist-info/RECORD CHANGED
@@ -1,29 +1,30 @@
- fractal_server/__init__.py,sha256=iPNo2MDnIP2-b3B7XSPtF_n3wyAA3KmQilmX-gUiWo8,25
+ fractal_server/__init__.py,sha256=FghpTUvsrKafSXB-cqMEGar7ZwCBi90wwLD4GOQ4kfs,24
  fractal_server/__main__.py,sha256=znijcImbcEC4P26ICOhEJ9VY3_5vWdMwQcl-WP25sYA,2202
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/app/api/__init__.py,sha256=_g30kAzDmakaxQQYwSZwVbid1O-3zMzQqnSuQZOWI3U,1192
+ fractal_server/app/api/__init__.py,sha256=zYFP-qSzNrgPYwqBza-NYGjSm-PyAnHXftmJsd1SOlo,1255
  fractal_server/app/api/v1/__init__.py,sha256=2HMymr1YkUk39V8iof7KENyLnre4ghouOSvNZ_kF1ec,24
  fractal_server/app/api/v1/_aux_functions.py,sha256=_-D-iTKvOfH1yufa4lIHvP88Sgic8cvKXq3GmDHC-lk,9659
- fractal_server/app/api/v1/dataset.py,sha256=wWMtz48i_XzK8Ja-1u1t1XZLY9gXWqlJR8V8tJo4z_Q,14564
- fractal_server/app/api/v1/job.py,sha256=k1TxhmU_VMiHb6RxJUsiVaLWcH5GvtwsB44DsnmxNmc,4550
- fractal_server/app/api/v1/project.py,sha256=LlNkkKpJ7WtEcMOQ1KzGwxYqxScUex-wTlu1x1KL4rs,10236
+ fractal_server/app/api/v1/dataset.py,sha256=sCtOPfKWiuhCzM2w2gPx2UsKQKDQizTZojw1biqi7qw,13952
+ fractal_server/app/api/v1/job.py,sha256=APac5RLanwCf_3oY1PbWpKASQP4i1CJWq-bFtnEKpxg,5613
+ fractal_server/app/api/v1/monitoring.py,sha256=LN9RI0iST_NshPfmnOa7vFW_wKyZbcA3gBDjFY-9ffw,4736
+ fractal_server/app/api/v1/project.py,sha256=ZJCZi1CYqkdBXI83jzqZ8W3qPutYGxrh538TLfNfZA8,10368
  fractal_server/app/api/v1/task.py,sha256=0MJNhn5f8KZy4XBMUoJNhrk3E6GBQWcVfKoQzP5XXWw,5582
  fractal_server/app/api/v1/task_collection.py,sha256=mY1cSGepWvVz6IJCnFYA8iy4hU-8qsA1HbiQXZjg1OM,11697
- fractal_server/app/api/v1/workflow.py,sha256=A54m9PDD7MhT6s5-8kIrefEJ5uVxigxBohelT7XCLVo,9379
+ fractal_server/app/api/v1/workflow.py,sha256=TwSRMGHgLJZxwgHI24q8Z7-ONu2CL1YVpxG8yBYE_yk,8858
  fractal_server/app/api/v1/workflowtask.py,sha256=TIsCSBFImoRq0rz16ZVlFwTL-Qd9Uqywbq-DT4OxYh0,5421
  fractal_server/app/db/__init__.py,sha256=4DP-jOZDLawKwPTToOVTR0vO2bc-R7s0lwZUXFv7k5s,3464
  fractal_server/app/models/__init__.py,sha256=RuxWH8fsmkTWsjLhYjrxSt-mvk74coCilAQlX2Q6OO0,353
- fractal_server/app/models/dataset.py,sha256=CEXKMwiMMtFTShc85zzac50ADeF21SFvGR-C7lCBn7A,1628
- fractal_server/app/models/job.py,sha256=G00iw0z0n8cM3vNxPqD0GxrV90g7r2QPib4eDX0Fsk0,3729
+ fractal_server/app/models/dataset.py,sha256=fcZkb2y7PXlFJAyZtndJ7Gf4c8VpkWjZe47iMybv1aE,2109
+ fractal_server/app/models/job.py,sha256=DbX7OMx88eC-232C_OdYOpNeyN0tma7p8J3x7HB43os,2768
  fractal_server/app/models/linkuserproject.py,sha256=RVtl25Q_N99uoVDE7wx0IN0SgFjc7Id5XbScsgrjv_E,309
- fractal_server/app/models/project.py,sha256=U3MvLVBestug5dCCw22VA-l8BeUnGoaNye5OPexsqoA,1191
+ fractal_server/app/models/project.py,sha256=SNY8CCDHjd_iv1S7theFaRuVy87BuppTrByHFkJqcpE,1137
  fractal_server/app/models/security.py,sha256=Dp54Hf7I72oo9PERdyR0_zStw2ppYlFVi5MhFWIE6Lw,2438
  fractal_server/app/models/state.py,sha256=rSTjYPfPZntEfdQudKp6yu5vsdyfHA7nMYNRIBWsiAQ,1087
  fractal_server/app/models/task.py,sha256=APndtea9A7EF7TtpVK8kWapBM01a6nk3FFCrQbbioI8,2632
- fractal_server/app/models/workflow.py,sha256=VlX-MNfqw3z-EVKMvwVR9HbnOFNHRnO-5sDYFQOSijQ,5191
+ fractal_server/app/models/workflow.py,sha256=r_bdKxzGgRjiwJW2fMYV9pvqcAs2swh1xg_q8pvXSbE,5328
  fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
- fractal_server/app/runner/__init__.py,sha256=Y-B9lpw3Hbuc9uTCG0qcDDz5uMvnWXyxd3B5nfpbEdw,12978
+ fractal_server/app/runner/__init__.py,sha256=WNMxT9XFEocPOdJRN6FfMek-LecOpBYXfJxmxupMRUE,13597
  fractal_server/app/runner/_common.py,sha256=XjyE8DZE6WECeFXI6i0vHVD6JywZQxkJgZrL-ep1USQ,22642
  fractal_server/app/runner/_local/__init__.py,sha256=mSJzpF6u6rgsSYO25szNVr2B296h7_iKD1eqS3o87Qo,6532
  fractal_server/app/runner/_local/_local_config.py,sha256=-oNTsjEUmytHlsYpWfw2CrPvSxDFeEhZSdQvI_wf3Mk,3245
@@ -40,13 +41,13 @@ fractal_server/app/runner/_slurm/executor.py,sha256=ao5YuWtjsIfTYUucE1SvNS8a99Sg
  fractal_server/app/runner/_slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
  fractal_server/app/runner/common.py,sha256=nz0ZuIro0iwZm-OV-e-Y-PrtgKcLK0d7BrzebWyEWEk,9496
  fractal_server/app/runner/handle_failed_job.py,sha256=PKgJARHjXyv33sDsl7oTINdcTu7EwmFmIkp38RqAE3Q,4641
- fractal_server/app/schemas/__init__.py,sha256=Roc1gkp3jsafkgcsY8u5S6nshj9ER9fuAkthplrX4kI,1804
+ fractal_server/app/schemas/__init__.py,sha256=th51Dzbe94xatOIMVheqlq0rVFy8oI1CHRfzCbjiTSg,1859
  fractal_server/app/schemas/_validators.py,sha256=7YEbgrnGRpzkLMfZzQNfczEmcNnO__SmVOaBHhzaiXE,1819
- fractal_server/app/schemas/applyworkflow.py,sha256=gGlVSfYZlVdWRhIhQ_0tzBKfuW1y1-ZeIS5UvjpXZTM,2954
+ fractal_server/app/schemas/applyworkflow.py,sha256=iT7FBWgb9EfMSUdFCSv6vPy6223nsPqdQCLTYHrM8Uw,3871
  fractal_server/app/schemas/dataset.py,sha256=PPqGTsRQ5JEwkiM4NcjPYFckxnCdi_Zov-bWXDm1LUk,3092
  fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
  fractal_server/app/schemas/manifest.py,sha256=xxTd39dAXMK9Ox1y-p3gbyg0zd5udW99pV4JngCUGwM,3819
- fractal_server/app/schemas/project.py,sha256=GoV1yUPVSJ7eFGXIBrYh_4FOVKYcBRzPbW7ImnBAg-4,1047
+ fractal_server/app/schemas/project.py,sha256=D_HZgwkR7OZoMlZaN7hFPrbB7T5gQPucjza0ekZNdQM,1213
  fractal_server/app/schemas/state.py,sha256=CS8Rs5qF21TsnqmyzUHLqRaX1b61Oc6Yra6POYpYSQY,762
  fractal_server/app/schemas/task.py,sha256=2TBE5Ne9tO_-a2-Es0PRXMT8ZddSInTOPMor7u8-gx0,3671
  fractal_server/app/schemas/task_collection.py,sha256=mPk6E1LK2UvnHkhIQWHmTztsVT99iHZn-UZy7mGNjUk,2965
@@ -55,24 +56,26 @@ fractal_server/app/schemas/workflow.py,sha256=oFoO62JH5hfMJjKoicdpyC5hd2O9XgqoAm
  fractal_server/app/security/__init__.py,sha256=qjTt5vvtIq6eMIMBCDewrQK9A6Lw2DW2HnifWuxeDmA,10933
  fractal_server/config.py,sha256=zekTDA_FhQG_RYgOWEIxT6KyJKRpMge-pB-iYRr4sIY,14447
  fractal_server/logger.py,sha256=keri8i960WHT8Zz9Rm2MwfnrA2dw9TsrfCmojqtGDLs,4562
- fractal_server/main.py,sha256=9_T_cMqf0EfbfYwkYhKeU36v9PFi95BoydapKpmaTKc,5932
+ fractal_server/main.py,sha256=JR-Q7QBFq97Wy0FTrXy9YxiyusWNuCFJMDyvJ_Vatko,6075
  fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
  fractal_server/migrations/env.py,sha256=05EoWw0p43ojTNiz7UVG4lsl057B4ImSgXiHmiU-M80,2690
  fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhRWiYdS5YgNbI,526
  fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
  fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs9J40eyCWi3w0c_iIBVJjXNn4VdVnQaT3KxDg,8770
  fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
+ fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py,sha256=NSCuhANChsg76vBkShBl-9tQ4VEHubOjtAv1etHhlvY,2684
  fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
  fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0im6TxDr53sKKcjiPgeH4ftVRGnRXZSh2lPbRQ1Ir9w,883
  fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
+ fractal_server/migrations/versions/e75cac726012_make_applyworkflow_start_timestamp_not_.py,sha256=lOggSvzGWqQvnxxFuSM6W50Ui49R918A-uBuiZJ0pNM,963
  fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
  fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
  fractal_server/tasks/__init__.py,sha256=Wzuxf5EoH1v0fYzRpAZHG_S-Z9f6DmbIsuSvllBCGvc,72
  fractal_server/tasks/collection.py,sha256=POKvQyS5G5ySybH0r0v21I_ZQ5AREe9kAqr_uFfGyaU,17627
  fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
- fractal_server-1.3.14a0.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-1.3.14a0.dist-info/METADATA,sha256=05R2BTkBa2A2yJEcoed4gnJvlEZqTAmRhSVvouiBJs8,3774
- fractal_server-1.3.14a0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- fractal_server-1.3.14a0.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-1.3.14a0.dist-info/RECORD,,
+ fractal_server-1.4.0a1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-1.4.0a1.dist-info/METADATA,sha256=2drgsD8Zsk0dNZIT_CkCRCdqCWqBcIuZAwFMfvuAYhw,3773
+ fractal_server-1.4.0a1.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ fractal_server-1.4.0a1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-1.4.0a1.dist-info/RECORD,,