fractal-server 2.11.0a10__py3-none-any.whl → 2.12.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/models/v2/dataset.py +0 -4
  5. fractal_server/app/models/v2/workflowtask.py +0 -4
  6. fractal_server/app/routes/aux/_job.py +1 -3
  7. fractal_server/app/runner/filenames.py +0 -2
  8. fractal_server/app/runner/shutdown.py +3 -27
  9. fractal_server/config.py +1 -15
  10. fractal_server/main.py +1 -12
  11. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0
  12. fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py +54 -0
  13. fractal_server/string_tools.py +0 -21
  14. fractal_server/tasks/utils.py +0 -24
  15. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/METADATA +1 -1
  16. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/RECORD +19 -63
  17. fractal_server/app/models/v1/__init__.py +0 -13
  18. fractal_server/app/models/v1/dataset.py +0 -71
  19. fractal_server/app/models/v1/job.py +0 -101
  20. fractal_server/app/models/v1/project.py +0 -29
  21. fractal_server/app/models/v1/state.py +0 -34
  22. fractal_server/app/models/v1/task.py +0 -85
  23. fractal_server/app/models/v1/workflow.py +0 -133
  24. fractal_server/app/routes/admin/v1.py +0 -377
  25. fractal_server/app/routes/api/v1/__init__.py +0 -26
  26. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  27. fractal_server/app/routes/api/v1/dataset.py +0 -554
  28. fractal_server/app/routes/api/v1/job.py +0 -195
  29. fractal_server/app/routes/api/v1/project.py +0 -475
  30. fractal_server/app/routes/api/v1/task.py +0 -203
  31. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  32. fractal_server/app/routes/api/v1/workflow.py +0 -355
  33. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  34. fractal_server/app/runner/async_wrap_v1.py +0 -27
  35. fractal_server/app/runner/v1/__init__.py +0 -415
  36. fractal_server/app/runner/v1/_common.py +0 -620
  37. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  38. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  39. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  40. fractal_server/app/runner/v1/_local/executor.py +0 -100
  41. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  42. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  43. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  44. fractal_server/app/runner/v1/common.py +0 -117
  45. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  46. fractal_server/app/schemas/v1/__init__.py +0 -37
  47. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  48. fractal_server/app/schemas/v1/dataset.py +0 -165
  49. fractal_server/app/schemas/v1/dumps.py +0 -64
  50. fractal_server/app/schemas/v1/manifest.py +0 -126
  51. fractal_server/app/schemas/v1/project.py +0 -66
  52. fractal_server/app/schemas/v1/state.py +0 -18
  53. fractal_server/app/schemas/v1/task.py +0 -167
  54. fractal_server/app/schemas/v1/task_collection.py +0 -110
  55. fractal_server/app/schemas/v1/workflow.py +0 -212
  56. fractal_server/data_migrations/2_11_0.py +0 -168
  57. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  58. fractal_server/tasks/v1/__init__.py +0 -0
  59. fractal_server/tasks/v1/background_operations.py +0 -352
  60. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  61. fractal_server/tasks/v1/get_collection_data.py +0 -14
  62. fractal_server/tasks/v1/utils.py +0 -67
  63. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/LICENSE +0 -0
  64. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/WHEEL +0 -0
  65. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/entry_points.txt +0 -0
--- a/fractal_server/app/routes/api/v1/dataset.py
+++ /dev/null
@@ -1,554 +0,0 @@
-import json
-from json.decoder import JSONDecodeError
-from pathlib import Path
-from typing import Optional
-
-from fastapi import APIRouter
-from fastapi import Depends
-from fastapi import HTTPException
-from fastapi import Response
-from fastapi import status
-from sqlmodel import or_
-from sqlmodel import select
-
-from ....db import AsyncSession
-from ....db import get_async_db
-from ....models.v1 import ApplyWorkflow
-from ....models.v1 import Dataset
-from ....models.v1 import Project
-from ....models.v1 import Resource
-from ....runner.filenames import HISTORY_FILENAME_V1
-from ....schemas.v1 import DatasetCreateV1
-from ....schemas.v1 import DatasetReadV1
-from ....schemas.v1 import DatasetStatusReadV1
-from ....schemas.v1 import DatasetUpdateV1
-from ....schemas.v1 import ResourceCreateV1
-from ....schemas.v1 import ResourceReadV1
-from ....schemas.v1 import ResourceUpdateV1
-from ....schemas.v1 import WorkflowExportV1
-from ....schemas.v1 import WorkflowTaskExportV1
-from ._aux_functions import _get_dataset_check_owner
-from ._aux_functions import _get_project_check_owner
-from ._aux_functions import _get_submitted_jobs_statement
-from ._aux_functions import _get_workflow_check_owner
-from ._aux_functions import _raise_if_v1_is_read_only
-from fractal_server.app.models import UserOAuth
-from fractal_server.app.routes.auth import current_active_user
-
-router = APIRouter()
-
-
-@router.post(
-    "/project/{project_id}/dataset/",
-    response_model=DatasetReadV1,
-    status_code=status.HTTP_201_CREATED,
-)
-async def create_dataset(
-    project_id: int,
-    dataset: DatasetCreateV1,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[DatasetReadV1]:
-    """
-    Add new dataset to current project
-    """
-    _raise_if_v1_is_read_only()
-    await _get_project_check_owner(
-        project_id=project_id, user_id=user.id, db=db
-    )
-    db_dataset = Dataset(project_id=project_id, **dataset.dict())
-    db.add(db_dataset)
-    await db.commit()
-    await db.refresh(db_dataset)
-    await db.close()
-
-    return db_dataset
-
-
-@router.get(
-    "/project/{project_id}/dataset/",
-    response_model=list[DatasetReadV1],
-)
-async def read_dataset_list(
-    project_id: int,
-    history: bool = True,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[list[DatasetReadV1]]:
-    """
-    Get dataset list for given project
-    """
-    # Access control
-    project = await _get_project_check_owner(
-        project_id=project_id, user_id=user.id, db=db
-    )
-    # Find datasets of the current project. Note: this select/where approach
-    # has much better scaling than refreshing all elements of
-    # `project.dataset_list` - ref
-    # https://github.com/fractal-analytics-platform/fractal-server/pull/1082#issuecomment-1856676097.
-    stm = select(Dataset).where(Dataset.project_id == project.id)
-    res = await db.execute(stm)
-    dataset_list = res.scalars().all()
-    await db.close()
-    if not history:
-        for ds in dataset_list:
-            setattr(ds, "history", [])
-    return dataset_list
-
-
-@router.get(
-    "/project/{project_id}/dataset/{dataset_id}/",
-    response_model=DatasetReadV1,
-)
-async def read_dataset(
-    project_id: int,
-    dataset_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[DatasetReadV1]:
-    """
-    Get info on a dataset associated to the current project
-    """
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-    await db.close()
-    return dataset
-
-
-@router.patch(
-    "/project/{project_id}/dataset/{dataset_id}/",
-    response_model=DatasetReadV1,
-)
-async def update_dataset(
-    project_id: int,
-    dataset_id: int,
-    dataset_update: DatasetUpdateV1,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[DatasetReadV1]:
-    """
-    Edit a dataset associated to the current project
-    """
-    _raise_if_v1_is_read_only()
-    if dataset_update.history is not None:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Cannot modify dataset history.",
-        )
-
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    db_dataset = output["dataset"]
-
-    for key, value in dataset_update.dict(exclude_unset=True).items():
-        setattr(db_dataset, key, value)
-
-    await db.commit()
-    await db.refresh(db_dataset)
-    await db.close()
-    return db_dataset
-
-
-@router.delete(
-    "/project/{project_id}/dataset/{dataset_id}/",
-    status_code=204,
-)
-async def delete_dataset(
-    project_id: int,
-    dataset_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Response:
-    """
-    Delete a dataset associated to the current project
-    """
-    _raise_if_v1_is_read_only()
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-
-    # Fail if there exist jobs that are submitted and in relation with the
-    # current dataset.
-    stm = _get_submitted_jobs_statement().where(
-        or_(
-            ApplyWorkflow.input_dataset_id == dataset_id,
-            ApplyWorkflow.output_dataset_id == dataset_id,
-        )
-    )
-    res = await db.execute(stm)
-    jobs = res.scalars().all()
-    if jobs:
-        string_ids = str([job.id for job in jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Cannot delete dataset {dataset.id} because it "
-                f"is linked to active job(s) {string_ids}."
-            ),
-        )
-
-    # Cascade operations: set foreign-keys to null for jobs which are in
-    # relationship with the current dataset
-    # input_dataset
-    stm = select(ApplyWorkflow).where(
-        ApplyWorkflow.input_dataset_id == dataset_id
-    )
-    res = await db.execute(stm)
-    jobs = res.scalars().all()
-    for job in jobs:
-        job.input_dataset_id = None
-        await db.merge(job)
-    await db.commit()
-    # output_dataset
-    stm = select(ApplyWorkflow).where(
-        ApplyWorkflow.output_dataset_id == dataset_id
-    )
-    res = await db.execute(stm)
-    jobs = res.scalars().all()
-    for job in jobs:
-        job.output_dataset_id = None
-        await db.merge(job)
-    await db.commit()
-
-    # Delete dataset
-    await db.delete(dataset)
-    await db.commit()
-
-    return Response(status_code=status.HTTP_204_NO_CONTENT)
-
-
-@router.post(
-    "/project/{project_id}/dataset/{dataset_id}/resource/",
-    response_model=ResourceReadV1,
-    status_code=status.HTTP_201_CREATED,
-)
-async def create_resource(
-    project_id: int,
-    dataset_id: int,
-    resource: ResourceCreateV1,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[ResourceReadV1]:
-    """
-    Add resource to an existing dataset
-    """
-    _raise_if_v1_is_read_only()
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-    db_resource = Resource(dataset_id=dataset.id, **resource.dict())
-    db.add(db_resource)
-    await db.commit()
-    await db.refresh(db_resource)
-    await db.close()
-    return db_resource
-
-
-@router.get(
-    "/project/{project_id}/dataset/{dataset_id}/resource/",
-    response_model=list[ResourceReadV1],
-)
-async def get_resource_list(
-    project_id: int,
-    dataset_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[list[ResourceReadV1]]:
-    """
-    Get resources from a dataset
-    """
-    await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    stm = select(Resource).where(Resource.dataset_id == dataset_id)
-    res = await db.execute(stm)
-    resource_list = res.scalars().all()
-    await db.close()
-    return resource_list
-
-
-@router.patch(
-    "/project/{project_id}/dataset/{dataset_id}/resource/{resource_id}/",
-    response_model=ResourceReadV1,
-)
-async def update_resource(
-    project_id: int,
-    dataset_id: int,
-    resource_id: int,
-    resource_update: ResourceUpdateV1,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[ResourceReadV1]:
-    """
-    Edit a resource of a dataset
-    """
-    _raise_if_v1_is_read_only()
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-    orig_resource = await db.get(Resource, resource_id)
-
-    if orig_resource not in dataset.resource_list:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Resource {resource_id} is not part of "
-                f"dataset {dataset_id}"
-            ),
-        )
-
-    for key, value in resource_update.dict(exclude_unset=True).items():
-        setattr(orig_resource, key, value)
-    await db.commit()
-    await db.refresh(orig_resource)
-    await db.close()
-    return orig_resource
-
-
-@router.delete(
-    "/project/{project_id}/dataset/{dataset_id}/resource/{resource_id}/",
-    status_code=204,
-)
-async def delete_resource(
-    project_id: int,
-    dataset_id: int,
-    resource_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Response:
-    """
-    Delete a resource of a dataset
-    """
-    _raise_if_v1_is_read_only()
-    # Get the dataset DB entry
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-    resource = await db.get(Resource, resource_id)
-    if not resource or resource.dataset_id != dataset.id:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Resource does not exist or does not belong to dataset",
-        )
-    await db.delete(resource)
-    await db.commit()
-    await db.close()
-    return Response(status_code=status.HTTP_204_NO_CONTENT)
-
-
-@router.get(
-    "/project/{project_id}/dataset/{dataset_id}/export_history/",
-    response_model=WorkflowExportV1,
-)
-async def export_history_as_workflow(
-    project_id: int,
-    dataset_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowExportV1]:
-    """
-    Extract a reproducible workflow from the dataset history.
-    """
-    # Get the dataset DB entry
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-
-    # Check whether there exists a submitted job such that
-    # `job.output_dataset_id==dataset_id`.
-    # If at least one such job exists, then this endpoint will fail.
-    # We do not support the use case of exporting a reproducible workflow when
-    # job execution is in progress; this may change in the future.
-    stm = _get_submitted_jobs_statement().where(
-        ApplyWorkflow.output_dataset_id == dataset_id
-    )
-    res = await db.execute(stm)
-    jobs = res.scalars().all()
-    if jobs:
-        string_ids = str([job.id for job in jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Cannot export history because dataset {dataset.id} "
-                f"is linked to active job(s) {string_ids}."
-            ),
-        )
-
-    # If such a job does not exist, continue with the endpoint. Note that this
-    # means that the history in the DB is up-to-date.
-
-    # Read history from DB
-    history = dataset.history
-
-    # Construct reproducible workflow
-    task_list = []
-    for history_item in history:
-        wftask = history_item["workflowtask"]
-        wftask_status = history_item["status"]
-        if wftask_status == "done":
-            task_list.append(WorkflowTaskExportV1(**wftask))
-
-    def _slugify_dataset_name(_name: str) -> str:
-        _new_name = _name
-        for char in (" ", ".", "/", "\\"):
-            _new_name = _new_name.replace(char, "_")
-        return _new_name
-
-    name = f"history_{_slugify_dataset_name(dataset.name)}"
-
-    workflow = WorkflowExportV1(name=name, task_list=task_list)
-    return workflow
-
-
-@router.get(
-    "/project/{project_id}/dataset/{dataset_id}/status/",
-    response_model=DatasetStatusReadV1,
-)
-async def get_workflowtask_status(
-    project_id: int,
-    dataset_id: int,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[DatasetStatusReadV1]:
-    """
-    Extract the status of all `WorkflowTask`s that ran on a given `Dataset`.
-    """
-    # Get the dataset DB entry
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-
-    # Check whether there exists a job such that
-    # 1. `job.output_dataset_id == dataset_id`, and
-    # 2. `job.status` is either submitted or running.
-    # If one such job exists, it will be used later. If there are multiple
-    # jobs, raise an error.
-    # Note: see
-    # https://sqlmodel.tiangolo.com/tutorial/where/#type-annotations-and-errors
-    # regarding the type-ignore in this code block
-    stm = _get_submitted_jobs_statement().where(
-        ApplyWorkflow.output_dataset_id == dataset_id
-    )
-    res = await db.execute(stm)
-    running_jobs = res.scalars().all()
-    if len(running_jobs) == 0:
-        running_job = None
-    elif len(running_jobs) == 1:
-        running_job = running_jobs[0]
-    else:
-        string_ids = str([job.id for job in running_jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Cannot get WorkflowTask statuses as dataset {dataset.id} "
-                f"is linked to multiple active jobs: {string_ids}"
-            ),
-        )
-
-    # Initialize empty dictionary for workflowtasks status
-    workflow_tasks_status_dict: dict = {}
-
-    # Lowest priority: read status from DB, which corresponds to jobs that are
-    # not running
-    history = dataset.history
-    for history_item in history:
-        wftask_id = history_item["workflowtask"]["id"]
-        wftask_status = history_item["status"]
-        workflow_tasks_status_dict[wftask_id] = wftask_status
-
-    # If a job is running, then gather more up-to-date information
-    if running_job is not None:
-        # Get the workflow DB entry
-        running_workflow = await _get_workflow_check_owner(
-            project_id=project_id,
-            workflow_id=running_job.workflow_id,
-            user_id=user.id,
-            db=db,
-        )
-        # Mid priority: Set all WorkflowTask's that are part of the running job
-        # as "submitted"
-        start = running_job.first_task_index
-        end = running_job.last_task_index + 1
-        for wftask in running_workflow.task_list[start:end]:
-            workflow_tasks_status_dict[wftask.id] = "submitted"
-
-        # Highest priority: Read status updates coming from the running-job
-        # temporary file. Note: this file only contains information on
-        # WorkflowTask's that ran through successfully
-        tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME_V1
-        try:
-            with tmp_file.open("r") as f:
-                history = json.load(f)
-        except FileNotFoundError:
-            history = []
-        except JSONDecodeError:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="History file does not include a valid JSON.",
-            )
-
-        for history_item in history:
-            wftask_id = history_item["workflowtask"]["id"]
-            wftask_status = history_item["status"]
-            workflow_tasks_status_dict[wftask_id] = wftask_status
-
-    response_body = DatasetStatusReadV1(status=workflow_tasks_status_dict)
-    return response_body
-
-
-@router.get("/dataset/", response_model=list[DatasetReadV1])
-async def get_user_datasets(
-    history: bool = True,
-    user: UserOAuth = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> list[DatasetReadV1]:
-    """
-    Returns all the datasets of the current user
-    """
-    stm = select(Dataset)
-    stm = stm.join(Project).where(
-        Project.user_list.any(UserOAuth.id == user.id)
-    )
-    res = await db.execute(stm)
-    dataset_list = res.scalars().all()
-    await db.close()
-    if not history:
-        for ds in dataset_list:
-            setattr(ds, "history", [])
-    return dataset_list