oracle-ads 2.11.9__py3-none-any.whl → 2.11.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. ads/aqua/__init__.py +1 -1
  2. ads/aqua/{base.py → app.py} +27 -7
  3. ads/aqua/cli.py +59 -17
  4. ads/aqua/common/__init__.py +5 -0
  5. ads/aqua/{decorator.py → common/decorator.py} +14 -8
  6. ads/aqua/common/enums.py +69 -0
  7. ads/aqua/{exception.py → common/errors.py} +28 -0
  8. ads/aqua/{utils.py → common/utils.py} +168 -77
  9. ads/aqua/config/config.py +18 -0
  10. ads/aqua/constants.py +51 -33
  11. ads/aqua/data.py +15 -26
  12. ads/aqua/evaluation/__init__.py +8 -0
  13. ads/aqua/evaluation/constants.py +53 -0
  14. ads/aqua/evaluation/entities.py +170 -0
  15. ads/aqua/evaluation/errors.py +71 -0
  16. ads/aqua/{evaluation.py → evaluation/evaluation.py} +122 -370
  17. ads/aqua/extension/__init__.py +2 -0
  18. ads/aqua/extension/aqua_ws_msg_handler.py +97 -0
  19. ads/aqua/extension/base_handler.py +0 -7
  20. ads/aqua/extension/common_handler.py +12 -6
  21. ads/aqua/extension/deployment_handler.py +70 -4
  22. ads/aqua/extension/errors.py +10 -0
  23. ads/aqua/extension/evaluation_handler.py +5 -3
  24. ads/aqua/extension/evaluation_ws_msg_handler.py +43 -0
  25. ads/aqua/extension/finetune_handler.py +41 -3
  26. ads/aqua/extension/model_handler.py +56 -4
  27. ads/aqua/extension/models/__init__.py +0 -0
  28. ads/aqua/extension/models/ws_models.py +69 -0
  29. ads/aqua/extension/ui_handler.py +65 -4
  30. ads/aqua/extension/ui_websocket_handler.py +124 -0
  31. ads/aqua/extension/utils.py +1 -1
  32. ads/aqua/finetuning/__init__.py +7 -0
  33. ads/aqua/finetuning/constants.py +17 -0
  34. ads/aqua/finetuning/entities.py +102 -0
  35. ads/aqua/{finetune.py → finetuning/finetuning.py} +162 -136
  36. ads/aqua/model/__init__.py +8 -0
  37. ads/aqua/model/constants.py +46 -0
  38. ads/aqua/model/entities.py +266 -0
  39. ads/aqua/model/enums.py +26 -0
  40. ads/aqua/{model.py → model/model.py} +401 -309
  41. ads/aqua/modeldeployment/__init__.py +8 -0
  42. ads/aqua/modeldeployment/constants.py +26 -0
  43. ads/aqua/{deployment.py → modeldeployment/deployment.py} +288 -227
  44. ads/aqua/modeldeployment/entities.py +142 -0
  45. ads/aqua/modeldeployment/inference.py +75 -0
  46. ads/aqua/ui.py +88 -8
  47. ads/cli.py +55 -7
  48. ads/common/serializer.py +2 -2
  49. ads/config.py +2 -1
  50. ads/jobs/builders/infrastructure/dsc_job.py +49 -6
  51. ads/model/datascience_model.py +1 -1
  52. ads/model/deployment/model_deployment.py +11 -0
  53. ads/model/model_metadata.py +17 -6
  54. ads/opctl/operator/lowcode/anomaly/README.md +0 -2
  55. ads/opctl/operator/lowcode/anomaly/__main__.py +3 -3
  56. ads/opctl/operator/lowcode/anomaly/environment.yaml +0 -2
  57. ads/opctl/operator/lowcode/anomaly/model/automlx.py +2 -2
  58. ads/opctl/operator/lowcode/anomaly/model/autots.py +1 -1
  59. ads/opctl/operator/lowcode/anomaly/model/base_model.py +13 -17
  60. ads/opctl/operator/lowcode/anomaly/operator_config.py +2 -0
  61. ads/opctl/operator/lowcode/anomaly/schema.yaml +1 -2
  62. ads/opctl/operator/lowcode/anomaly/utils.py +3 -2
  63. ads/opctl/operator/lowcode/common/transformations.py +2 -1
  64. ads/opctl/operator/lowcode/common/utils.py +1 -1
  65. ads/opctl/operator/lowcode/forecast/README.md +1 -3
  66. ads/opctl/operator/lowcode/forecast/__main__.py +3 -18
  67. ads/opctl/operator/lowcode/forecast/const.py +2 -0
  68. ads/opctl/operator/lowcode/forecast/environment.yaml +1 -2
  69. ads/opctl/operator/lowcode/forecast/model/arima.py +1 -0
  70. ads/opctl/operator/lowcode/forecast/model/automlx.py +7 -4
  71. ads/opctl/operator/lowcode/forecast/model/autots.py +1 -0
  72. ads/opctl/operator/lowcode/forecast/model/base_model.py +38 -22
  73. ads/opctl/operator/lowcode/forecast/model/factory.py +33 -4
  74. ads/opctl/operator/lowcode/forecast/model/forecast_datasets.py +15 -1
  75. ads/opctl/operator/lowcode/forecast/model/ml_forecast.py +234 -0
  76. ads/opctl/operator/lowcode/forecast/model/neuralprophet.py +9 -1
  77. ads/opctl/operator/lowcode/forecast/model/prophet.py +1 -0
  78. ads/opctl/operator/lowcode/forecast/model_evaluator.py +147 -0
  79. ads/opctl/operator/lowcode/forecast/operator_config.py +2 -1
  80. ads/opctl/operator/lowcode/forecast/schema.yaml +7 -2
  81. ads/opctl/operator/lowcode/forecast/utils.py +18 -44
  82. {oracle_ads-2.11.9.dist-info → oracle_ads-2.11.10.dist-info}/METADATA +9 -12
  83. {oracle_ads-2.11.9.dist-info → oracle_ads-2.11.10.dist-info}/RECORD +86 -61
  84. ads/aqua/job.py +0 -29
  85. {oracle_ads-2.11.9.dist-info → oracle_ads-2.11.10.dist-info}/LICENSE.txt +0 -0
  86. {oracle_ads-2.11.9.dist-info → oracle_ads-2.11.10.dist-info}/WHEEL +0 -0
  87. {oracle_ads-2.11.9.dist-info → oracle_ads-2.11.10.dist-info}/entry_points.txt +0 -0

ads/aqua/{finetune.py → finetuning/finetuning.py}
@@ -5,9 +5,8 @@
 
 import json
 import os
-from dataclasses import asdict, dataclass, field
-from enum import Enum
-from typing import Dict, Optional
+from dataclasses import asdict, fields, MISSING
+from typing import Dict
 
 from oci.data_science.models import (
     Metadata,
@@ -16,11 +15,14 @@ from oci.data_science.models import (
 )
 
 from ads.aqua import ODSC_MODEL_COMPARTMENT_OCID, logger
-from ads.aqua.base import AquaApp
-from ads.aqua.data import AquaResourceIdentifier, Resource, Tags
-from ads.aqua.exception import AquaFileExistsError, AquaValueError
-from ads.aqua.job import AquaJobSummary
-from ads.aqua.utils import (
+from ads.aqua.app import AquaApp
+from ads.aqua.common.enums import Resource, Tags
+from ads.aqua.common.errors import AquaFileExistsError, AquaValueError
+from ads.aqua.common.utils import (
+    get_container_image,
+    upload_local_to_os,
+)
+from ads.aqua.constants import (
     DEFAULT_FT_BATCH_SIZE,
     DEFAULT_FT_BLOCK_STORAGE_SIZE,
     DEFAULT_FT_REPLICA,
@@ -28,14 +30,16 @@ from ads.aqua.utils import (
     JOB_INFRASTRUCTURE_TYPE_DEFAULT_NETWORKING,
     UNKNOWN,
     UNKNOWN_DICT,
-    get_container_image,
-    upload_local_to_os,
 )
+from ads.aqua.config.config import get_finetuning_config_defaults
+from ads.aqua.data import AquaResourceIdentifier
+from ads.aqua.finetuning.constants import *
+from ads.aqua.finetuning.entities import *
 from ads.common.auth import default_signer
 from ads.common.object_storage_details import ObjectStorageDetails
-from ads.common.serializer import DataClassSerializable
 from ads.common.utils import get_console_link
 from ads.config import (
+    AQUA_FINETUNING_CONTAINER_OVERRIDE_FLAG_METADATA_NAME,
     AQUA_JOB_SUBNET_ID,
     AQUA_MODEL_FINETUNING_CONFIG,
     COMPARTMENT_OCID,
@@ -54,100 +58,6 @@ from ads.model.model_metadata import (
 from ads.telemetry import telemetry
 
 
-class FineTuneCustomMetadata(Enum):
-    FINE_TUNE_SOURCE = "fine_tune_source"
-    FINE_TUNE_SOURCE_NAME = "fine_tune_source_name"
-    FINE_TUNE_OUTPUT_PATH = "fine_tune_output_path"
-    FINE_TUNE_JOB_ID = "fine_tune_job_id"
-    FINE_TUNE_JOB_RUN_ID = "fine_tune_job_run_id"
-    SERVICE_MODEL_ARTIFACT_LOCATION = "artifact_location"
-    SERVICE_MODEL_DEPLOYMENT_CONTAINER = "deployment-container"
-    SERVICE_MODEL_FINE_TUNE_CONTAINER = "finetune-container"
-
-
-@dataclass(repr=False)
-class AquaFineTuningParams(DataClassSerializable):
-    epochs: int = None
-    learning_rate: float = None
-    sample_packing: str = "True"
-
-
-@dataclass(repr=False)
-class AquaFineTuningSummary(AquaJobSummary, DataClassSerializable):
-    parameters: AquaFineTuningParams = field(default_factory=AquaFineTuningParams)
-
-
-@dataclass(repr=False)
-class CreateFineTuningDetails(DataClassSerializable):
-    """Dataclass to create aqua model fine tuning.
-
-    Fields
-    ------
-    ft_source_id: str
-        The fine tuning source id. Must be model ocid.
-    ft_name: str
-        The name for fine tuning.
-    dataset_path: str
-        The dataset path for fine tuning. Could be either a local path from notebook session
-        or an object storage path.
-    report_path: str
-        The report path for fine tuning. Must be an object storage path.
-    ft_parameters: dict
-        The parameters for fine tuning.
-    shape_name: str
-        The shape name for fine tuning job infrastructure.
-    replica: int
-        The replica for fine tuning job runtime.
-    validation_set_size: float
-        The validation set size for fine tuning job. Must be a float in between [0,1).
-    ft_description: (str, optional). Defaults to `None`.
-        The description for fine tuning.
-    compartment_id: (str, optional). Defaults to `None`.
-        The compartment id for fine tuning.
-    project_id: (str, optional). Defaults to `None`.
-        The project id for fine tuning.
-    experiment_id: (str, optional). Defaults to `None`.
-        The fine tuning model version set id. If provided,
-        fine tuning model will be associated with it.
-    experiment_name: (str, optional). Defaults to `None`.
-        The fine tuning model version set name. If provided,
-        the fine tuning version set with the same name will be used if exists,
-        otherwise a new model version set will be created with the name.
-    experiment_description: (str, optional). Defaults to `None`.
-        The description for fine tuning model version set.
-    block_storage_size: (int, optional). Defaults to 256.
-        The storage for fine tuning job infrastructure.
-    subnet_id: (str, optional). Defaults to `None`.
-        The custom egress for fine tuning job.
-    log_group_id: (str, optional). Defaults to `None`.
-        The log group id for fine tuning job infrastructure.
-    log_id: (str, optional). Defaults to `None`.
-        The log id for fine tuning job infrastructure.
-    force_overwrite: (bool, optional). Defaults to `False`.
-        Whether to force overwrite the existing file in object storage.
-    """
-
-    ft_source_id: str
-    ft_name: str
-    dataset_path: str
-    report_path: str
-    ft_parameters: dict
-    shape_name: str
-    replica: int
-    validation_set_size: float
-    ft_description: Optional[str] = None
-    compartment_id: Optional[str] = None
-    project_id: Optional[str] = None
-    experiment_id: Optional[str] = None
-    experiment_name: Optional[str] = None
-    experiment_description: Optional[str] = None
-    block_storage_size: Optional[int] = None
-    subnet_id: Optional[str] = None
-    log_id: Optional[str] = None
-    log_group_id: Optional[str] = None
-    force_overwrite: Optional[bool] = False
-
-
 class AquaFineTuningApp(AquaApp):
     """Provides a suite of APIs to interact with Aqua fine-tuned models within the Oracle
     Cloud Infrastructure Data Science service, serving as an interface for creating fine-tuned models.
@@ -190,9 +100,12 @@ class AquaFineTuningApp(AquaApp):
         try:
             create_fine_tuning_details = CreateFineTuningDetails(**kwargs)
         except:
+            allowed_create_fine_tuning_details = ", ".join(
+                field.name for field in fields(CreateFineTuningDetails)
+            ).rstrip()
             raise AquaValueError(
                 "Invalid create fine tuning parameters. Allowable parameters are: "
-                f"{', '.join(list(asdict(CreateFineTuningDetails).keys()))}."
+                f"{allowed_create_fine_tuning_details}."
             )
 
         source = self.get_source(create_fine_tuning_details.ft_source_id)
@@ -247,9 +160,12 @@ class AquaFineTuningApp(AquaApp):
                 **create_fine_tuning_details.ft_parameters,
             )
         except:
+            allowed_fine_tuning_parameters = ", ".join(
+                field.name for field in fields(AquaFineTuningParams)
+            ).rstrip()
             raise AquaValueError(
                 "Invalid fine tuning parameters. Fine tuning parameters should "
-                f"be a dictionary with keys: {', '.join(list(asdict(AquaFineTuningParams).keys()))}."
+                f"be a dictionary with keys: {allowed_fine_tuning_parameters}."
             )
 
         experiment_model_version_set_id = create_fine_tuning_details.experiment_id
@@ -307,19 +223,20 @@ class AquaFineTuningApp(AquaApp):
 
         ft_model_custom_metadata = ModelCustomMetadata()
         ft_model_custom_metadata.add(
-            key=FineTuneCustomMetadata.FINE_TUNE_SOURCE.value,
+            key=FineTuneCustomMetadata.FINE_TUNE_SOURCE,
             value=create_fine_tuning_details.ft_source_id,
         )
         ft_model_custom_metadata.add(
-            key=FineTuneCustomMetadata.FINE_TUNE_SOURCE_NAME.value,
+            key=FineTuneCustomMetadata.FINE_TUNE_SOURCE_NAME,
             value=source.display_name,
         )
         service_model_artifact_location = source.custom_metadata_list.get(
-            FineTuneCustomMetadata.SERVICE_MODEL_ARTIFACT_LOCATION.value
+            FineTuneCustomMetadata.SERVICE_MODEL_ARTIFACT_LOCATION
         )
         service_model_deployment_container = source.custom_metadata_list.get(
-            FineTuneCustomMetadata.SERVICE_MODEL_DEPLOYMENT_CONTAINER.value
+            FineTuneCustomMetadata.SERVICE_MODEL_DEPLOYMENT_CONTAINER
         )
+
         ft_model_custom_metadata.add(
             key=service_model_artifact_location.key,
             value=service_model_artifact_location.value,
@@ -350,8 +267,8 @@ class AquaFineTuningApp(AquaApp):
         )
 
         ft_job_freeform_tags = {
-            Tags.AQUA_TAG.value: UNKNOWN,
-            Tags.AQUA_FINE_TUNED_MODEL_TAG.value: f"{source.id}#{source.display_name}",
+            Tags.AQUA_TAG: UNKNOWN,
+            Tags.AQUA_FINE_TUNED_MODEL_TAG: f"{source.id}#{source.display_name}",
         }
 
         ft_job = Job(name=ft_model.display_name).with_infrastructure(
@@ -381,10 +298,19 @@ class AquaFineTuningApp(AquaApp):
         ft_config = self.get_finetuning_config(source.id)
 
         ft_container = source.custom_metadata_list.get(
-            FineTuneCustomMetadata.SERVICE_MODEL_FINE_TUNE_CONTAINER.value
+            FineTuneCustomMetadata.SERVICE_MODEL_FINE_TUNE_CONTAINER
         ).value
-
-        batch_size = (
+        is_custom_container = False
+        try:
+            # Check if the container override flag is set. If set, then the user has chosen custom image
+            if source.custom_metadata_list.get(
+                AQUA_FINETUNING_CONTAINER_OVERRIDE_FLAG_METADATA_NAME
+            ).value:
+                is_custom_container = True
+        except Exception:
+            pass
+
+        ft_parameters.batch_size = ft_parameters.batch_size or (
             ft_config.get("shape", UNKNOWN_DICT)
             .get(create_fine_tuning_details.shape_name, UNKNOWN_DICT)
             .get("batch_size", DEFAULT_FT_BATCH_SIZE)
@@ -398,7 +324,6 @@ class AquaFineTuningApp(AquaApp):
                 dataset_path=ft_dataset_path,
                 report_path=create_fine_tuning_details.report_path,
                 replica=create_fine_tuning_details.replica,
-                batch_size=batch_size,
                 finetuning_params=finetuning_params,
                 val_set_size=(
                     create_fine_tuning_details.validation_set_size
@@ -406,6 +331,7 @@ class AquaFineTuningApp(AquaApp):
                 ),
                 parameters=ft_parameters,
                 ft_container=ft_container,
+                is_custom_container=is_custom_container,
             )
         ).create()
         logger.debug(
@@ -422,11 +348,11 @@ class AquaFineTuningApp(AquaApp):
         )
 
         ft_model_custom_metadata.add(
-            key=FineTuneCustomMetadata.FINE_TUNE_JOB_ID.value,
+            key=FineTuneCustomMetadata.FINE_TUNE_JOB_ID,
             value=ft_job.id,
         )
         ft_model_custom_metadata.add(
-            key=FineTuneCustomMetadata.FINE_TUNE_JOB_RUN_ID.value,
+            key=FineTuneCustomMetadata.FINE_TUNE_JOB_RUN_ID,
             value=ft_job_run.id,
         )
         updated_custom_metadata_list = [
@@ -435,16 +361,16 @@ class AquaFineTuningApp(AquaApp):
         ]
 
         source_freeform_tags = source.freeform_tags or {}
-        source_freeform_tags.pop(Tags.LICENSE.value, None)
-        source_freeform_tags.update({Tags.READY_TO_FINE_TUNE.value: "false"})
-        source_freeform_tags.update({Tags.AQUA_TAG.value: UNKNOWN})
+        source_freeform_tags.pop(Tags.LICENSE, None)
+        source_freeform_tags.update({Tags.READY_TO_FINE_TUNE: "false"})
+        source_freeform_tags.update({Tags.AQUA_TAG: UNKNOWN})
 
         self.update_model(
             model_id=ft_model.id,
             update_model_details=UpdateModelDetails(
                 custom_metadata_list=updated_custom_metadata_list,
                 freeform_tags={
-                    Tags.AQUA_FINE_TUNED_MODEL_TAG.value: (
+                    Tags.AQUA_FINE_TUNED_MODEL_TAG: (
                         f"{source.id}#{source.display_name}"
                     ),
                     **source_freeform_tags,
@@ -489,7 +415,7 @@ class AquaFineTuningApp(AquaApp):
                 id=ft_model.id,
                 name=ft_model.display_name,
                 console_url=get_console_link(
-                    resource=Resource.MODEL.value,
+                    resource=Resource.MODEL,
                     ocid=ft_model.id,
                     region=self.region,
                 ),
@@ -500,7 +426,7 @@ class AquaFineTuningApp(AquaApp):
                 id=experiment_model_version_set_id,
                 name=experiment_model_version_set_name,
                 url=get_console_link(
-                    resource=Resource.MODEL_VERSION_SET.value,
+                    resource=Resource.MODEL_VERSION_SET,
                     ocid=experiment_model_version_set_id,
                     region=self.region,
                 ),
@@ -509,7 +435,7 @@
                 id=source.id,
                 name=source.display_name,
                 url=get_console_link(
-                    resource=Resource.MODEL.value,
+                    resource=Resource.MODEL,
                     ocid=source.id,
                     region=self.region,
                 ),
@@ -518,18 +444,22 @@
                 id=ft_job.id,
                 name=ft_job.name,
                 url=get_console_link(
-                    resource=Resource.JOB.value,
+                    resource=Resource.JOB,
                     ocid=ft_job.id,
                     region=self.region,
                 ),
             ),
             tags=dict(
-                aqua_finetuning=Tags.AQUA_FINE_TUNING.value,
+                aqua_finetuning=Tags.AQUA_FINE_TUNING,
                 finetuning_job_id=ft_job.id,
                 finetuning_source=source.id,
                 finetuning_experiment_id=experiment_model_version_set_id,
             ),
-            parameters=ft_parameters,
+            parameters={
+                key: value
+                for key, value in asdict(ft_parameters).items()
+                if value is not None
+            },
         )
 
     def _build_fine_tuning_runtime(
@@ -539,15 +469,19 @@
         dataset_path: str,
         report_path: str,
         replica: int,
-        batch_size: int,
         val_set_size: float,
         parameters: AquaFineTuningParams,
         ft_container: str = None,
         finetuning_params: str = None,
+        is_custom_container: bool = False,
     ) -> Runtime:
         """Builds fine tuning runtime for Job."""
-        container = get_container_image(
-            container_type=ft_container,
+        container = (
+            get_container_image(
+                container_type=ft_container,
+            )
+            if not is_custom_container
+            else ft_container
         )
         runtime = (
             ContainerRuntime()
@@ -562,9 +496,12 @@
                             },
                         }
                     ),
-                    "OCI__LAUNCH_CMD": (
-                        f"--micro_batch_size {batch_size} --num_epochs {parameters.epochs} --learning_rate {parameters.learning_rate} --training_data {dataset_path} --output_dir {report_path} --val_set_size {val_set_size} --sample_packing {parameters.sample_packing} "
-                        + (f"{finetuning_params}" if finetuning_params else "")
+                    "OCI__LAUNCH_CMD": self._build_oci_launch_cmd(
+                        dataset_path=dataset_path,
+                        report_path=report_path,
+                        val_set_size=val_set_size,
+                        parameters=parameters,
+                        finetuning_params=finetuning_params,
                     ),
                     "CONDA_BUCKET_NS": CONDA_BUCKET_NS,
                 }
@@ -575,6 +512,30 @@
 
         return runtime
 
+    @staticmethod
+    def _build_oci_launch_cmd(
+        dataset_path: str,
+        report_path: str,
+        val_set_size: float,
+        parameters: AquaFineTuningParams,
+        finetuning_params: str = None,
+    ) -> str:
+        """Builds the oci launch cmd for fine tuning container runtime."""
+        oci_launch_cmd = f"--training_data {dataset_path} --output_dir {report_path} --val_set_size {val_set_size} "
+        for key, value in asdict(parameters).items():
+            if value is not None:
+                if key == "batch_size":
+                    oci_launch_cmd += f"--micro_{key} {value} "
+                elif key == "epochs":
+                    oci_launch_cmd += f"--num_{key} {value} "
+                elif key == "lora_target_modules":
+                    oci_launch_cmd += f"--{key} {','.join(str(k) for k in value)} "
+                else:
+                    oci_launch_cmd += f"--{key} {value} "
+
+        oci_launch_cmd += f"{finetuning_params}" if finetuning_params else ""
+        return oci_launch_cmd.rstrip()
+
     @telemetry(
         entry_point="plugin=finetuning&action=get_finetuning_config", name="aqua"
     )
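
The inline f-string removed a few hunks above emitted every flag unconditionally; the new _build_oci_launch_cmd only emits flags for parameters that are actually set, and renames batch_size and epochs to the container's --micro_batch_size and --num_epochs flags. A self-contained sketch of that mapping, using a plain dict in place of AquaFineTuningParams (the parameter names are illustrative):

    # Standalone illustration of the flag mapping implemented by _build_oci_launch_cmd.
    def build_launch_cmd(dataset_path: str, report_path: str, val_set_size: float, params: dict) -> str:
        cmd = f"--training_data {dataset_path} --output_dir {report_path} --val_set_size {val_set_size} "
        for key, value in params.items():
            if value is None:
                continue  # unset parameters produce no flag at all
            if key == "batch_size":
                cmd += f"--micro_{key} {value} "
            elif key == "epochs":
                cmd += f"--num_{key} {value} "
            elif key == "lora_target_modules":
                cmd += f"--{key} {','.join(str(k) for k in value)} "
            else:
                cmd += f"--{key} {value} "
        return cmd.rstrip()

    print(build_launch_cmd("oci://bucket@ns/train.jsonl", "oci://bucket@ns/report/", 0.1,
                           {"epochs": 3, "learning_rate": 2e-5, "batch_size": 1}))
    # --training_data oci://bucket@ns/train.jsonl --output_dir oci://bucket@ns/report/
    #   --val_set_size 0.1 --num_epochs 3 --learning_rate 2e-05 --micro_batch_size 1
    # (printed as a single line; wrapped here for readability)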
@@ -592,4 +553,69 @@
         A dict of allowed finetuning configs.
         """
 
-        return self.get_config(model_id, AQUA_MODEL_FINETUNING_CONFIG)
+        config = self.get_config(model_id, AQUA_MODEL_FINETUNING_CONFIG)
+        if not config:
+            logger.info(f"Fetching default fine-tuning config for model: {model_id}")
+            config = get_finetuning_config_defaults()
+        return config
+
+    @telemetry(
+        entry_point="plugin=finetuning&action=get_finetuning_default_params",
+        name="aqua",
+    )
+    def get_finetuning_default_params(self, model_id: str) -> Dict:
+        """Gets the default params set in the finetuning configs for the given model. Only the fields that are
+        available in AquaFineTuningParams will be accessible for user overrides.
+
+        Parameters
+        ----------
+        model_id: str
+            The OCID of the Aqua model.
+
+        Returns
+        -------
+        Dict:
+            Dict of parameters from the loaded from finetuning config json file. If config information is not available,
+            then an empty dict is returned.
+        """
+        default_params = {"params": {}}
+        finetuning_config = self.get_finetuning_config(model_id)
+        config_parameters = finetuning_config.get("configuration", UNKNOWN_DICT)
+        dataclass_fields = {field.name for field in fields(AquaFineTuningParams)}
+        for name, value in config_parameters.items():
+            if name == "micro_batch_size":
+                name = "batch_size"
+            if name in dataclass_fields:
+                default_params["params"][name] = value
+
+        return default_params
+
+    def validate_finetuning_params(self, params: Dict = None) -> Dict:
+        """Validate if the fine-tuning parameters passed by the user can be overridden. Parameter values are not
+        validated, only param keys are validated.
+
+        Parameters
+        ----------
+        params :Dict, optional
+            Params passed by the user.
+
+        Returns
+        -------
+            Return a list of restricted params.
+        """
+        try:
+            AquaFineTuningParams(
+                **params,
+            )
+        except Exception as e:
+            logger.debug(str(e))
+            allowed_fine_tuning_parameters = ", ".join(
+                f"{field.name} (required)" if field.default is MISSING else field.name
+                for field in fields(AquaFineTuningParams)
+            ).rstrip()
+            raise AquaValueError(
+                f"Invalid fine tuning parameters. Allowable parameters are: "
+                f"{allowed_fine_tuning_parameters}."
+            )
+
+        return dict(valid=True)
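
A hedged usage sketch for the two methods added above. The import path assumes ads/aqua/finetuning/__init__.py re-exports AquaFineTuningApp (its contents are not shown in this diff), and the model OCID is a placeholder:

    from ads.aqua.finetuning import AquaFineTuningApp  # assumed re-export; otherwise import from ads.aqua.finetuning.finetuning

    app = AquaFineTuningApp()
    model_ocid = "ocid1.datasciencemodel.oc1..<unique_id>"  # placeholder

    # Defaults come from the model's fine-tuning config (or the packaged defaults when none is attached);
    # only keys that also exist on AquaFineTuningParams are surfaced.
    print(app.get_finetuning_default_params(model_ocid))  # e.g. {"params": {"epochs": ..., "learning_rate": ...}}

    # Key-only validation: unknown parameter names raise AquaValueError; values are not checked.
    print(app.validate_finetuning_params({"epochs": 2, "learning_rate": 0.0002}))  # {"valid": True}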

ads/aqua/model/__init__.py (new file)
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*--
+
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+from ads.aqua.model.model import AquaModelApp
+
+__all__ = ["AquaModelApp"]

ads/aqua/model/constants.py (new file)
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Oracle and/or its affiliates.
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+"""
+aqua.model.constants
+~~~~~~~~~~~~~~~~~~~~
+
+This module contains constants/enums used in Aqua Model.
+"""
+from ads.common.extended_enum import ExtendedEnumMeta
+
+
+class ModelCustomMetadataFields(str, metaclass=ExtendedEnumMeta):
+    ARTIFACT_LOCATION = "artifact_location"
+    DEPLOYMENT_CONTAINER = "deployment-container"
+    EVALUATION_CONTAINER = "evaluation-container"
+    FINETUNE_CONTAINER = "finetune-container"
+
+
+class ModelTask(str, metaclass=ExtendedEnumMeta):
+    TEXT_GENERATION = "text-generation"
+
+
+class FineTuningMetricCategories(str, metaclass=ExtendedEnumMeta):
+    VALIDATION = "validation"
+    TRAINING = "training"
+
+
+class ModelType(str, metaclass=ExtendedEnumMeta):
+    FT = "FT"  # Fine Tuned Model
+    BASE = "BASE"  # Base model
+
+
+# TODO: merge metadata key used in create FT
+class FineTuningCustomMetadata(str, metaclass=ExtendedEnumMeta):
+    FT_SOURCE = "fine_tune_source"
+    FT_SOURCE_NAME = "fine_tune_source_name"
+    FT_OUTPUT_PATH = "fine_tune_output_path"
+    FT_JOB_ID = "fine_tune_job_id"
+    FT_JOB_RUN_ID = "fine_tune_jobrun_id"
+    TRAINING_METRICS_FINAL = "train_metrics_final"
+    VALIDATION_METRICS_FINAL = "val_metrics_final"
+    TRAINING_METRICS_EPOCH = "train_metrics_epoch"
+    VALIDATION_METRICS_EPOCH = "val_metrics_epoch"
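
These constants use the (str, metaclass=ExtendedEnumMeta) pattern rather than enum.Enum, which is why the finetuning hunks above drop every .value accessor: members behave as plain strings. A minimal sketch of the idea, assuming ExtendedEnumMeta leaves class attributes as ordinary str values (this is not the actual ads.common.extended_enum implementation):

    class ModelType(str):          # stand-in for (str, metaclass=ExtendedEnumMeta)
        FT = "FT"                  # Fine Tuned Model
        BASE = "BASE"              # Base model

    assert ModelType.FT == "FT"                    # compares directly, no .value needed
    freeform_tags = {ModelType.FT: "my-model"}     # usable as dict keys / freeform tag values
    print(freeform_tags)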