oracle-ads 2.11.14__py3-none-any.whl → 2.11.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. ads/aqua/common/utils.py +77 -20
  2. ads/aqua/constants.py +30 -17
  3. ads/aqua/evaluation/evaluation.py +118 -107
  4. ads/aqua/extension/evaluation_handler.py +4 -7
  5. ads/aqua/extension/evaluation_ws_msg_handler.py +0 -4
  6. ads/aqua/model/entities.py +6 -8
  7. ads/aqua/modeldeployment/constants.py +0 -16
  8. ads/aqua/modeldeployment/deployment.py +45 -67
  9. ads/opctl/operator/common/operator_config.py +1 -0
  10. ads/opctl/operator/lowcode/anomaly/README.md +3 -3
  11. ads/opctl/operator/lowcode/anomaly/__main__.py +5 -6
  12. ads/opctl/operator/lowcode/anomaly/const.py +8 -0
  13. ads/opctl/operator/lowcode/anomaly/model/anomaly_dataset.py +6 -2
  14. ads/opctl/operator/lowcode/anomaly/model/base_model.py +29 -20
  15. ads/opctl/operator/lowcode/anomaly/model/factory.py +41 -13
  16. ads/opctl/operator/lowcode/anomaly/model/isolationforest.py +79 -0
  17. ads/opctl/operator/lowcode/anomaly/model/oneclasssvm.py +79 -0
  18. ads/opctl/operator/lowcode/anomaly/schema.yaml +12 -2
  19. ads/opctl/operator/lowcode/anomaly/utils.py +16 -13
  20. ads/opctl/operator/lowcode/common/data.py +2 -1
  21. ads/opctl/operator/lowcode/common/transformations.py +37 -9
  22. ads/opctl/operator/lowcode/common/utils.py +32 -10
  23. ads/opctl/operator/lowcode/forecast/model/ml_forecast.py +14 -18
  24. ads/opctl/operator/lowcode/forecast/model_evaluator.py +4 -2
  25. ads/opctl/operator/lowcode/forecast/schema.yaml +9 -0
  26. ads/opctl/operator/lowcode/recommender/MLoperator +16 -0
  27. ads/opctl/operator/lowcode/recommender/README.md +206 -0
  28. ads/opctl/operator/lowcode/recommender/__init__.py +5 -0
  29. ads/opctl/operator/lowcode/recommender/__main__.py +82 -0
  30. ads/opctl/operator/lowcode/recommender/cmd.py +33 -0
  31. ads/opctl/operator/lowcode/recommender/constant.py +25 -0
  32. ads/opctl/operator/lowcode/recommender/environment.yaml +11 -0
  33. ads/opctl/operator/lowcode/recommender/model/base_model.py +198 -0
  34. ads/opctl/operator/lowcode/recommender/model/factory.py +58 -0
  35. ads/opctl/operator/lowcode/recommender/model/recommender_dataset.py +25 -0
  36. ads/opctl/operator/lowcode/recommender/model/svd.py +88 -0
  37. ads/opctl/operator/lowcode/recommender/operator_config.py +81 -0
  38. ads/opctl/operator/lowcode/recommender/schema.yaml +265 -0
  39. ads/opctl/operator/lowcode/recommender/utils.py +13 -0
  40. {oracle_ads-2.11.14.dist-info → oracle_ads-2.11.15.dist-info}/METADATA +6 -1
  41. {oracle_ads-2.11.14.dist-info → oracle_ads-2.11.15.dist-info}/RECORD +44 -28
  42. {oracle_ads-2.11.14.dist-info → oracle_ads-2.11.15.dist-info}/LICENSE.txt +0 -0
  43. {oracle_ads-2.11.14.dist-info → oracle_ads-2.11.15.dist-info}/WHEEL +0 -0
  44. {oracle_ads-2.11.14.dist-info → oracle_ads-2.11.15.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,206 @@
+ # Recommender Operator
+
+ Recommender systems are designed to suggest relevant items, products, or content to users based on their preferences and behaviors. These systems are widely used in industries such as e-commerce, entertainment, and social media to enhance user experience through personalized recommendations. They help increase user engagement, satisfaction, and sales by predicting what users might like or need based on their past interactions and the preferences of similar users.
+
+
+ Below are the steps to configure and run the Recommender Operator on different resources.
+
+ ## 1. Prerequisites
+
+ Follow the [CLI Configuration](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/cli/opctl/configure.html) steps from the ADS documentation. This step is mandatory, as it sets up default values for the options used when running the Recommender Operator on OCI Data Science jobs or OCI Data Flow applications. If you have previously done this and used a flexible shape, make sure to adjust `ml_job_config.ini` with the shape config details and the `docker_registry` information.
+
+ - ocpus = 1
+ - memory_in_gbs = 16
+ - docker_registry = `<iad.ocir.io/namespace/>`
+
+ ## 2. Generating configs
+
+ To generate starter configs, run the command below. This will create a list of YAML configs and place them in the folder specified by `--output` (here `~/recommender/`).
+
+ ```bash
+ ads operator init -t recommender --overwrite --output ~/recommender/
+ ```
+
+ The most important generated files are:
+
+ - `recommender.yaml`: Contains the recommender-related configuration.
+ - `backend_operator_local_python_config.yaml`: Contains a local backend configuration for running the recommender in a local environment. The environment should be set up manually before running the operator.
+ - `backend_operator_local_container_config.yaml`: Contains a local backend configuration for running the recommender within a local container. The container should be built before running the operator. Please refer to the instructions below for details on how to accomplish this.
+ - `backend_job_container_config.yaml`: Contains the Data Science job-related config to run the recommender in a Data Science job within a container (BYOC) runtime. The container should be built and published before running the operator. Please refer to the instructions below for details on how to accomplish this.
+ - `backend_job_python_config.yaml`: Contains the Data Science job-related config to run the recommender in a Data Science job within a conda runtime. The conda environment should be built and published before running the operator.
+
+ All generated configurations should be ready to use without any additional adjustments. However, they are provided as starter kit configurations that can be customized as needed.
+
+ ## 3. Running recommender on the local conda environment
+
+ To run the recommender locally, create and activate a new conda environment (`ads-recommender`) and install all the required libraries listed in the `environment.yaml` file.
+
+ ```yaml
+ - report-creator
+ - cerberus
+ - "git+https://github.com/oracle/accelerated-data-science.git@feature/recommender#egg=oracle-ads"
+ ```
+
+ Review the `recommender.yaml` file generated earlier by the `init` command and adjust the input and output file locations as needed. By default, it assumes that the files are located in the same folder from which the `init` command was run.
+
+ Use the command below to verify the recommender config.
+
+ ```bash
+ ads operator verify -f ~/recommender/recommender.yaml
+ ```
+
+ Use the following command to run the recommender within the `ads-recommender` conda environment.
+
+ ```bash
+ ads operator run -f ~/recommender/recommender.yaml -b local
+ ```
+
+ The operator will run in your local environment without requiring any additional modifications.
+
+ ## 4. Running recommender on the local container
+
+ To run the recommender operator within a local container, follow these steps:
+
+ Use the command below to build the recommender container.
+
+ ```bash
+ ads operator build-image -t recommender
+ ```
+
+ This will create a new `recommender:v1` image, with `/etc/operator` as the designated working directory within the container.
+
+
+ Check the `backend_operator_local_container_config.yaml` config file. By default, it should have a `volume` section with the `.oci` configs folder mounted.
+
+ ```yaml
+ volume:
+   - "/Users/<user>/.oci:/root/.oci"
+ ```
+
+ Mounting the OCI configs folder is only required if an OCI Object Storage bucket will be used to store the input recommender data or the output recommender result. The input/output folders can also be mounted to the container.
+
+ ```yaml
+ volume:
+   - /Users/<user>/.oci:/root/.oci
+   - /Users/<user>/recommender/data:/etc/operator/data
+   - /Users/<user>/recommender/result:/etc/operator/result
+ ```
+
+ The full config can look like:
+ ```yaml
+ kind: operator.local
+ spec:
+   image: recommender:v1
+   volume:
+     - /Users/<user>/.oci:/root/.oci
+     - /Users/<user>/recommender/data:/etc/operator/data
+     - /Users/<user>/recommender/result:/etc/operator/result
+ type: container
+ version: v1
+ ```
+
+ Run the recommender within a container using the command below:
+
+ ```bash
+ ads operator run -f ~/recommender/recommender.yaml --backend-config ~/recommender/backend_operator_local_container_config.yaml
+ ```
+
+ ## 5. Running recommender in the Data Science job within container runtime
+
+ To execute the recommender operator within a Data Science job using the container runtime, follow the steps outlined below:
+
+ Use the following command to build the recommender container. This step can be skipped if you have already done it for running the operator within a local container.
+
+ ```bash
+ ads operator build-image -t recommender
+ ```
+
+ This will create a new `recommender:v1` image, with `/etc/operator` as the designated working directory within the container.
+
+ Publish the `recommender:v1` container to the [Oracle Container Registry](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/home.htm). To become familiar with the Container Registry, review the documentation links below.
+
+ - [Access Container Registry](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Concepts/registryoverview.htm#access)
+ - [Create repositories](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Tasks/registrycreatingarepository.htm#top)
+ - [Push images](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Tasks/registrypushingimagesusingthedockercli.htm#Pushing_Images_Using_the_Docker_CLI)
+
+ To publish `recommender:v1` to OCR, use the command posted below:
+
+ ```bash
+ ads operator publish-image recommender:v1 --registry <iad.ocir.io/tenancy/>
+ ```
+
+ After the container is published to OCR, it can be used within the Data Science jobs service. Check the `backend_job_container_config.yaml` config file. It should contain pre-populated infrastructure and runtime sections. The runtime section should contain an image property, something like `image: iad.ocir.io/<tenancy>/recommender:v1`. More details about supported options can be found in the ADS Jobs documentation - [Run a Container](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/jobs/run_container.html).
+
+ Adjust the `recommender.yaml` config with the proper input/output folders. When the recommender is run in a Data Science job, it will not have access to local folders, so the input data and output folders should be placed in an Object Storage bucket. Open the `recommender.yaml` and adjust the following fields:
+
+ ```yaml
+ input_data:
+   url: oci://bucket@namespace/recommender/input_data/data.csv
+ output_directory:
+   url: oci://bucket@namespace/recommender/result/
+ ```
+
+ Run the recommender on Data Science jobs using the command posted below:
+
+ ```bash
+ ads operator run -f ~/recommender/recommender.yaml --backend-config ~/recommender/backend_job_container_config.yaml
+ ```
+
+ The logs can be monitored using the `ads opctl watch` command.
+
+ ```bash
+ ads opctl watch <OCID>
+ ```
+
+ ## 6. Running recommender in the Data Science job within conda runtime
+
+ To execute the recommender operator within a Data Science job using the conda runtime, follow the steps outlined below:
+
+ Use the following command to build the recommender conda environment.
+
+ ```bash
+ ads operator build-conda -t recommender
+ ```
+
+ This will create a new `recommender_v1` conda environment and place it in the folder specified during the `ads opctl configure` step.
+
+ Use the command below to publish the `recommender_v1` conda environment to the Object Storage bucket.
+
+ ```bash
+ ads operator publish-conda -t recommender
+ ```
+ More details about configuring the CLI can be found here - [Configuring CLI](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/cli/opctl/configure.html).
+
+
+ After the conda environment is published to Object Storage, it can be used within the Data Science jobs service. Check the `backend_job_python_config.yaml` config file. It should contain pre-populated infrastructure and runtime sections. The runtime section should contain a `conda` section.
+
+ ```yaml
+ conda:
+   type: published
+   uri: oci://bucket@namespace/conda_environments/cpu/recommender/1/recommender_v1
+ ```
+
+ More details about supported options can be found in the ADS Jobs documentation - [Run a Python Workload](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/jobs/run_python.html).
+
+ Adjust the `recommender.yaml` config with the proper input/output folders. When the recommender is run in a Data Science job, it will not have access to local folders, so the input data and output folders should be placed in an Object Storage bucket. Open the `recommender.yaml` and adjust the following fields:
+
+ ```yaml
+ input_data:
+   url: oci://bucket@namespace/recommender/input_data/data.csv
+ output_directory:
+   url: oci://bucket@namespace/recommender/result/
+ test_data:
+   url: oci://bucket@namespace/recommender/input_data/test.csv
+ ```
+
+ Run the recommender on Data Science jobs using the command posted below:
+
+ ```bash
+ ads operator run -f ~/recommender/recommender.yaml --backend-config ~/recommender/backend_job_python_config.yaml
+ ```
+
+ The logs can be monitored using the `ads opctl watch` command.
+
+ ```bash
+ ads opctl watch <OCID>
+ ```
@@ -0,0 +1,5 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
@@ -0,0 +1,82 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2024 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ import json
+ import os
+ import sys
+ from typing import Dict, List
+
+ import yaml
+
+ from ads.opctl import logger
+ from ads.opctl.operator.common.const import ENV_OPERATOR_ARGS
+ from ads.opctl.operator.common.utils import _parse_input_args
+
+ from .model.recommender_dataset import RecommenderDatasets
+ from .operator_config import RecommenderOperatorConfig
+ from .model.factory import RecommenderOperatorModelFactory
+
+ def operate(operator_config: RecommenderOperatorConfig) -> None:
+     """Runs the recommender operator."""
+
+     datasets = RecommenderDatasets(operator_config)
+     RecommenderOperatorModelFactory.get_model(
+         operator_config, datasets
+     ).generate_report()
+
+
+ def verify(spec: Dict, **kwargs: Dict) -> bool:
+     """Verifies the recommender operator config."""
+     operator = RecommenderOperatorConfig.from_dict(spec)
+     msg_header = (
+         f"{'*' * 50} The operator config has been successfully verified {'*' * 50}"
+     )
+     print(msg_header)
+     print(operator.to_yaml())
+     print("*" * len(msg_header))
+
+
+ def main(raw_args: List[str]):
+     """The entry point of the recommender operator."""
+     args, _ = _parse_input_args(raw_args)
+     if not args.file and not args.spec and not os.environ.get(ENV_OPERATOR_ARGS):
+         logger.info(
+             "Please specify -f[--file] or -s[--spec] or "
+             f"pass operator's arguments via {ENV_OPERATOR_ARGS} environment variable."
+         )
+         return
+
+     logger.info("-" * 100)
+     logger.info(
+         f"{'Running' if not args.verify else 'Verifying'} the recommender operator."
+     )
+
+     # normalize the spec to a YAML string: it may arrive as JSON or YAML,
+     # either via --spec or the ENV_OPERATOR_ARGS environment variable
+     yaml_string = ""
+     if args.spec or os.environ.get(ENV_OPERATOR_ARGS):
+         operator_spec_str = args.spec or os.environ.get(ENV_OPERATOR_ARGS)
+         try:
+             yaml_string = yaml.safe_dump(json.loads(operator_spec_str))
+         except json.JSONDecodeError:
+             yaml_string = yaml.safe_dump(yaml.safe_load(operator_spec_str))
+         except:
+             yaml_string = operator_spec_str
+
+     operator_config = RecommenderOperatorConfig.from_yaml(
+         uri=args.file,
+         yaml_string=yaml_string,
+     )
+
+     logger.info(operator_config.to_yaml())
+
+     # run the operator, or only verify the config when --verify is passed
+     if args.verify:
+         verify(operator_config)
+     else:
+         operate(operator_config)
+
+
+ if __name__ == "__main__":
+     main(sys.argv[1:])
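
As the entry point above shows, the operator spec can arrive as a YAML file (`-f`), an inline spec (`-s`), or the `ENV_OPERATOR_ARGS` environment variable, with JSON input normalized to YAML before `RecommenderOperatorConfig` is built. Below is a minimal sketch of driving it programmatically; the spec payload is illustrative only (the field names mirror the dataset and column settings used elsewhere in this diff, not a verified schema).

```python
# Illustrative only: invoke the recommender entry point by passing a JSON spec
# through ENV_OPERATOR_ARGS instead of a YAML file. The spec fields below are
# assumptions based on this diff, not a verified schema.
import json
import os

from ads.opctl.operator.common.const import ENV_OPERATOR_ARGS
from ads.opctl.operator.lowcode.recommender.__main__ import main

spec = {
    "kind": "operator",
    "type": "recommender",
    "version": "v1",
    "spec": {
        "interactions_data": {"url": "oci://bucket@namespace/recommender/interactions.csv"},
        "user_data": {"url": "oci://bucket@namespace/recommender/users.csv"},
        "item_data": {"url": "oci://bucket@namespace/recommender/items.csv"},
        "user_column": "user_id",
        "item_column": "item_id",
        "interaction_column": "rating",
        "output_directory": {"url": "oci://bucket@namespace/recommender/result/"},
    },
}

# main() falls back to ENV_OPERATOR_ARGS when neither -f nor -s is given and
# converts the JSON payload to YAML before loading the operator config.
os.environ[ENV_OPERATOR_ARGS] = json.dumps(spec)
main([])
```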
@@ -0,0 +1,33 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ from typing import Dict
+
+ from ads.opctl.operator.common.operator_yaml_generator import YamlGenerator
+ from ads.opctl.operator.common.utils import _load_yaml_from_uri
+
+
+ def init(**kwargs: Dict) -> str:
+     """
+     Generates the operator config based on the schema.
+
+     Parameters
+     ----------
+     kwargs: (Dict, optional).
+         Additional key value arguments.
+
+         - type: str
+             The type of the operator.
+
+     Returns
+     -------
+     str
+         The YAML specification generated based on the schema.
+     """
+
+     return YamlGenerator(
+         schema=_load_yaml_from_uri(__file__.replace("cmd.py", "schema.yaml"))
+     ).generate_example_dict(values={"type": kwargs.get("type")})
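
A short, hypothetical usage of `init()` above: it loads the operator's `schema.yaml` and produces an example spec from it, which is what `ads operator init -t recommender` builds on.

```python
# Hypothetical usage of cmd.init(): generate a starter recommender spec from
# the operator's schema.yaml, as `ads operator init -t recommender` does.
from ads.opctl.operator.lowcode.recommender.cmd import init

starter_spec = init(type="recommender")
print(starter_spec)
```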
@@ -0,0 +1,25 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ from ads.common.extended_enum import ExtendedEnumMeta
+
+ DEFAULT_SHOW_ROWS = 25
+ DEFAULT_REPORT_FILENAME = "report.html"
+
+ class OutputColumns(str, metaclass=ExtendedEnumMeta):
+     """Output columns for the recommender operator."""
+     USER_COL = "user"
+     ITEM_COL = "item"
+     SCORE = "score"
+
+ class SupportedMetrics(str, metaclass=ExtendedEnumMeta):
+     """Supported recommender metrics."""
+     RMSE = "RMSE"
+     MAE = "MAE"
+
+ class SupportedModels(str, metaclass=ExtendedEnumMeta):
+     """Supported recommender models."""
+     SVD = "svd"
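
Because these classes subclass `str`, their members are plain strings and compare equal to the literals used in specs and DataFrames. A minimal sketch, assuming the package is installed:

```python
# Minimal sketch: the str-based constants above behave like plain strings,
# so they can be compared with spec values and used directly as column labels.
from ads.opctl.operator.lowcode.recommender.constant import (
    OutputColumns,
    SupportedModels,
)

assert SupportedModels.SVD == "svd"
assert OutputColumns.SCORE == "score"

columns = [OutputColumns.USER_COL, OutputColumns.ITEM_COL, OutputColumns.SCORE]
print(columns)  # ['user', 'item', 'score']
```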
@@ -0,0 +1,11 @@
+ name: recommender
+ channels:
+   - conda-forge
+ dependencies:
+   - python=3.9
+   - pip
+   - pip:
+       - report-creator
+       - oracle_ads[opctl]
+       - plotly
+       - scikit-surprise
@@ -0,0 +1,198 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023, 2024 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ import os
+ import tempfile
+ import time
+ from abc import ABC, abstractmethod
+ from typing import Tuple, Dict
+
+ import fsspec
+ import pandas as pd
+ import report_creator as rc
+
+ from ads.common.object_storage_details import ObjectStorageDetails
+ from ads.opctl import logger
+ from ads.opctl.operator.lowcode.common.utils import default_signer
+ from ads.opctl.operator.lowcode.common.utils import (
+     human_time_friendly,
+     enable_print,
+     disable_print,
+     write_data,
+ )
+ from .factory import SupportedModels
+ from .recommender_dataset import RecommenderDatasets
+ from ..operator_config import RecommenderOperatorConfig
+ from plotly import graph_objects as go
+ import matplotlib.pyplot as plt
+
+
+ class RecommenderOperatorBaseModel(ABC):
+     """The base class for the recommender operator models."""
+
+     def __init__(self, config: RecommenderOperatorConfig, datasets: RecommenderDatasets):
+         self.config = config
+         self.spec = self.config.spec
+         self.datasets = datasets
+
+     def generate_report(self):
+         item_col = self.spec.item_column
+         user_col = self.spec.user_column
+         interaction_col = self.spec.interaction_column
+         start_time = time.time()
+         result_df, metrics = self._build_model()
+         elapsed_time = time.time() - start_time
+         logger.info("Building the models completed in %s seconds", elapsed_time)
+
+         if self.spec.generate_report:
+             # build the report
+             (
+                 model_description,
+                 other_sections,
+             ) = self._generate_report()
+
+             header_section = rc.Block(
+                 rc.Heading("Recommender Report", level=1),
+                 rc.Text(
+                     f"The recommendations were generated using {SupportedModels.SVD.upper()}. {model_description}"
+                 ),
+                 rc.Group(
+                     rc.Metric(
+                         heading="Recommendations were generated in ",
+                         value=human_time_friendly(elapsed_time),
+                     ),
+                     rc.Metric(
+                         heading="Num users",
+                         value=len(self.datasets.users),
+                     ),
+                     rc.Metric(
+                         heading="Num items",
+                         value=len(self.datasets.items),
+                     )
+                 ),
+             )
+
+             summary = rc.Block(
+                 header_section,
+             )
+             # user and item distributions in interactions
+             user_title = rc.Heading("User Statistics", level=2)
+             user_rating_counts = self.datasets.interactions[user_col].value_counts()
+             fig_user = go.Figure(data=[go.Histogram(x=user_rating_counts, nbinsx=100)])
+             fig_user.update_layout(
+                 title=f'Distribution of the number of interactions by {user_col}',
+                 xaxis_title=f'Number of {interaction_col}',
+                 yaxis_title=f'Number of {user_col}',
+                 bargap=0.2
+             )
+             item_title = rc.Heading("Item Statistics", level=2)
+             item_rating_counts = self.datasets.interactions[item_col].value_counts()
+             fig_item = go.Figure(data=[go.Histogram(x=item_rating_counts, nbinsx=100)])
+             fig_item.update_layout(
+                 title=f'Distribution of the number of interactions by {item_col}',
+                 xaxis_title=f'Number of {interaction_col}',
+                 yaxis_title=f'Number of {item_col}',
+                 bargap=0.2
+             )
+             result_heatmap_title = rc.Heading("Sample Recommendations", level=2)
+             sample_items = result_df[item_col].head(100).index
+             filtered_df = result_df[result_df[item_col].isin(sample_items)]
+             data = filtered_df.pivot(index=user_col, columns=item_col, values=interaction_col)
+             fig = go.Figure(data=go.Heatmap(
+                 z=data.values,
+                 x=data.columns,
+                 y=data.index,
+                 colorscale='Viridis'
+             ))
+             fig.update_layout(
+                 title='Recommendation heatmap of User-Item Interactions (sample)',
+                 width=1500,
+                 height=800,
+                 xaxis_title=item_col,
+                 yaxis_title=user_col,
+                 coloraxis_colorbar=dict(title=interaction_col)
+             )
+             plots = [user_title, rc.Widget(fig_user),
+                      item_title, rc.Widget(fig_item),
+                      result_heatmap_title, rc.Widget(fig)]
+
+             test_metrics_sections = [rc.DataTable(pd.DataFrame(metrics, index=[0]))]
+             yaml_appendix_title = rc.Heading("Reference: YAML File", level=2)
+             yaml_appendix = rc.Yaml(self.config.to_dict())
+             report_sections = (
+                 [summary]
+                 + plots
+                 + test_metrics_sections
+                 + other_sections
+                 + [yaml_appendix_title, yaml_appendix]
+             )
+
+             # save the report and result CSV
+             self._save_report(
+                 report_sections=report_sections,
+                 result_df=result_df
+             )
+
+     def _evaluation_metrics(self):
+         pass
+
+     def _test_data_evaluate_metrics(self):
+         pass
+
+     def _save_report(self, report_sections: Tuple, result_df: pd.DataFrame):
+         """Saves resulting reports to the given folder."""
+
+         unique_output_dir = self.spec.output_directory.url
+
+         if ObjectStorageDetails.is_oci_path(unique_output_dir):
+             storage_options = default_signer()
+         else:
+             storage_options = dict()
+
+         # report-creator html report
+         if self.spec.generate_report:
+             with tempfile.TemporaryDirectory() as temp_dir:
+                 report_local_path = os.path.join(temp_dir, "___report.html")
+                 disable_print()
+                 with rc.ReportCreator("My Report") as report:
+                     report.save(rc.Block(*report_sections), report_local_path)
+                 enable_print()
+
+                 report_path = os.path.join(unique_output_dir, self.spec.report_filename)
+                 with open(report_local_path) as f1:
+                     with fsspec.open(
+                         report_path,
+                         "w",
+                         **storage_options,
+                     ) as f2:
+                         f2.write(f1.read())
+
+         # recommender csv report
+         write_data(
+             data=result_df,
+             filename=os.path.join(unique_output_dir, self.spec.recommendations_filename),
+             format="csv",
+             storage_options=storage_options,
+         )
+
+         logger.info(
+             f"The outputs have been successfully "
+             f"generated and placed into the directory: {unique_output_dir}."
+         )
+
+     @abstractmethod
+     def _generate_report(self):
+         """
+         Generates the report for the particular model.
+         The method that needs to be implemented on the particular model level.
+         """
+
+     @abstractmethod
+     def _build_model(self) -> Tuple[pd.DataFrame, Dict]:
+         """
+         Builds the model.
+         The method that needs to be implemented on the particular model level.
+         """
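
The two abstract methods define the contract a concrete model must satisfy: `_build_model()` returns the recommendations DataFrame plus a metrics dict, and `_generate_report()` returns a model description string plus extra report sections. The implementation shipped in this release is `SVDOperatorModel` in `svd.py` (not shown here); the following is only a hypothetical minimal subclass to illustrate that contract, assuming the `users` table exposes the configured user column.

```python
# Hypothetical subclass, shown only to illustrate the _build_model /
# _generate_report contract; the real implementation in this release is
# SVDOperatorModel (svd.py).
from typing import Dict, Tuple

import pandas as pd
import report_creator as rc

from ads.opctl.operator.lowcode.recommender.model.base_model import (
    RecommenderOperatorBaseModel,
)


class PopularityBaselineModel(RecommenderOperatorBaseModel):
    """Recommends every user the items with the highest mean interaction score."""

    def _build_model(self) -> Tuple[pd.DataFrame, Dict]:
        user_col = self.spec.user_column
        item_col = self.spec.item_column
        score_col = self.spec.interaction_column

        # Rank items by their average interaction value and keep the top 10.
        top_items = (
            self.datasets.interactions.groupby(item_col)[score_col].mean().nlargest(10)
        )
        # Assumption: the users table exposes the configured user column.
        rows = [
            {user_col: user, item_col: item, score_col: score}
            for user in self.datasets.users[user_col]
            for item, score in top_items.items()
        ]
        # No held-out evaluation in this sketch, so the metric is a placeholder.
        return pd.DataFrame(rows), {"RMSE": float("nan")}

    def _generate_report(self) -> Tuple[str, list]:
        description = (
            "A popularity baseline that ranks items by their mean interaction score."
        )
        other_sections = [rc.Text("No model-specific sections for this baseline.")]
        return description, other_sections
```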
@@ -0,0 +1,58 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ from ..constant import SupportedModels
+ from ..operator_config import RecommenderOperatorConfig
+ from .base_model import RecommenderOperatorBaseModel
+ from .recommender_dataset import RecommenderDatasets
+ from .svd import SVDOperatorModel
+
+ class UnSupportedModelError(Exception):
+     def __init__(self, model_type: str):
+         super().__init__(
+             f"Model: `{model_type}` "
+             f"is not supported. Supported models: {SupportedModels.values}"
+         )
+
+
+ class RecommenderOperatorModelFactory:
+     """
+     A factory class that instantiates the proper model operator based on the model type.
+     """
+
+     _MAP = {
+         SupportedModels.SVD: SVDOperatorModel
+     }
+
+     @classmethod
+     def get_model(
+         cls, operator_config: RecommenderOperatorConfig, datasets: RecommenderDatasets
+     ) -> RecommenderOperatorBaseModel:
+         """
+         Gets the operator model based on the model type.
+
+         Parameters
+         ----------
+         operator_config: RecommenderOperatorConfig
+             The recommender operator config.
+
+         datasets: RecommenderDatasets
+             The datasets used by the recommender.
+
+         Returns
+         -------
+         RecommenderOperatorBaseModel
+             The recommender operator model.
+
+         Raises
+         ------
+         UnSupportedModelError
+             If the requested model is not supported.
+         """
+         model_type = SupportedModels.SVD
+         if model_type not in cls._MAP:
+             raise UnSupportedModelError(model_type)
+         return cls._MAP[model_type](config=operator_config, datasets=datasets)
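
For reference, `_MAP` is the factory's extension point, although `get_model()` above currently hard-codes `SupportedModels.SVD`, so registering another class would also require changing that selection. A hypothetical sketch, reusing the baseline subclass from the previous example:

```python
# Hypothetical only: register an additional model class in the internal
# factory map. Note that get_model() currently always selects
# SupportedModels.SVD, so the selection logic would also need to change
# before this entry is actually used.
from ads.opctl.operator.lowcode.recommender.model.factory import (
    RecommenderOperatorModelFactory,
)

RecommenderOperatorModelFactory._MAP["popularity"] = PopularityBaselineModel
```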
@@ -0,0 +1,25 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*--
+
+ # Copyright (c) 2023 Oracle and/or its affiliates.
+ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
+
+ import pandas as pd
+
+ from ads.opctl.operator.lowcode.common.utils import load_data
+ from ..operator_config import RecommenderOperatorConfig
+
+
+ class RecommenderDatasets:
+     def __init__(self, config: RecommenderOperatorConfig):
+         """Loads the datasets required by the recommender operator.
+
+         Parameters
+         ----------
+         config: RecommenderOperatorConfig
+             The recommender operator config.
+         """
+         spec = config.spec
+         self.interactions: pd.DataFrame = load_data(getattr(spec, "interactions_data"))
+         self.users: pd.DataFrame = load_data(getattr(spec, "user_data"))
+         self.items: pd.DataFrame = load_data(getattr(spec, "item_data"))
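
The three tables loaded above feed the base model through the column names configured in the spec (`user_column`, `item_column`, `interaction_column`). A toy sketch of the expected shape, with illustrative column names:

```python
# Toy example of the three tables RecommenderDatasets loads. The column names
# are illustrative; the real ones come from user_column / item_column /
# interaction_column in recommender.yaml.
import pandas as pd

interactions = pd.DataFrame(
    {
        "user_id": [1, 1, 2, 3],
        "item_id": ["a", "b", "a", "c"],
        "rating": [5, 3, 4, 2],
    }
)
users = pd.DataFrame({"user_id": [1, 2, 3]})
items = pd.DataFrame({"item_id": ["a", "b", "c"]})
```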