clarifai 10.8.6__py3-none-any.whl → 10.8.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "10.8.6"
1
+ __version__ = "10.8.7"
clarifai/client/app.py CHANGED
@@ -19,8 +19,7 @@ from clarifai.client.workflow import Workflow
19
19
  from clarifai.constants.model import TRAINABLE_MODEL_TYPES
20
20
  from clarifai.errors import UserError
21
21
  from clarifai.urls.helper import ClarifaiUrlHelper
22
- from clarifai.utils.logging import (display_concept_relations_tree, display_workflow_tree,
23
- get_logger)
22
+ from clarifai.utils.logging import display_concept_relations_tree, display_workflow_tree, logger
24
23
  from clarifai.utils.misc import concept_relations_accumulation
25
24
  from clarifai.workflows.utils import get_yaml_output_info_proto, is_same_yaml_model
26
25
  from clarifai.workflows.validate import validate
@@ -61,7 +60,7 @@ class App(Lister, BaseClient):
61
60
 
62
61
  self.kwargs = {**kwargs, 'id': app_id}
63
62
  self.app_info = resources_pb2.App(**self.kwargs)
64
- self.logger = get_logger(logger_level="INFO", name=__name__)
63
+ self.logger = logger
65
64
  BaseClient.__init__(
66
65
  self,
67
66
  user_id=self.user_id,
@@ -58,17 +58,15 @@ def https_cache(cache: dict, url: str) -> str:
58
58
 
59
59
  class ClarifaiAuthHelper:
60
60
 
61
- def __init__(
62
- self,
63
- user_id: str,
64
- app_id: str,
65
- pat: str,
66
- token: str = "",
67
- base: str = DEFAULT_BASE,
68
- ui: str = DEFAULT_UI,
69
- root_certificates_path: str = None,
70
- validate: bool = True,
71
- ):
61
+ def __init__(self,
62
+ user_id: str = "",
63
+ app_id: str = "",
64
+ pat: str = "",
65
+ token: str = "",
66
+ base: str = DEFAULT_BASE,
67
+ ui: str = DEFAULT_UI,
68
+ root_certificates_path: str = None,
69
+ validate: bool = True):
72
70
  """
73
71
  A helper to get the authorization information needed to make API calls with the grpc
74
72
  client to a specific app using a personal access token.
clarifai/client/base.py CHANGED
@@ -8,6 +8,7 @@ from google.protobuf.wrappers_pb2 import BoolValue
8
8
 
9
9
  from clarifai.client.auth import create_stub
10
10
  from clarifai.client.auth.helper import ClarifaiAuthHelper
11
+ from clarifai.constants.base import COMPUTE_ORCHESTRATION_RESOURCES
11
12
  from clarifai.errors import ApiError, UserError
12
13
  from clarifai.utils.constants import CLARIFAI_PAT_ENV_VAR, CLARIFAI_SESSION_TOKEN_ENV_VAR
13
14
  from clarifai.utils.misc import get_from_dict_or_env
@@ -88,8 +89,8 @@ class BaseClient:
88
89
  if kwargs.get("url"):
89
90
  default_kwargs.pop("user_id", "")
90
91
  default_kwargs.pop("app_id", "")
91
- # Remove app_id if the class name contains "Runner"
92
- if 'Runner' in _clss.__name__:
92
+ # Remove app_id if the class name is a compute orchestration resource
93
+ if any(co_resource in _clss.__name__ for co_resource in COMPUTE_ORCHESTRATION_RESOURCES):
93
94
  default_kwargs.pop("app_id", "")
94
95
  kwargs.update({**default_kwargs, "base_url": _base})
95
96
 
@@ -181,7 +182,7 @@ class BaseClient:
181
182
  value = resources_pb2.ImageInfo(**value)
182
183
  elif key == 'hosted_image_info':
183
184
  continue
184
- elif key in ['metadata']:
185
+ elif key in ['metadata', 'presets']:
185
186
  if isinstance(value, dict) and value != {}:
186
187
  value_s = struct_pb2.Struct()
187
188
  value_s.update(value)
@@ -0,0 +1,196 @@
1
import os
from typing import Any, Dict, Generator, List

import yaml
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
from clarifai_grpc.grpc.api.status import status_code_pb2
from google.protobuf.json_format import MessageToDict

from clarifai.client.base import BaseClient
from clarifai.client.lister import Lister
from clarifai.client.nodepool import Nodepool
from clarifai.errors import UserError
from clarifai.utils.logging import logger


class ComputeCluster(Lister, BaseClient):
  """ComputeCluster is a class that provides access to Clarifai API endpoints related to Compute Cluster information."""

  def __init__(self,
               compute_cluster_id: str = None,
               user_id: str = None,
               base_url: str = "https://api.clarifai.com",
               pat: str = None,
               token: str = None,
               root_certificates_path: str = None,
               **kwargs):
    """Initializes a ComputeCluster object.

    Args:
        compute_cluster_id (str): The ComputeCluster ID for the ComputeCluster to interact with.
        user_id (str): The user ID of the user.
        base_url (str): Base API url. Default "https://api.clarifai.com"
        pat (str): A personal access token for authentication. Can be set as env var CLARIFAI_PAT
        token (str): A session token for authentication. Accepts either a session token or a pat. Can be set as env var CLARIFAI_SESSION_TOKEN
        root_certificates_path (str): Path to the SSL root certificates file, used to establish secure gRPC connections.
        **kwargs: Additional keyword arguments to be passed to the compute cluster.
    """
    self.kwargs = {**kwargs, 'id': compute_cluster_id, 'user_id': user_id}
    self.compute_cluster_info = resources_pb2.ComputeCluster(**self.kwargs)
    self.logger = logger
    BaseClient.__init__(
        self,
        user_id=self.user_id,
        base=base_url,
        pat=pat,
        token=token,
        root_certificates_path=root_certificates_path)
    Lister.__init__(self)

  def list_nodepools(self, page_no: int = None,
                     per_page: int = None) -> Generator[Nodepool, None, None]:
    """Lists all the available nodepools of the compute cluster.

    Args:
        page_no (int): The page number to list.
        per_page (int): The number of items per page.

    Yields:
        Nodepool: Nodepool objects for the nodepools in the compute cluster.

    Example:
        >>> from clarifai.client.compute_cluster import ComputeCluster
        >>> compute_cluster = ComputeCluster(compute_cluster_id="compute_cluster_id", user_id="user_id")
        >>> all_nodepools = list(compute_cluster.list_nodepools())

    Note:
        Defaults to 16 per page if page_no is specified and per_page is not specified.
        If both page_no and per_page are None, then lists all the resources.
    """
    request_data = dict(user_app_id=self.user_app_id, compute_cluster_id=self.id)
    all_nodepools_info = self.list_pages_generator(
        self.STUB.ListNodepools,
        service_pb2.ListNodepoolsRequest,
        request_data,
        per_page=per_page,
        page_no=page_no)

    for nodepool_info in all_nodepools_info:
      yield Nodepool.from_auth_helper(auth=self.auth_helper, **nodepool_info)

  def _process_nodepool_config(self, config_filepath: str) -> Dict[str, Any]:
    """Loads a nodepool YAML config and converts its nested sections to proto messages.

    Args:
        config_filepath (str): Path to the nodepool YAML config file.

    Returns:
        Dict[str, Any]: The `nodepool` section of the config with nested fields
        converted to the corresponding `resources_pb2` messages, ready to be
        splatted into `resources_pb2.Nodepool`.
    """
    with open(config_filepath, "r") as file:
      nodepool_config = yaml.safe_load(file)

    assert "nodepool" in nodepool_config, "nodepool info not found in the config file"
    nodepool = nodepool_config['nodepool']
    # Fixed: assertion messages previously referenced compute-cluster keys
    # (region/managed_by/cluster_type) instead of the keys actually checked.
    assert "instance_types" in nodepool, "instance_types not found in the config file"
    assert "node_capacity_type" in nodepool, "node_capacity_type not found in the config file"
    assert "max_instances" in nodepool, "max_instances not found in the config file"
    nodepool['compute_cluster'] = resources_pb2.ComputeCluster(id=self.id, user_id=self.user_id)
    nodepool['node_capacity_type'] = resources_pb2.NodeCapacityType(capacity_types=[
        capacity_type for capacity_type in nodepool['node_capacity_type']['capacity_types']
    ])
    instance_types = []
    for instance_type in nodepool['instance_types']:
      if 'compute_info' in instance_type:
        instance_type['compute_info'] = resources_pb2.ComputeInfo(**instance_type['compute_info'])
      instance_types.append(resources_pb2.InstanceType(**instance_type))
    nodepool['instance_types'] = instance_types
    if "visibility" in nodepool:
      nodepool["visibility"] = resources_pb2.Visibility(**nodepool["visibility"])
    return nodepool

  def create_nodepool(self, nodepool_id: str, config_filepath: str) -> Nodepool:
    """Creates a nodepool for the compute cluster.

    Args:
        nodepool_id (str): The nodepool ID for the nodepool to create.
            An `id` inside the config file takes precedence over this argument.
        config_filepath (str): The path to the nodepool config file.

    Returns:
        Nodepool: A Nodepool object for the specified nodepool ID.

    Raises:
        UserError: If the config file does not exist.
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.compute_cluster import ComputeCluster
        >>> compute_cluster = ComputeCluster(compute_cluster_id="compute_cluster_id", user_id="user_id")
        >>> nodepool = compute_cluster.create_nodepool(nodepool_id="nodepool_id", config_filepath="config.yml")
    """
    if not os.path.exists(config_filepath):
      raise UserError(f"Nodepool config file not found at {config_filepath}")

    nodepool_config = self._process_nodepool_config(config_filepath)

    if 'id' in nodepool_config:
      nodepool_id = nodepool_config['id']
      nodepool_config.pop('id')

    request = service_pb2.PostNodepoolsRequest(
        user_app_id=self.user_app_id,
        compute_cluster_id=self.id,
        nodepools=[resources_pb2.Nodepool(id=nodepool_id, **nodepool_config)])
    response = self._grpc_request(self.STUB.PostNodepools, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nNodepool created\n%s", response.status)

    return Nodepool.from_auth_helper(self.auth_helper, nodepool_id=nodepool_id)

  def nodepool(self, nodepool_id: str) -> Nodepool:
    """Returns a Nodepool object for the existing nodepool ID.

    Args:
        nodepool_id (str): The nodepool ID for the nodepool to interact with.

    Returns:
        Nodepool: A Nodepool object for the existing nodepool ID.

    Raises:
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.compute_cluster import ComputeCluster
        >>> compute_cluster = ComputeCluster(compute_cluster_id="compute_cluster_id", user_id="user_id")
        >>> nodepool = compute_cluster.nodepool(nodepool_id="nodepool_id")
    """
    request = service_pb2.GetNodepoolRequest(
        user_app_id=self.user_app_id, compute_cluster_id=self.id, nodepool_id=nodepool_id)
    response = self._grpc_request(self.STUB.GetNodepool, request)

    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    dict_response = MessageToDict(response, preserving_proto_field_name=True)
    # The second key of the response dict holds the nodepool payload (first is 'status').
    kwargs = self.process_response_keys(dict_response[list(dict_response.keys())[1]],
                                        list(dict_response.keys())[1])

    return Nodepool.from_auth_helper(auth=self.auth_helper, **kwargs)

  def delete_nodepools(self, nodepool_ids: List[str]) -> None:
    """Deletes list of nodepools for the compute cluster.

    Args:
        nodepool_ids (List[str]): The nodepool IDs of the compute cluster to delete.

    Raises:
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.compute_cluster import ComputeCluster
        >>> compute_cluster = ComputeCluster(compute_cluster_id="compute_cluster_id", user_id="user_id")
        >>> compute_cluster.delete_nodepools(nodepool_ids=["nodepool_id1", "nodepool_id2"])
    """
    assert isinstance(nodepool_ids, list), "nodepool_ids param should be a list"

    request = service_pb2.DeleteNodepoolsRequest(
        user_app_id=self.user_app_id, compute_cluster_id=self.id, ids=nodepool_ids)
    response = self._grpc_request(self.STUB.DeleteNodepools, request)

    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nNodepools Deleted\n%s", response.status)

  def __getattr__(self, name):
    # Fall through to the underlying proto for any attribute not on the client.
    return getattr(self.compute_cluster_info, name)

  def __str__(self):
    init_params = [param for param in self.kwargs.keys()]
    attribute_strings = [
        f"{param}={getattr(self.compute_cluster_info, param)}" for param in init_params
        if hasattr(self.compute_cluster_info, param)
    ]
    return f"Clarifai Compute Cluster Details: \n{', '.join(attribute_strings)}\n"
@@ -30,7 +30,7 @@ from clarifai.datasets.upload.text import TextClassificationDataset
30
30
  from clarifai.datasets.upload.utils import DisplayUploadStatus
31
31
  from clarifai.errors import UserError
32
32
  from clarifai.urls.helper import ClarifaiUrlHelper
33
- from clarifai.utils.logging import add_file_handler, get_logger, process_log_files
33
+ from clarifai.utils.logging import add_file_handler, logger, process_log_files
34
34
  from clarifai.utils.misc import BackoffIterator, Chunker
35
35
 
36
36
  ClarifaiDatasetType = TypeVar('ClarifaiDatasetType', VisualClassificationDataset,
@@ -86,7 +86,7 @@ class Dataset(Lister, BaseClient):
86
86
  token=token,
87
87
  base_url=base_url,
88
88
  root_certificates_path=root_certificates_path)
89
- self.logger = get_logger(logger_level="INFO", name=__name__)
89
+ self.logger = logger
90
90
  BaseClient.__init__(
91
91
  self,
92
92
  user_id=self.user_id,
@@ -0,0 +1,51 @@
1
from clarifai_grpc.grpc.api import resources_pb2

from clarifai.client.base import BaseClient
from clarifai.client.lister import Lister
from clarifai.utils.logging import logger


class Deployment(Lister, BaseClient):
  """Deployment is a class that provides access to Clarifai API endpoints related to Deployment information."""

  def __init__(self,
               deployment_id: str = None,
               user_id: str = None,
               base_url: str = "https://api.clarifai.com",
               pat: str = None,
               token: str = None,
               root_certificates_path: str = None,
               **kwargs):
    """Initializes a Deployment object.

    Args:
        deployment_id (str): The Deployment ID for the Deployment to interact with.
        user_id (str): The user ID of the user.
        base_url (str): Base API url. Default "https://api.clarifai.com"
        pat (str): A personal access token for authentication. Can be set as env var CLARIFAI_PAT
        token (str): A session token for authentication. Accepts either a session token or a pat. Can be set as env var CLARIFAI_SESSION_TOKEN
        root_certificates_path (str): Path to the SSL root certificates file, used to establish secure gRPC connections.
        **kwargs: Additional keyword arguments to be passed to the deployment.
    """
    # Merge caller-supplied fields with the identifying ids and keep them for __str__.
    self.kwargs = dict(kwargs, id=deployment_id, user_id=user_id)
    self.deployment_info = resources_pb2.Deployment(**self.kwargs)
    self.logger = logger
    BaseClient.__init__(
        self,
        user_id=user_id,
        base=base_url,
        pat=pat,
        token=token,
        root_certificates_path=root_certificates_path)
    Lister.__init__(self)

  def __getattr__(self, name):
    # Delegate unknown attributes to the wrapped Deployment proto.
    return getattr(self.deployment_info, name)

  def __str__(self):
    details = []
    for field in self.kwargs:
      if hasattr(self.deployment_info, field):
        details.append(f"{field}={getattr(self.deployment_info, field)}")
    return f"Deployment Details: \n{', '.join(details)}\n"
clarifai/client/input.py CHANGED
@@ -21,7 +21,7 @@ from clarifai.client.lister import Lister
21
21
  from clarifai.constants.dataset import MAX_RETRIES
22
22
  from clarifai.constants.input import MAX_UPLOAD_BATCH_SIZE
23
23
  from clarifai.errors import UserError
24
- from clarifai.utils.logging import get_logger
24
+ from clarifai.utils.logging import logger
25
25
  from clarifai.utils.misc import BackoffIterator, Chunker
26
26
 
27
27
 
@@ -52,7 +52,7 @@ class Inputs(Lister, BaseClient):
52
52
  self.app_id = app_id
53
53
  self.kwargs = {**kwargs}
54
54
  self.input_info = resources_pb2.Input(**self.kwargs)
55
- self.logger = get_logger(logger_level=logger_level, name=__name__)
55
+ self.logger = logger
56
56
  BaseClient.__init__(
57
57
  self,
58
58
  user_id=self.user_id,
clarifai/client/model.py CHANGED
@@ -23,7 +23,7 @@ from clarifai.constants.model import (CHUNK_SIZE, MAX_CHUNK_SIZE, MAX_MODEL_PRED
23
23
  MODEL_EXPORT_TIMEOUT, RANGE_SIZE, TRAINABLE_MODEL_TYPES)
24
24
  from clarifai.errors import UserError
25
25
  from clarifai.urls.helper import ClarifaiUrlHelper
26
- from clarifai.utils.logging import get_logger
26
+ from clarifai.utils.logging import logger
27
27
  from clarifai.utils.misc import BackoffIterator
28
28
  from clarifai.utils.model_train import (find_and_replace_key, params_parser,
29
29
  response_to_model_params, response_to_param_info,
@@ -68,7 +68,7 @@ class Model(Lister, BaseClient):
68
68
  kwargs = {'user_id': user_id, 'app_id': app_id}
69
69
  self.kwargs = {**kwargs, 'id': model_id, 'model_version': model_version, }
70
70
  self.model_info = resources_pb2.Model(**self.kwargs)
71
- self.logger = get_logger(logger_level="INFO", name=__name__)
71
+ self.logger = logger
72
72
  self.training_params = {}
73
73
  BaseClient.__init__(
74
74
  self,
clarifai/client/module.py CHANGED
@@ -6,7 +6,7 @@ from clarifai.client.base import BaseClient
6
6
  from clarifai.client.lister import Lister
7
7
  from clarifai.errors import UserError
8
8
  from clarifai.urls.helper import ClarifaiUrlHelper
9
- from clarifai.utils.logging import get_logger
9
+ from clarifai.utils.logging import logger
10
10
 
11
11
 
12
12
  class Module(Lister, BaseClient):
@@ -44,7 +44,7 @@ class Module(Lister, BaseClient):
44
44
 
45
45
  self.kwargs = {**kwargs, 'id': module_id, 'module_version': module_version}
46
46
  self.module_info = resources_pb2.Module(**self.kwargs)
47
- self.logger = get_logger(logger_level="INFO", name=__name__)
47
+ self.logger = logger
48
48
  BaseClient.__init__(
49
49
  self,
50
50
  user_id=self.user_id,
@@ -0,0 +1,207 @@
1
import os
from typing import Any, Dict, Generator, List

import yaml
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
from clarifai_grpc.grpc.api.status import status_code_pb2
from google.protobuf.json_format import MessageToDict

from clarifai.client.base import BaseClient
from clarifai.client.deployment import Deployment
from clarifai.client.lister import Lister
from clarifai.errors import UserError
from clarifai.utils.logging import logger


class Nodepool(Lister, BaseClient):
  """Nodepool is a class that provides access to Clarifai API endpoints related to Nodepool information."""

  def __init__(self,
               nodepool_id: str = None,
               user_id: str = None,
               base_url: str = "https://api.clarifai.com",
               pat: str = None,
               token: str = None,
               root_certificates_path: str = None,
               **kwargs):
    """Initializes a Nodepool object.

    Args:
        nodepool_id (str): The Nodepool ID for the Nodepool to interact with.
        user_id (str): The user ID of the user.
        base_url (str): Base API url. Default "https://api.clarifai.com"
        pat (str): A personal access token for authentication. Can be set as env var CLARIFAI_PAT
        token (str): A session token for authentication. Accepts either a session token or a pat. Can be set as env var CLARIFAI_SESSION_TOKEN
        root_certificates_path (str): Path to the SSL root certificates file, used to establish secure gRPC connections.
        **kwargs: Additional keyword arguments to be passed to the nodepool.
    """
    self.kwargs = {**kwargs, 'id': nodepool_id}
    self.nodepool_info = resources_pb2.Nodepool(**self.kwargs)
    self.logger = logger
    BaseClient.__init__(
        self,
        user_id=user_id,
        base=base_url,
        pat=pat,
        token=token,
        root_certificates_path=root_certificates_path)
    Lister.__init__(self)

  def list_deployments(self,
                       filter_by: Dict[str, Any] = None,
                       page_no: int = None,
                       per_page: int = None) -> Generator[Deployment, None, None]:
    """Lists all the available deployments of the nodepool.

    Args:
        filter_by (Dict[str, Any]): The filter to apply to the list of deployments.
        page_no (int): The page number to list.
        per_page (int): The number of items per page.

    Yields:
        Deployment: Deployment objects for the deployments in the nodepool.

    Example:
        >>> from clarifai.client.nodepool import Nodepool
        >>> nodepool = Nodepool(nodepool_id="nodepool_id", user_id="user_id")
        >>> all_deployments = list(nodepool.list_deployments())

    Note:
        Defaults to 16 per page if page_no is specified and per_page is not specified.
        If both page_no and per_page are None, then lists all the resources.
    """
    # Fixed: the default was a mutable `{}` shared across calls; None + fallback
    # is the safe equivalent and is backward compatible for all callers.
    filter_by = filter_by or {}
    request_data = dict(user_app_id=self.user_app_id, nodepool_id=self.id, **filter_by)
    all_deployments_info = self.list_pages_generator(
        self.STUB.ListDeployments,
        service_pb2.ListDeploymentsRequest,
        request_data,
        per_page=per_page,
        page_no=page_no)

    for deployment_info in all_deployments_info:
      yield Deployment.from_auth_helper(auth=self.auth_helper, **deployment_info)

  def _process_deployment_config(self, config_filepath: str) -> Dict[str, Any]:
    """Loads a deployment YAML config and converts its nested sections to proto messages.

    Args:
        config_filepath (str): Path to the deployment YAML config file.

    Returns:
        Dict[str, Any]: The `deployment` section of the config with nested fields
        converted to the corresponding `resources_pb2` messages.
    """
    with open(config_filepath, "r") as file:
      deployment_config = yaml.safe_load(file)

    assert "deployment" in deployment_config, "deployment info not found in the config file"
    deployment = deployment_config['deployment']
    assert "autoscale_config" in deployment, "autoscale_config not found in the config file"
    assert ("worker" in deployment) and (
        ("model" in deployment["worker"]) or
        ("workflow" in deployment["worker"])), "worker info not found in the config file"
    assert "scheduling_choice" in deployment, "scheduling_choice not found in the config file"
    assert "nodepools" in deployment, "nodepools not found in the config file"
    deployment['user_id'] = self.user_app_id.user_id
    deployment['autoscale_config'] = resources_pb2.AutoscaleConfig(
        **deployment['autoscale_config'])
    deployment['nodepools'] = [
        resources_pb2.Nodepool(
            id=nodepool['id'],
            compute_cluster=resources_pb2.ComputeCluster(
                id=nodepool['compute_cluster']['id'], user_id=self.user_app_id.user_id))
        for nodepool in deployment['nodepools']
    ]
    # NOTE(review): only the first matching worker kind is converted; configs are
    # presumably expected to set exactly one of user/model/workflow — confirm.
    if 'user' in deployment['worker']:
      deployment['worker']['user'] = resources_pb2.User(**deployment['worker']['user'])
    elif 'model' in deployment['worker']:
      deployment['worker']['model'] = resources_pb2.Model(**deployment['worker']['model'])
    elif 'workflow' in deployment['worker']:
      deployment['worker']['workflow'] = resources_pb2.Workflow(**deployment['worker']['workflow'])
    deployment['worker'] = resources_pb2.Worker(**deployment['worker'])
    if "visibility" in deployment:
      deployment["visibility"] = resources_pb2.Visibility(**deployment["visibility"])
    return deployment

  def create_deployment(self, deployment_id: str, config_filepath: str) -> Deployment:
    """Creates a deployment for the nodepool.

    Args:
        deployment_id (str): The deployment ID for the deployment to create.
            An `id` inside the config file takes precedence over this argument.
        config_filepath (str): The path to the deployment config file.

    Returns:
        Deployment: A Deployment object for the specified deployment ID.

    Raises:
        UserError: If the config file does not exist.
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.nodepool import Nodepool
        >>> nodepool = Nodepool(nodepool_id="nodepool_id", user_id="user_id")
        >>> deployment = nodepool.create_deployment(deployment_id="deployment_id", config_filepath="config.yml")
    """
    if not os.path.exists(config_filepath):
      raise UserError(f"Deployment config file not found at {config_filepath}")

    deployment_config = self._process_deployment_config(config_filepath)

    if 'id' in deployment_config:
      deployment_id = deployment_config['id']
      deployment_config.pop('id')

    request = service_pb2.PostDeploymentsRequest(
        user_app_id=self.user_app_id,
        deployments=[resources_pb2.Deployment(id=deployment_id, **deployment_config)])
    response = self._grpc_request(self.STUB.PostDeployments, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nDeployment created\n%s", response.status)

    return Deployment.from_auth_helper(self.auth_helper, deployment_id=deployment_id)

  def deployment(self, deployment_id: str) -> Deployment:
    """Returns a Deployment object for the existing deployment ID.

    Args:
        deployment_id (str): The deployment ID for the deployment to interact with.

    Returns:
        Deployment: A Deployment object for the existing deployment ID.

    Raises:
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.nodepool import Nodepool
        >>> nodepool = Nodepool(nodepool_id="nodepool_id", user_id="user_id")
        >>> deployment = nodepool.deployment(deployment_id="deployment_id")
    """
    request = service_pb2.GetDeploymentRequest(
        user_app_id=self.user_app_id, deployment_id=deployment_id)
    response = self._grpc_request(self.STUB.GetDeployment, request)

    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    dict_response = MessageToDict(response, preserving_proto_field_name=True)
    # The second key of the response dict holds the deployment payload (first is 'status').
    kwargs = self.process_response_keys(dict_response[list(dict_response.keys())[1]],
                                        list(dict_response.keys())[1])

    return Deployment.from_auth_helper(auth=self.auth_helper, **kwargs)

  def delete_deployments(self, deployment_ids: List[str]) -> None:
    """Deletes list of deployments for the nodepool.

    Args:
        deployment_ids (List[str]): The list of deployment IDs to delete.

    Raises:
        Exception: If the API response status is not SUCCESS.

    Example:
        >>> from clarifai.client.nodepool import Nodepool
        >>> nodepool = Nodepool(nodepool_id="nodepool_id", user_id="user_id")
        >>> nodepool.delete_deployments(deployment_ids=["deployment_id1", "deployment_id2"])
    """
    assert isinstance(deployment_ids, list), "deployment_ids param should be a list"

    request = service_pb2.DeleteDeploymentsRequest(
        user_app_id=self.user_app_id, ids=deployment_ids)
    response = self._grpc_request(self.STUB.DeleteDeployments, request)

    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nDeployments Deleted\n%s", response.status)

  def __getattr__(self, name):
    # Fall through to the underlying proto for any attribute not on the client.
    return getattr(self.nodepool_info, name)

  def __str__(self):
    init_params = [param for param in self.kwargs.keys()]
    attribute_strings = [
        f"{param}={getattr(self.nodepool_info, param)}" for param in init_params
        if hasattr(self.nodepool_info, param)
    ]
    return f"Nodepool Details: \n{', '.join(attribute_strings)}\n"
clarifai/client/user.py CHANGED
@@ -1,5 +1,7 @@
1
+ import os
1
2
  from typing import Any, Dict, Generator, List
2
3
 
4
+ import yaml
3
5
  from clarifai_grpc.grpc.api import resources_pb2, service_pb2
4
6
  from clarifai_grpc.grpc.api.status import status_code_pb2
5
7
  from google.protobuf.json_format import MessageToDict
@@ -7,9 +9,10 @@ from google.protobuf.wrappers_pb2 import BoolValue
7
9
 
8
10
  from clarifai.client.app import App
9
11
  from clarifai.client.base import BaseClient
12
+ from clarifai.client.compute_cluster import ComputeCluster
10
13
  from clarifai.client.lister import Lister
11
14
  from clarifai.errors import UserError
12
- from clarifai.utils.logging import get_logger
15
+ from clarifai.utils.logging import logger
13
16
 
14
17
 
15
18
  class User(Lister, BaseClient):
@@ -34,7 +37,7 @@ class User(Lister, BaseClient):
34
37
  """
35
38
  self.kwargs = {**kwargs, 'id': user_id}
36
39
  self.user_info = resources_pb2.User(**self.kwargs)
37
- self.logger = get_logger(logger_level="INFO", name=__name__)
40
+ self.logger = logger
38
41
  BaseClient.__init__(
39
42
  self,
40
43
  user_id=self.id,
@@ -109,6 +112,37 @@ class User(Lister, BaseClient):
109
112
  for runner_info in all_runners_info:
110
113
  yield dict(auth=self.auth_helper, check_runner_exists=False, **runner_info)
111
114
 
115
def list_compute_clusters(self, page_no: int = None,
                          per_page: int = None) -> Generator[dict, None, None]:
  """List all compute clusters for the user

  Args:
      page_no (int): The page number to list.
      per_page (int): The number of items per page.

  Yields:
      Dict: Dictionaries containing information about the compute clusters.

  Example:
      >>> from clarifai.client.user import User
      >>> client = User(user_id="user_id")
      >>> all_compute_clusters= list(client.list_compute_clusters())

  Note:
      Defaults to 16 per page if page_no is specified and per_page is not specified.
      If both page_no and per_page are None, then lists all the resources.
  """
  # Page through the ListComputeClusters endpoint and wrap each entry in a client object.
  pages = self.list_pages_generator(
      self.STUB.ListComputeClusters,
      service_pb2.ListComputeClustersRequest,
      dict(user_app_id=self.user_app_id),
      per_page=per_page,
      page_no=page_no)

  for cluster_info in pages:
    yield ComputeCluster.from_auth_helper(self.auth_helper, **cluster_info)
145
+
112
146
  def create_app(self, app_id: str, base_workflow: str = 'Empty', **kwargs) -> App:
113
147
  """Creates an app for the user.
114
148
 
@@ -172,6 +206,59 @@ class User(Lister, BaseClient):
172
206
  description=description,
173
207
  check_runner_exists=False)
174
208
 
209
def _process_compute_cluster_config(self, config_filepath: str) -> Dict[str, Any]:
  """Loads a compute cluster YAML config, validates required keys and converts
  nested sections to the corresponding proto messages.

  Args:
      config_filepath (str): Path to the compute cluster YAML config file.

  Returns:
      Dict[str, Any]: The validated `compute_cluster` section of the config.
  """
  with open(config_filepath, "r") as file:
    compute_cluster_config = yaml.safe_load(file)

  assert "compute_cluster" in compute_cluster_config, "compute cluster info not found in the config file"
  compute_cluster = compute_cluster_config['compute_cluster']
  # Each required key produces the same error message as before.
  for required_key in ("region", "managed_by", "cluster_type"):
    assert required_key in compute_cluster, f"{required_key} not found in the config file"
  compute_cluster['cloud_provider'] = resources_pb2.CloudProvider(
      **compute_cluster['cloud_provider'])
  compute_cluster['key'] = resources_pb2.Key(id=self.pat)
  if "visibility" in compute_cluster:
    compute_cluster["visibility"] = resources_pb2.Visibility(**compute_cluster["visibility"])
  return compute_cluster
224
+
225
def create_compute_cluster(self, compute_cluster_id: str,
                           config_filepath: str) -> ComputeCluster:
  """Creates a compute cluster for the user.

  Args:
      compute_cluster_id (str): The compute cluster ID for the compute cluster to create.
          An `id` inside the config file takes precedence over this argument.
      config_filepath (str): The path to the compute cluster config file.

  Returns:
      ComputeCluster: A Compute Cluster object for the specified compute cluster ID.

  Example:
      >>> from clarifai.client.user import User
      >>> client = User(user_id="user_id")
      >>> compute_cluster = client.create_compute_cluster(compute_cluster_id="compute_cluster_id", config_filepath="config.yml")
  """
  if not os.path.exists(config_filepath):
    raise UserError(f"Compute Cluster config file not found at {config_filepath}")

  compute_cluster_config = self._process_compute_cluster_config(config_filepath)
  # An explicit id in the config file overrides the argument; remove it either way.
  compute_cluster_id = compute_cluster_config.pop('id', compute_cluster_id)

  request = service_pb2.PostComputeClustersRequest(
      user_app_id=self.user_app_id,
      compute_clusters=[
          resources_pb2.ComputeCluster(id=compute_cluster_id, **compute_cluster_config)
      ])
  response = self._grpc_request(self.STUB.PostComputeClusters, request)
  if response.status.code != status_code_pb2.SUCCESS:
    raise Exception(response.status)
  self.logger.info("\nCompute Cluster created\n%s", response.status)
  return ComputeCluster.from_auth_helper(
      auth=self.auth_helper, compute_cluster_id=compute_cluster_id)
261
+
175
262
  def app(self, app_id: str, **kwargs) -> App:
176
263
  """Returns an App object for the specified app ID.
177
264
 
@@ -223,6 +310,31 @@ class User(Lister, BaseClient):
223
310
 
224
311
  return dict(self.auth_helper, check_runner_exists=False, **kwargs)
225
312
 
313
+ def compute_cluster(self, compute_cluster_id: str) -> ComputeCluster:
314
+ """Returns an Compute Cluster object for the specified compute cluster ID.
315
+
316
+ Args:
317
+ compute_cluster_id (str): The compute cluster ID for the compute cluster to interact with.
318
+
319
+ Returns:
320
+ ComputeCluster: A Compute Cluster object for the specified compute cluster ID.
321
+
322
+ Example:
323
+ >>> from clarifai.client.user import User
324
+ >>> compute_cluster = User("user_id").compute_cluster("compute_cluster_id")
325
+ """
326
+ request = service_pb2.GetComputeClusterRequest(
327
+ user_app_id=self.user_app_id, compute_cluster_id=compute_cluster_id)
328
+ response = self._grpc_request(self.STUB.GetComputeCluster, request)
329
+ if response.status.code != status_code_pb2.SUCCESS:
330
+ raise Exception(response.status)
331
+
332
+ dict_response = MessageToDict(response, preserving_proto_field_name=True)
333
+ kwargs = self.process_response_keys(dict_response[list(dict_response.keys())[1]],
334
+ list(dict_response.keys())[1])
335
+
336
+ return ComputeCluster.from_auth_helper(auth=self.auth_helper, **kwargs)
337
+
226
338
  def patch_app(self, app_id: str, action: str = 'overwrite', **kwargs) -> App:
227
339
  """Patch an app for the user.
228
340
 
@@ -290,6 +402,25 @@ class User(Lister, BaseClient):
290
402
  raise Exception(response.status)
291
403
  self.logger.info("\nRunner Deleted\n%s", response.status)
292
404
 
405
+ def delete_compute_clusters(self, compute_cluster_ids: List[str]) -> None:
406
+ """Deletes a list of compute clusters for the user.
407
+
408
+ Args:
409
+ compute_cluster_ids (List[str]): The compute cluster IDs of the user to delete.
410
+
411
+ Example:
412
+ >>> from clarifai.client.user import User
413
+ >>> user = User("user_id").delete_compute_clusters(compute_cluster_ids=["compute_cluster_id1", "compute_cluster_id2"])
414
+ """
415
+ assert isinstance(compute_cluster_ids, list), "compute_cluster_ids param should be a list"
416
+
417
+ request = service_pb2.DeleteComputeClustersRequest(
418
+ user_app_id=self.user_app_id, ids=compute_cluster_ids)
419
+ response = self._grpc_request(self.STUB.DeleteComputeClusters, request)
420
+ if response.status.code != status_code_pb2.SUCCESS:
421
+ raise Exception(response.status)
422
+ self.logger.info("\nCompute Cluster Deleted\n%s", response.status)
423
+
293
424
  def __getattr__(self, name):
294
425
  return getattr(self.user_info, name)
295
426
 
@@ -12,7 +12,7 @@ from clarifai.client.lister import Lister
12
12
  from clarifai.constants.workflow import MAX_WORKFLOW_PREDICT_INPUTS
13
13
  from clarifai.errors import UserError
14
14
  from clarifai.urls.helper import ClarifaiUrlHelper
15
- from clarifai.utils.logging import get_logger
15
+ from clarifai.utils.logging import logger
16
16
  from clarifai.utils.misc import BackoffIterator
17
17
  from clarifai.workflows.export import Exporter
18
18
 
@@ -59,7 +59,7 @@ class Workflow(Lister, BaseClient):
59
59
  self.kwargs = {**kwargs, 'id': workflow_id, 'version': workflow_version}
60
60
  self.output_config = output_config
61
61
  self.workflow_info = resources_pb2.Workflow(**self.kwargs)
62
- self.logger = get_logger(logger_level="INFO", name=__name__)
62
+ self.logger = logger
63
63
  BaseClient.__init__(
64
64
  self,
65
65
  user_id=self.user_id,
@@ -0,0 +1 @@
1
+ COMPUTE_ORCHESTRATION_RESOURCES = ['Runner', 'ComputeCluster', 'Nodepool', 'Deployment']
@@ -14,9 +14,7 @@ from tqdm import tqdm
14
14
 
15
15
  from clarifai.constants.dataset import CONTENT_TYPE
16
16
  from clarifai.errors import UserError
17
- from clarifai.utils.logging import get_logger
18
-
19
- logger = get_logger("INFO", __name__)
17
+ from clarifai.utils.logging import logger
20
18
 
21
19
 
22
20
  class DatasetExportReader:
clarifai/rag/rag.py CHANGED
@@ -15,7 +15,7 @@ from clarifai.errors import UserError
15
15
  from clarifai.rag.utils import (convert_messages_to_str, format_assistant_message, load_documents,
16
16
  split_document)
17
17
  from clarifai.utils.constants import CLARIFAI_USER_ID_ENV_VAR
18
- from clarifai.utils.logging import get_logger
18
+ from clarifai.utils.logging import logger
19
19
  from clarifai.utils.misc import get_from_dict_or_env
20
20
 
21
21
  DEFAULT_RAG_PROMPT_TEMPLATE = "Context information is below:\n{data.hits}\nGiven the context information and not prior knowledge, answer the query.\nQuery: {data.text.raw}\nAnswer: "
@@ -40,7 +40,7 @@ class RAG:
40
40
  **kwargs):
41
41
  """Initialize an empty or existing RAG.
42
42
  """
43
- self.logger = get_logger(logger_level="INFO", name=__name__)
43
+ self.logger = logger
44
44
  if workflow_url is not None and workflow is None:
45
45
  self.logger.info("workflow_url:%s", workflow_url)
46
46
  w = Workflow(workflow_url, base_url=base_url, pat=pat)
@@ -10,8 +10,8 @@ from google.protobuf import json_format
10
10
  from rich import print
11
11
 
12
12
  from clarifai.client import BaseClient
13
-
14
13
  from clarifai.runners.utils.loader import HuggingFaceLoarder
14
+ from clarifai.utils.logging import logger
15
15
 
16
16
 
17
17
  def _clear_line(n: int = 1) -> None:
@@ -40,7 +40,7 @@ class ModelUploader:
40
40
  def _validate_folder(folder):
41
41
  if not folder.startswith("/"):
42
42
  folder = os.path.join(os.getcwd(), folder)
43
- print(f"Validating folder: {folder}")
43
+ logger.info(f"Validating folder: {folder}")
44
44
  if not os.path.exists(folder):
45
45
  raise FileNotFoundError(f"Folder {folder} not found, please provide a valid folder path")
46
46
  files = os.listdir(folder)
@@ -70,7 +70,7 @@ class ModelUploader:
70
70
  base = os.environ.get('CLARIFAI_API_BASE', 'https://api-dev.clarifai.com')
71
71
 
72
72
  self._client = BaseClient(user_id=user_id, app_id=app_id, base=base)
73
- print(f"Client initialized for user {user_id} and app {app_id}")
73
+ logger.info(f"Client initialized for user {user_id} and app {app_id}")
74
74
  return self._client
75
75
 
76
76
  def _get_model_proto(self):
@@ -100,7 +100,7 @@ class ModelUploader:
100
100
  service_pb2.GetModelRequest(
101
101
  user_app_id=self.client.user_app_id, model_id=self.model_proto.id))
102
102
  if resp.status.code == status_code_pb2.SUCCESS:
103
- print(
103
+ logger.info(
104
104
  f"Model '{self.client.user_app_id.user_id}/{self.client.user_app_id.app_id}/models/{self.model_proto.id}' already exists, "
105
105
  f"will create a new version for it.")
106
106
  return resp
@@ -133,9 +133,10 @@ class ModelUploader:
133
133
  build_info = self.config.get('build_info', {})
134
134
  if 'python_version' in build_info:
135
135
  python_version = build_info['python_version']
136
- print(f"Using Python version {python_version} from the config file to build the Dockerfile")
136
+ logger.info(
137
+ f"Using Python version {python_version} from the config file to build the Dockerfile")
137
138
  else:
138
- print(
139
+ logger.info(
139
140
  f"Python version not found in the config file, using default Python version: {self.DEFAULT_PYTHON_VERSION}"
140
141
  )
141
142
  python_version = self.DEFAULT_PYTHON_VERSION
@@ -152,13 +153,13 @@ class ModelUploader:
152
153
 
153
154
  def download_checkpoints(self):
154
155
  if not self.config.get("checkpoints"):
155
- print("No checkpoints specified in the config file")
156
+ logger.info("No checkpoints specified in the config file")
156
157
  return
157
158
 
158
159
  assert "type" in self.config.get("checkpoints"), "No loader type specified in the config file"
159
160
  loader_type = self.config.get("checkpoints").get("type")
160
161
  if not loader_type:
161
- print("No loader type specified in the config file for checkpoints")
162
+ logger.info("No loader type specified in the config file for checkpoints")
162
163
  assert loader_type == "huggingface", "Only huggingface loader supported for now"
163
164
  if loader_type == "huggingface":
164
165
  assert "repo_id" in self.config.get("checkpoints"), "No repo_id specified in the config file"
@@ -173,9 +174,12 @@ class ModelUploader:
173
174
  loader = HuggingFaceLoarder(repo_id=repo_id, token=hf_token)
174
175
 
175
176
  checkpoint_path = os.path.join(self.folder, '1', 'checkpoints')
176
- loader.download_checkpoints(checkpoint_path)
177
+ success = loader.download_checkpoints(checkpoint_path)
177
178
 
178
- print(f"Downloaded checkpoints for model {repo_id}")
179
+ if not success:
180
+ logger.error(f"Failed to download checkpoints for model {repo_id}")
181
+ return
182
+ logger.info(f"Downloaded checkpoints for model {repo_id}")
179
183
 
180
184
  def _concepts_protos_from_concepts(self, concepts):
181
185
  concept_protos = []
@@ -199,7 +203,7 @@ class ModelUploader:
199
203
  with open(config_file, 'w') as file:
200
204
  yaml.dump(config, file, sort_keys=False)
201
205
  concepts = config.get('concepts')
202
- print(f"Updated config.yaml with {len(concepts)} concepts.")
206
+ logger.info(f"Updated config.yaml with {len(concepts)} concepts.")
203
207
 
204
208
  def _get_model_version_proto(self):
205
209
 
@@ -225,11 +229,11 @@ class ModelUploader:
225
229
 
226
230
  def upload_model_version(self):
227
231
  file_path = f"{self.folder}.tar.gz"
228
- print(f"Will tar it into file: {file_path}")
232
+ logger.info(f"Will tar it into file: {file_path}")
229
233
 
230
234
  # Tar the folder
231
235
  os.system(f"tar --exclude=*~ -czvf {self.folder}.tar.gz -C {self.folder} .")
232
- print("Tarring complete, about to start upload.")
236
+ logger.info("Tarring complete, about to start upload.")
233
237
 
234
238
  model_version = self._get_model_version_proto()
235
239
 
@@ -251,10 +255,10 @@ class ModelUploader:
251
255
  flush=True)
252
256
  print()
253
257
  if response.status.code != status_code_pb2.MODEL_BUILDING:
254
- print(f"Failed to upload model version: {response.status.description}")
258
+ logger.error(f"Failed to upload model version: {response.status.description}")
255
259
  return
256
260
  model_version_id = response.model_version_id
257
- print(f"Created Model Version ID: {model_version_id}")
261
+ logger.info(f"Created Model Version ID: {model_version_id}")
258
262
 
259
263
  self.monitor_model_build(model_version_id)
260
264
 
@@ -264,10 +268,10 @@ class ModelUploader:
264
268
  file_size = os.path.getsize(file_path)
265
269
  chunk_size = int(127 * 1024 * 1024) # 127MB chunk size
266
270
  num_chunks = (file_size // chunk_size) + 1
267
- print("Uploading file...")
268
- print("File size: ", file_size)
269
- print("Chunk size: ", chunk_size)
270
- print("Number of chunks: ", num_chunks)
271
+ logger.info("Uploading file...")
272
+ logger.info("File size: ", file_size)
273
+ logger.info("Chunk size: ", chunk_size)
274
+ logger.info("Number of chunks: ", num_chunks)
271
275
  read_so_far = 0
272
276
  for part_id in range(num_chunks):
273
277
  try:
@@ -283,16 +287,16 @@ class ModelUploader:
283
287
  range_start=read_so_far,
284
288
  ))
285
289
  except Exception as e:
286
- print(f"\nError uploading file: {e}")
290
+ logger.exception(f"\nError uploading file: {e}")
287
291
  break
288
292
 
289
293
  if read_so_far == file_size:
290
- print("\nUpload complete!, waiting for model build...")
294
+ logger.info("\nUpload complete!, waiting for model build...")
291
295
 
292
296
  def init_upload_model_version(self, model_version, file_path):
293
297
  file_size = os.path.getsize(file_path)
294
- print(f"Uploading model version '{model_version.id}' of model {self.model_proto.id}")
295
- print(f"Using file '{os.path.basename(file_path)}' of size: {file_size} bytes")
298
+ logger.info(f"Uploading model version '{model_version.id}' of model {self.model_proto.id}")
299
+ logger.info(f"Using file '{os.path.basename(file_path)}' of size: {file_size} bytes")
296
300
  return service_pb2.PostModelVersionsUploadRequest(
297
301
  upload_config=service_pb2.PostModelVersionsUploadConfig(
298
302
  user_app_id=self.client.user_app_id,
@@ -313,16 +317,18 @@ class ModelUploader:
313
317
  ))
314
318
  status_code = resp.model_version.status.code
315
319
  if status_code == status_code_pb2.MODEL_BUILDING:
316
- print(f"Model is building... (elapsed {time.time() - st:.1f}s)", end='\r', flush=True)
320
+ logger.info(
321
+ f"Model is building... (elapsed {time.time() - st:.1f}s)", end='\r', flush=True)
317
322
  time.sleep(1)
318
323
  elif status_code == status_code_pb2.MODEL_TRAINED:
319
- print("\nModel build complete!")
320
- print(
324
+ logger.info("\nModel build complete!")
325
+ logger.info(
321
326
  f"Check out the model at https://clarifai.com/{self.client.user_app_id.user_id}/apps/{self.client.user_app_id.app_id}/models/{self.model_id}/versions/{model_version_id}"
322
327
  )
323
328
  break
324
329
  else:
325
- print(f"\nModel build failed with status: {resp.model_version.status}")
330
+ logger.info(
331
+ f"\nModel build failed with status: {resp.model_version.status} and response {resp}")
326
332
  break
327
333
 
328
334
 
@@ -15,7 +15,7 @@ from clarifai_protocol import BaseRunner
15
15
  from clarifai_protocol.utils.grpc_server import GRPCServer
16
16
 
17
17
  from clarifai.runners.models.model_servicer import ModelServicer
18
- from clarifai.runners.utils.logging import logger
18
+ from clarifai.utils.logging import logger
19
19
 
20
20
 
21
21
  def main():
@@ -3,6 +3,8 @@ import json
3
3
  import os
4
4
  import subprocess
5
5
 
6
+ from clarifai.utils.logging import logger
7
+
6
8
 
7
9
  class HuggingFaceLoarder:
8
10
 
@@ -29,22 +31,24 @@ class HuggingFaceLoarder:
29
31
  "The 'huggingface_hub' package is not installed. Please install it using 'pip install huggingface_hub'."
30
32
  )
31
33
  if os.path.exists(checkpoint_path) and self.validate_download(checkpoint_path):
32
- print("Checkpoints already exist")
34
+ logger.info("Checkpoints already exist")
35
+ return True
33
36
  else:
34
37
  os.makedirs(checkpoint_path, exist_ok=True)
35
38
  try:
36
39
  is_hf_model_exists = self.validate_hf_model()
37
40
  if not is_hf_model_exists:
38
- print("Model not found on Hugging Face")
41
+ logger.error("Model %s not found on Hugging Face" % (self.repo_id))
39
42
  return False
40
- snapshot_download(repo_id=self.repo_id, local_dir=checkpoint_path)
43
+ snapshot_download(
44
+ repo_id=self.repo_id, local_dir=checkpoint_path, local_dir_use_symlinks=False)
41
45
  except Exception as e:
42
- print("Error downloading model checkpoints ", e)
46
+ logger.exception(f"Error downloading model checkpoints {e}")
43
47
  return False
44
48
  finally:
45
49
  is_downloaded = self.validate_download(checkpoint_path)
46
50
  if not is_downloaded:
47
- print("Error downloading model checkpoints")
51
+ logger.error("Error validating downloaded model checkpoints")
48
52
  return False
49
53
  return True
50
54
 
@@ -57,7 +61,10 @@ class HuggingFaceLoarder:
57
61
  def validate_download(self, checkpoint_path: str):
58
62
  # check if model exists on HF
59
63
  from huggingface_hub import list_repo_files
60
- return (len(os.listdir(checkpoint_path)) >= len(list_repo_files(self.repo_id))) and len(
64
+ checkpoint_dir_files = [
65
+ f for dp, dn, fn in os.walk(os.path.expanduser(checkpoint_path)) for f in fn
66
+ ]
67
+ return (len(checkpoint_dir_files) >= len(list_repo_files(self.repo_id))) and len(
61
68
  list_repo_files(self.repo_id)) > 0
62
69
 
63
70
  def fetch_labels(self, checkpoint_path: str):
@@ -2,7 +2,7 @@ import concurrent.futures
2
2
 
3
3
  import fsspec
4
4
 
5
- from .logging import logger
5
+ from clarifai.utils.logging import logger
6
6
 
7
7
 
8
8
  def download_input(input):
@@ -20,8 +20,7 @@ except ImportError:
20
20
  try:
21
21
  from loguru import logger
22
22
  except ImportError:
23
- from ..logging import get_logger
24
- logger = get_logger(logger_level="INFO", name=__name__)
23
+ from ..logging import logger
25
24
 
26
25
  MACRO_AVG = "macro_avg"
27
26
 
@@ -26,8 +26,7 @@ except ImportError:
26
26
  try:
27
27
  from loguru import logger
28
28
  except ImportError:
29
- from ..logging import get_logger
30
- logger = get_logger(logger_level="INFO", name=__name__)
29
+ from ..logging import logger
31
30
 
32
31
  __all__ = ['EvalResultCompare']
33
32
 
clarifai/utils/logging.py CHANGED
@@ -19,6 +19,8 @@ from rich.tree import Tree
19
19
 
20
20
  install()
21
21
 
22
+ # The default logger to use throughout the SDK is defined at bottom of this file.
23
+
22
24
  # For the json logger.
23
25
  JSON_LOGGER_NAME = "clarifai-json"
24
26
  JSON_LOG_KEY = 'msg'
@@ -357,3 +359,7 @@ class JsonFormatter(logging.Formatter):
357
359
  default=self.json_default,
358
360
  cls=self.json_cls,
359
361
  )
362
+
363
+
364
+ # the default logger for the SDK.
365
+ logger = get_logger(logger_level=os.environ.get("LOG_LEVEL", "INFO"), name="clarifai")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: clarifai
3
- Version: 10.8.6
3
+ Version: 10.8.7
4
4
  Summary: Clarifai Python SDK
5
5
  Home-page: https://github.com/Clarifai/clarifai-python
6
6
  Author: Clarifai
@@ -1,22 +1,26 @@
1
- clarifai/__init__.py,sha256=5gJxqMyHfStgLsUbgDyNI4zTcQbFwffphiPw6fZhAHk,23
1
+ clarifai/__init__.py,sha256=rJghd8fTbSCfCAow2Dmd1bS2hdZ2VQdNcu2IHtIH5vU,23
2
2
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
3
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
4
4
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
5
5
  clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,662
6
- clarifai/client/app.py,sha256=VC5TqeYVNUyENPxOOKNSAl0q3ivPiqg-gtRr77ACIIM,38424
7
- clarifai/client/base.py,sha256=JXbbjg2CXo8rOdw-XgKWWtLVAhPv3OZua5LFT5w4U2Q,7380
8
- clarifai/client/dataset.py,sha256=AmkeYdZI7oe7ZCEh4odTuzC5r4ESCmkdHHo4v23dSeQ,30204
9
- clarifai/client/input.py,sha256=ZLqa1jGx4NgCbunOTpJxCq4lDQ5xAf4GQ0rsZY8AHCM,44456
6
+ clarifai/client/app.py,sha256=6pckYme1urV2YJjLIYfeZ-vH0Z5YSQa51jzIMcEfwug,38342
7
+ clarifai/client/base.py,sha256=hSHOqkXbSKyaRDeylMMnkhUHCAHhEqno4KI0CXGziBA,7536
8
+ clarifai/client/compute_cluster.py,sha256=lntZDLVDhS71Yj7mZrgq5uhnAuNPUnj48i3zMSuoUpk,8693
9
+ clarifai/client/dataset.py,sha256=oqp6ryg7IyxCZcItzownadYJKK0s1DtghHwITN71_6E,30160
10
+ clarifai/client/deployment.py,sha256=3V-kqXYAZabtxHHgmy_QAGOG5Idt8iJvc8NlUIbNHg8,2096
11
+ clarifai/client/input.py,sha256=cEVRytrMF1gCgwHLbXlSbPSEQN8uHpUAoKcCdyHO1pc,44406
10
12
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
11
- clarifai/client/model.py,sha256=YqeCwk1v_Rs2L6OTZSkDZvxGaxMoFMYuxVV7s9jdd0I,74416
12
- clarifai/client/module.py,sha256=360JaOasX0DZCNE_Trj0LNTr-T_tUDZLfGpz0CdIi78,4248
13
+ clarifai/client/model.py,sha256=_9kr__KP74v6PtSBtK1RrJjhrxzunXFVqyVn3xL78U0,74372
14
+ clarifai/client/module.py,sha256=FTkm8s9m-EaTKN7g9MnLhGJ9eETUfKG7aWZ3o1RshYs,4204
15
+ clarifai/client/nodepool.py,sha256=J9pnxQGO_HfScaUahccmEvJ4EOBib6k2nQR3gBdMoNs,9029
13
16
  clarifai/client/search.py,sha256=GaPWN6JmTQGZaCHr6U1yv0zqR6wKFl7i9IVLg2ul1CI,14254
14
- clarifai/client/user.py,sha256=Fr3vDEHieqD7HRRKnlp9h-AIX4AuYBirRYtG4KLwSe8,11836
15
- clarifai/client/workflow.py,sha256=e3axkhU6c6WcxK9P5tgmnV464k-afslSzsSXx6nSMgA,10560
17
+ clarifai/client/user.py,sha256=0tcOk8_Yd1_ANj9E6sy9mz6s01V3qkmJS7pZVn_zUYo,17637
18
+ clarifai/client/workflow.py,sha256=Wm4Fry6lGx8T43sBUqRI7v7sAmuvq_4Jft3vSW8UUJU,10516
16
19
  clarifai/client/auth/__init__.py,sha256=7EwR0NrozkAUwpUnCsqXvE_p0wqx_SelXlSpKShKJK0,136
17
- clarifai/client/auth/helper.py,sha256=hqwI7Zlsvivc-O9aAdtxyJT3zkpuMvbxjRaiCTsWYGk,14183
20
+ clarifai/client/auth/helper.py,sha256=BuyI_smxuRq__8RF0Nv3lr_Rmb-CJVEyK4hv60VEV5A,14259
18
21
  clarifai/client/auth/register.py,sha256=2CMdBsoVLoTfjyksE6j7BM2tiEc73WKYvxnwDDgNn1k,536
19
22
  clarifai/client/auth/stub.py,sha256=xy4-fV0W8keCgXld4eOVzFQEIKxOktNwtL5bLztReug,4940
23
+ clarifai/constants/base.py,sha256=ogmFSZYoF0YhGjHg5aiOc3MLqPr_poKAls6xaD0_C3U,89
20
24
  clarifai/constants/dataset.py,sha256=vjK3IlgXu31HycuvjRSzEQSqhU6xfj5TIgo6IpyUWoc,609
21
25
  clarifai/constants/input.py,sha256=WcHwToUVIK9ItAhDefaSohQHCLNeR55PSjZ0BFnoZ3U,28
22
26
  clarifai/constants/model.py,sha256=Um1hLfMFlh5R_vtP3Z6P-o6zon-tdbLcKVIl4PucrV4,438
@@ -25,7 +29,7 @@ clarifai/constants/search.py,sha256=yYEqTaFg-KdnpJE_Ytp-EPVHIIC395iNtZrpVlLIf4o,
25
29
  clarifai/constants/workflow.py,sha256=cECq1xdvf44MCdtK2AbkiuuwhyL-6OWZdQfYbsLKy_o,33
26
30
  clarifai/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
31
  clarifai/datasets/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
- clarifai/datasets/export/inputs_annotations.py,sha256=7c6HWdATI4aPCRoCPZetUBNNEz9dBhbyYX1QqX-xYe4,9744
32
+ clarifai/datasets/export/inputs_annotations.py,sha256=Mwo-wFRIYXsoe13bR2bfsXViA6DlJbWmNuPpD3yHSrE,9701
29
33
  clarifai/datasets/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
34
  clarifai/datasets/upload/base.py,sha256=UIc0ufyIBCrb83_sFpv21L8FshsX4nwsLYQkdlJfzD4,2357
31
35
  clarifai/datasets/upload/features.py,sha256=jv2x7jGZKS-LMt87sEZNBwwOskHbP26XTMjoiaSA5pg,2024
@@ -47,10 +51,10 @@ clarifai/modules/css.py,sha256=kadCEunmyh5h2yf0-4aysE3ZcZ6qaQcxuAgDXS96yF8,2020
47
51
  clarifai/modules/pages.py,sha256=iOoM3RNRMgXlV0qBqcdQofxoXo2RuRQh0h9c9BIS0-I,1383
48
52
  clarifai/modules/style.css,sha256=j7FNPZVhLPj35vvBksAJ90RuX5sLuqzDR5iM2WIEhiA,6073
49
53
  clarifai/rag/__init__.py,sha256=wu3PzAzo7uqgrEzuaC9lY_3gj1HFiR3GU3elZIKTT5g,40
50
- clarifai/rag/rag.py,sha256=0udYyWVzBML-5-E05QuxBz7rNLxu7aMlist5uP7zPIY,12638
54
+ clarifai/rag/rag.py,sha256=L10TcV9E0PF1aJ2Nn1z1x6WVoUoGxbKt20lQXg8ksqo,12594
51
55
  clarifai/rag/utils.py,sha256=yr1jAcbpws4vFGBqlAwPPE7v1DRba48g8gixLFw8OhQ,4070
52
56
  clarifai/runners/__init__.py,sha256=3vr4RVvN1IRy2SxJpyycAAvrUBbH-mXR7pqUmu4w36A,412
53
- clarifai/runners/server.py,sha256=xzEoYoBcQE5JUxmpy1aj5CF5gIpUZKLDKqVXTcQRH4Y,4267
57
+ clarifai/runners/server.py,sha256=M6TaNy5gWgAJG29ZkUgzGThVNnWpWJerIiFINk2X-j4,4259
54
58
  clarifai/runners/dockerfile_template/Dockerfile.cpu.template,sha256=lJw28GwMHEGO1uaDmEk2xb-Xx4y5pr_YY2Ipg25SLqo,1213
55
59
  clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=lwDwfeb9vtW3zVLBtlJYo44j02Am7FH2FABthwY47kY,4971
56
60
  clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -58,31 +62,30 @@ clarifai/runners/models/base_typed_model.py,sha256=OnAk08Lo2Y1fGiBc6JJ6UvJ8P435c
58
62
  clarifai/runners/models/model_class.py,sha256=9JSPAr4U4K7xI0kSl-q0mHB06zknm2OR-8XIgBCto94,1611
59
63
  clarifai/runners/models/model_runner.py,sha256=3vzoastQxkGRDK8T9aojDsLNBb9A3IiKm6YmbFrE9S0,6241
60
64
  clarifai/runners/models/model_servicer.py,sha256=i10oxz4pb1NsTQwkyhccJ3URg88Qn63XiuoBRCnJn9w,2737
61
- clarifai/runners/models/model_upload.py,sha256=PsCHO4CsJ2N6kBdeo7uUNjxWooXUgLepW_NfjU_YJ1A,13899
65
+ clarifai/runners/models/model_upload.py,sha256=4MD85lYtUwEolZF9nxmiH0zRM69Altv-z3luDlvxEvM,14276
62
66
  clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
63
67
  clarifai/runners/utils/data_handler.py,sha256=sxy9zlAgI6ETuxCQhUgEXAn2GCsaW1GxpK6GTaMne0g,6966
64
68
  clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
65
- clarifai/runners/utils/loader.py,sha256=r1FFfN7fE3GXPLOGaJAqmSpwETESkOAVZViOI8IpXNk,2532
66
- clarifai/runners/utils/logging.py,sha256=xan5ZQH5XHVyNIIjUAQoc_bS3MrjQjEvBYuo0CQKdho,153
67
- clarifai/runners/utils/url_fetcher.py,sha256=6u7tPRXuBe-4a2LYxRNJ5WTrNMH2kYXsk73Zuo14qb4,1432
69
+ clarifai/runners/utils/loader.py,sha256=taWTf-TCIZKh8jjwiFDYg3EqtJPXqn9EFoedIbnYXE8,2811
70
+ clarifai/runners/utils/url_fetcher.py,sha256=-Hwjb1SURszn7zUVwi4Of0-nrksfZy-uqT4SvPGCgSU,1446
68
71
  clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
69
72
  clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
70
73
  clarifai/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
71
74
  clarifai/utils/constants.py,sha256=MG_iHnSwNEyUZOpvsrTicNwaT4CIjmlK_Ixk_qqEX8g,142
72
- clarifai/utils/logging.py,sha256=_nzRyXu3fUa_5X8vXig5dV392YkNESDTc3ZoiwVOGZY,11359
75
+ clarifai/utils/logging.py,sha256=rhutBRQJLtkNRz8IErNCgbIpvtl2fQ3D2otYcGqd3-Q,11565
73
76
  clarifai/utils/misc.py,sha256=ptjt1NtteDT0EhrPoyQ7mgWtvoAQ-XNncQaZvNHb0KI,2253
74
77
  clarifai/utils/model_train.py,sha256=Mndqy5GNu7kjQHjDyNVyamL0hQFLGSHcWhOuPyOvr1w,8005
75
78
  clarifai/utils/evaluation/__init__.py,sha256=PYkurUrXrGevByj7RFb6CoU1iC7fllyQSfnnlo9WnY8,69
76
- clarifai/utils/evaluation/helpers.py,sha256=d_dcASRI_lhsHIRukAF1S-w7XazLpK9y6E_ug3l50t4,18440
77
- clarifai/utils/evaluation/main.py,sha256=0SbTN8cUDjfd4vHUSIG0qVVmDDlJE_5il4vcZGi6a5g,15781
79
+ clarifai/utils/evaluation/helpers.py,sha256=Fl3QzrtlvhtaLc8kD3As2hJGpnf7yHcQ6hA0sDstnnI,18378
80
+ clarifai/utils/evaluation/main.py,sha256=u8j_ZcY2vKfhJhNJRvf6Ww1OsHfKg8gR-5HnXHF4aEY,15719
78
81
  clarifai/utils/evaluation/testset_annotation_parser.py,sha256=iZfLw6oR1qgJ3MHMbOZXcGBLu7btSDn0VqdiAzpIm4g,5002
79
82
  clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
80
83
  clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
81
84
  clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
82
85
  clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
83
- clarifai-10.8.6.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
84
- clarifai-10.8.6.dist-info/METADATA,sha256=u7wP6djeaOnVhlz_RBIE2aDEretrTZuJWHbfC_UVHpY,19479
85
- clarifai-10.8.6.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
86
- clarifai-10.8.6.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
87
- clarifai-10.8.6.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
88
- clarifai-10.8.6.dist-info/RECORD,,
86
+ clarifai-10.8.7.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
87
+ clarifai-10.8.7.dist-info/METADATA,sha256=VwT5l9f_03dv65G4fsQgcHyEWZcsTOCBmpmodcj3OhE,19479
88
+ clarifai-10.8.7.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
89
+ clarifai-10.8.7.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
90
+ clarifai-10.8.7.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
91
+ clarifai-10.8.7.dist-info/RECORD,,
@@ -1,6 +0,0 @@
1
- import os
2
-
3
- from clarifai.utils.logging import get_logger
4
-
5
- logger_level = os.environ.get("LOG_LEVEL", "INFO")
6
- logger = get_logger(logger_level, __name__)