clarifai 11.2.3__py3-none-any.whl → 11.2.3rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  4. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  5. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  11. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  12. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  13. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  14. clarifai/cli/base.py +81 -228
  15. clarifai/cli/compute_cluster.py +18 -28
  16. clarifai/cli/deployment.py +42 -70
  17. clarifai/cli/model.py +39 -26
  18. clarifai/cli/nodepool.py +41 -62
  19. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  21. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  23. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  24. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  25. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  26. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  27. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  28. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  29. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  30. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  31. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  32. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  33. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  34. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  35. clarifai/client/app.py +1 -1
  36. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  37. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  38. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  39. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  40. clarifai/client/auth/stub.py +5 -4
  41. clarifai/client/cli/__init__.py +0 -0
  42. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  43. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  44. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  45. clarifai/client/cli/base_cli.py +88 -0
  46. clarifai/client/cli/model_cli.py +29 -0
  47. clarifai/client/dataset.py +4 -3
  48. clarifai/client/model.py +159 -393
  49. clarifai/client/model_client.py +502 -0
  50. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  51. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  52. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  53. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  54. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  55. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  56. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  57. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  58. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  59. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  60. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  61. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  62. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  63. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  64. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  65. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  66. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  67. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  68. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  69. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  70. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  71. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  72. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  73. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  74. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  75. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  76. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  77. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  78. clarifai/runners/__init__.py +2 -7
  79. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  80. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  81. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  82. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +42 -0
  83. clarifai/runners/dockerfile_template/Dockerfile.nim +71 -0
  84. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  85. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  86. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  87. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  88. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  89. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  90. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  91. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  92. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  93. clarifai/runners/models/model_builder.py +138 -51
  94. clarifai/runners/models/model_class.py +441 -28
  95. clarifai/runners/models/model_class_refract.py +80 -0
  96. clarifai/runners/models/model_run_locally.py +25 -89
  97. clarifai/runners/models/model_runner.py +8 -0
  98. clarifai/runners/models/model_servicer.py +11 -2
  99. clarifai/runners/models/model_upload.py +607 -0
  100. clarifai/runners/models/temp.py +25 -0
  101. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  102. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  103. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  104. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  105. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  106. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  107. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  108. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  109. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  110. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  111. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  112. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  113. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  114. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  115. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  116. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  117. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  118. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  119. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  120. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  121. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  122. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  123. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  124. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  125. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  126. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  127. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  128. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  129. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  130. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  131. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  132. clarifai/runners/utils/code_script.py +217 -0
  133. clarifai/runners/utils/const.py +8 -9
  134. clarifai/runners/utils/data_handler.py +271 -210
  135. clarifai/runners/utils/data_handler_refract.py +213 -0
  136. clarifai/runners/utils/data_types.py +473 -0
  137. clarifai/runners/utils/data_utils.py +165 -0
  138. clarifai/runners/utils/loader.py +6 -36
  139. clarifai/runners/utils/logger.py +0 -0
  140. clarifai/runners/utils/method_signatures.py +518 -0
  141. clarifai/runners/utils/serializers.py +222 -0
  142. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  143. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  144. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  145. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  146. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  147. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  148. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  149. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  150. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  151. clarifai/utils/cli.py +34 -132
  152. clarifai/utils/constants.py +0 -4
  153. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  154. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  155. clarifai/utils/logging.py +21 -64
  156. clarifai/utils/misc.py +0 -2
  157. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  158. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  159. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  160. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  161. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  162. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/METADATA +3 -4
  163. clarifai-11.2.3rc2.dist-info/RECORD +238 -0
  164. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/WHEEL +1 -1
  165. clarifai/utils/config.py +0 -105
  166. clarifai-11.2.3.dist-info/RECORD +0 -102
  167. {clarifai-11.2.3.dist-info/licenses → clarifai-11.2.3rc2.dist-info}/LICENSE +0 -0
  168. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/entry_points.txt +0 -0
  169. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,222 @@
1
+ import json
2
+ from typing import Dict, Iterable
3
+
4
+ import numpy as np
5
+ from clarifai_grpc.grpc.api import resources_pb2
6
+
7
+ from clarifai.runners.utils import data_types
8
+
9
+
10
class Serializer:
  """Abstract base for converters between Python values and Data proto fields.

  Concrete subclasses implement `serialize` / `deserialize` for one field
  layout; `handles_list` reports whether a whole list can be packed natively.
  """

  def serialize(self, data_proto, value):
    """Write `value` into the appropriate field of `data_proto` (no-op here)."""
    return None

  def deserialize(self, data_proto):
    """Read the Python value back out of `data_proto` (no-op here)."""
    return None

  def handles_list(self):
    """Return True if this serializer can natively encode a list of values."""
    return False
20
+
21
+
22
def is_repeated_field(field_name):
  """Return True iff `field_name` names a repeated field on the Data proto.

  Fix: always return a bool. The original returned the `None` from the
  descriptor lookup (a falsy non-bool) for unknown field names, leaking an
  inconsistent return type to callers.
  """
  descriptor = resources_pb2.Data.DESCRIPTOR.fields_by_name.get(field_name)
  return descriptor is not None and descriptor.label == descriptor.LABEL_REPEATED
25
+
26
+
27
class AtomicFieldSerializer(Serializer):
  """Serializer for scalar (string/number/bool/bytes) fields on the Data proto."""

  def __init__(self, field_name):
    self.field_name = field_name

  def serialize(self, data_proto, value):
    """Assign `value` directly to the named scalar field.

    Raises:
      TypeError: if the proto field rejects the value's type.
    """
    try:
      setattr(data_proto, self.field_name, value)
    except TypeError as exc:
      raise TypeError(f"Incompatible type for {self.field_name}: {type(value)}") from exc

  def deserialize(self, data_proto):
    """Return the raw value stored in the named scalar field."""
    return getattr(data_proto, self.field_name)
40
+
41
+
42
class MessageSerializer(Serializer):
  """Serializer for proto-message fields, singular or repeated.

  `message_class` is a wrapper type providing `from_value`, `from_proto` and
  `to_proto` conversions between Python values and the underlying proto.
  """

  def __init__(self, field_name, message_class):
    self.field_name = field_name
    self.message_class = message_class
    # NOTE: the attribute deliberately caches the result of the module-level
    # helper of the same name.
    self.is_repeated_field = is_repeated_field(field_name)

  def handles_list(self):
    # Repeated message fields can absorb a whole list inline.
    return self.is_repeated_field

  def serialize(self, data_proto, value):
    """Convert `value` via message_class and copy it into the field.

    Raises:
      TypeError: if the converted proto is incompatible with the field.
    """
    proto_value = self.message_class.from_value(value).to_proto()
    target = getattr(data_proto, self.field_name)
    try:
      if self.is_repeated_field:
        target.add().CopyFrom(proto_value)
      else:
        target.CopyFrom(proto_value)
    except TypeError as exc:
      raise TypeError(f"Incompatible type for {self.field_name}: {type(value)}") from exc

  def serialize_list(self, data_proto, values):
    """Bulk-append converted values; only valid for repeated fields."""
    assert self.is_repeated_field
    target = getattr(data_proto, self.field_name)
    target.extend([self.message_class.from_value(item).to_proto() for item in values])

  def deserialize(self, data_proto):
    """Read the field back as a Python value.

    Singular fields distinguish "unset" (None) via HasField. Repeated fields
    unwrap a single element to the bare value and map empty to None.
    """
    source = getattr(data_proto, self.field_name)
    if not self.is_repeated_field:
      if not data_proto.HasField(self.field_name):
        return None
      return self.message_class.from_proto(source)
    values = [self.message_class.from_proto(item) for item in source]
    if not values:
      return None
    return values[0] if len(values) == 1 else values

  def deserialize_list(self, data_proto):
    """Read a repeated field back as a plain list (no single-element unwrap)."""
    assert self.is_repeated_field
    source = getattr(data_proto, self.field_name)
    return [self.message_class.from_proto(item) for item in source]
84
+
85
+
86
class NDArraySerializer(Serializer):
  """Serializer for numeric arrays into an NDArray sub-proto (buffer/shape/dtype).

  When `as_list` is set, values are accepted and returned as (nested) Python
  lists instead of np.ndarray.
  """

  def __init__(self, field_name, as_list=False):
    self.field_name = field_name
    self.as_list = as_list

  def serialize(self, data_proto, value):
    """Encode `value` as raw bytes plus shape and dtype metadata.

    Raises:
      TypeError: if `as_list` is set and value is not iterable, or if the
        resulting array is not numeric.
    """
    if self.as_list and not isinstance(value, Iterable):
      raise TypeError(f"Expected list, got {type(value)}")
    value = np.asarray(value)
    if not np.issubdtype(value.dtype, np.number):
      raise TypeError(f"Expected number array, got {value.dtype}")
    proto = getattr(data_proto, self.field_name)
    proto.buffer = value.tobytes()
    # Fix: clear any previously stored shape first. `extend` appends, so
    # re-serializing into the same proto used to accumulate stale dimensions
    # and produce an invalid shape for the new buffer.
    del proto.shape[:]
    proto.shape.extend(value.shape)
    proto.dtype = str(value.dtype)

  def deserialize(self, data_proto):
    """Decode the NDArray sub-proto back into an ndarray (or nested list)."""
    proto = getattr(data_proto, self.field_name)
    if not proto.buffer:
      # Empty buffer is treated as "field unset".
      return None
    array = np.frombuffer(proto.buffer, dtype=np.dtype(proto.dtype)).reshape(proto.shape)
    return array.tolist() if self.as_list else array
111
+
112
+
113
class JSONSerializer(Serializer):
  """Serializer that stores an arbitrary JSON-encodable value in a string field."""

  def __init__(self, field_name, type=None):
    # `type` is kept for interface compatibility (it shadows the builtin);
    # it records the expected Python type but is not currently enforced.
    self.field_name = field_name
    self.type = type

  def serialize(self, data_proto, value):
    """JSON-encode `value` into the named string field.

    Raises:
      TypeError: if the value is not JSON-serializable (or the field rejects it).
    """
    try:
      setattr(data_proto, self.field_name, json.dumps(value))
    except TypeError as exc:
      raise TypeError(f"Incompatible type for {self.field_name}: {type(value)}") from exc

  def deserialize(self, data_proto):
    """Decode the stored JSON string; an empty string means unset -> None."""
    raw = getattr(data_proto, self.field_name)
    return json.loads(raw) if raw else None
132
+
133
+
134
class ListSerializer(Serializer):
  """Serializer for homogeneous lists, delegating elements to an inner serializer."""

  def __init__(self, inner_serializer):
    self.field_name = 'parts'
    self.inner_serializer = inner_serializer

  def handles_list(self):
    # A list of lists cannot be inlined into a single repeated field: the
    # outer list needs parts[] of its own, so report False (not inlinable).
    return False

  def serialize(self, data_proto, value):
    """Write each element, either inline (repeated inner field) or into parts[].

    Raises:
      TypeError: if `value` is not iterable.
    """
    if not isinstance(value, Iterable):
      raise TypeError(f"Expected iterable, got {type(value)}")
    if self.inner_serializer.handles_list():
      # The inner serializer can pack the whole list into its repeated field.
      self.inner_serializer.serialize_list(data_proto, value)
      return
    for element in value:
      self.inner_serializer.serialize(data_proto.parts.add().data, element)

  def deserialize(self, data_proto):
    """Read the list back, mirroring the two serialization strategies."""
    if self.inner_serializer.handles_list():
      return self.inner_serializer.deserialize_list(data_proto)
    return [self.inner_serializer.deserialize(part.data) for part in data_proto.parts]
160
+
161
+
162
class TupleSerializer(Serializer):
  """Serializer for fixed-length heterogeneous tuples, one parts[] entry each."""

  def __init__(self, inner_serializers):
    self.field_name = 'parts'
    self.inner_serializers = inner_serializers

  def serialize(self, data_proto, value):
    """Write each tuple element to its own part, keyed by its positional id.

    Raises:
      TypeError: if `value` is not a tuple or list.
      ValueError: on a length mismatch with the declared serializers.
    """
    if not isinstance(value, (tuple, list)):
      raise TypeError(f"Expected tuple, got {type(value)}")
    if len(value) != len(self.inner_serializers):
      raise ValueError(f"Expected tuple of length {len(self.inner_serializers)}, got {len(value)}")
    for index, (serializer, element) in enumerate(zip(self.inner_serializers, value)):
      part = data_proto.parts.add()
      part.id = str(index)
      serializer.serialize(part.data, element)

  def deserialize(self, data_proto):
    """Read the tuple back; entirely-empty parts[] means unset -> None.

    Raises:
      ValueError: on a length mismatch with the declared serializers.
    """
    count = len(self.inner_serializers)
    if not data_proto.parts and count:
      return None
    if len(data_proto.parts) != count:
      raise ValueError(
          f"Expected tuple of length {count}, got {len(data_proto.parts)}")
    return tuple(
        serializer.deserialize(part.data)
        for serializer, part in zip(self.inner_serializers, data_proto.parts))
187
+
188
+
189
class NamedFieldsSerializer(Serializer):
  """Serializer for a dict-like set of named fields, one parts[] entry per name."""

  def __init__(self, named_field_serializers: Dict[str, Serializer]):
    self.field_name = 'parts'
    self.named_field_serializers = named_field_serializers

  def serialize(self, data_proto, value):
    """Write every declared field from `value`; all declared fields are required.

    Raises:
      TypeError: if a declared field name is missing from `value`.
    """
    for name, serializer in self.named_field_serializers.items():
      if name not in value:
        raise TypeError(f"Missing field {name}")
      part = self._get_part(data_proto, name, add=True)
      serializer.serialize(part.data, value[name])

  def deserialize(self, data_proto):
    """Read all declared fields into a NamedFields container (None if unset)."""
    if not data_proto.parts and self.named_field_serializers:
      return None
    result = data_types.NamedFields()
    for name, serializer in self.named_field_serializers.items():
      result[name] = serializer.deserialize(self._get_part(data_proto, name).data)
    return result

  def _get_part(self, data_proto, name, add=False):
    """Return the part whose id equals `name`, optionally creating it.

    Raises:
      TypeError: if the part is absent and `add` is False.
    """
    for part in data_proto.parts:
      if part.id == name:
        return part
    if not add:
      raise TypeError(f"Missing part with key {name}")
    part = data_proto.parts.add()
    part.id = name
    return part
220
+
221
+
222
+ # TODO dict serializer, maybe json only?
clarifai/utils/cli.py CHANGED
@@ -1,14 +1,14 @@
1
1
  import importlib
2
2
  import os
3
3
  import pkgutil
4
- import sys
5
- import typing as t
6
- from collections import defaultdict
7
- from typing import OrderedDict
8
4
 
9
5
  import click
10
6
  import yaml
11
- from tabulate import tabulate
7
+
8
+ from rich.console import Console
9
+ from rich.panel import Panel
10
+ from rich.style import Style
11
+ from rich.text import Text
12
12
 
13
13
 
14
14
  def from_yaml(filename: str):
@@ -28,6 +28,19 @@ def dump_yaml(data, filename: str):
28
28
  click.echo(f"Error writing YAML file: {e}", err=True)
29
29
 
30
30
 
31
def set_base_url(env):
  """Map an environment name ('prod' | 'staging' | 'dev') to its API base URL.

  Raises:
    ValueError: for any other environment name.
  """
  urls = {
      'prod': 'https://api.clarifai.com',
      'staging': 'https://api-staging.clarifai.com',
      'dev': 'https://api-dev.clarifai.com',
  }
  try:
    return urls[env]
  except KeyError:
    raise ValueError("Invalid environment. Please choose from 'prod', 'staging', 'dev'.") from None
42
+
43
+
31
44
  # Dynamically find and import all command modules from the cli directory
32
45
  def load_command_modules():
33
46
  package_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'cli')
@@ -37,132 +50,21 @@ def load_command_modules():
37
50
  importlib.import_module(f'clarifai.cli.{module_name}')
38
51
 
39
52
 
40
- def display_co_resources(response,
41
- custom_columns={
42
- 'ID': lambda c: c.id,
43
- 'USER_ID': lambda c: c.user_id,
44
- 'DESCRIPTION': lambda c: c.description,
45
- }):
53
+ def display_co_resources(response, resource_type):
46
54
  """Display compute orchestration resources listing results using rich."""
47
55
 
48
- formatter = TableFormatter(custom_columns)
49
- print(formatter.format(list(response), fmt="plain"))
50
-
51
-
52
- class TableFormatter:
53
-
54
- def __init__(self, custom_columns: OrderedDict):
55
- """
56
- Initializes the TableFormatter with column headers and custom column mappings.
57
-
58
- :param headers: List of column headers for the table.
59
- """
60
- self.custom_columns = custom_columns
61
-
62
- def format(self, objects, fmt='plain'):
63
- """
64
- Formats a list of objects into a table with custom columns.
65
-
66
- :param objects: List of objects to format into a table.
67
- :return: A string representing the table.
68
- """
69
- # Prepare the rows by applying the custom column functions to each object
70
- rows = []
71
- for obj in objects:
72
- # row = [self.custom_columns[header](obj) for header in self.headers]
73
- row = [f(obj) for f in self.custom_columns.values()]
74
- rows.append(row)
75
-
76
- # Create the table
77
- table = tabulate(rows, headers=self.custom_columns.keys(), tablefmt=fmt)
78
- return table
79
-
80
-
81
- class AliasedGroup(click.Group):
82
-
83
- def __init__(self,
84
- name: t.Optional[str] = None,
85
- commands: t.Optional[t.Union[t.MutableMapping[str, click.Command], t.Sequence[
86
- click.Command]]] = None,
87
- **attrs: t.Any) -> None:
88
- super().__init__(name, commands, **attrs)
89
- self.alias_map = {}
90
- self.command_to_aliases = defaultdict(list)
91
-
92
- def add_alias(self, cmd: click.Command, alias: str) -> None:
93
- self.alias_map[alias] = cmd
94
- if alias != cmd.name:
95
- self.command_to_aliases[cmd].append(alias)
96
-
97
- def command(self, aliases=None, *args,
98
- **kwargs) -> t.Callable[[t.Callable[..., t.Any]], click.Command]:
99
- cmd_decorator = super().command(*args, **kwargs)
100
- if aliases is None:
101
- aliases = []
102
-
103
- def aliased_decorator(f):
104
- cmd = cmd_decorator(f)
105
- if cmd.name:
106
- self.add_alias(cmd, cmd.name)
107
- for alias in aliases:
108
- self.add_alias(cmd, alias)
109
- return cmd
110
-
111
- f = None
112
- if args and callable(args[0]):
113
- (f,) = args
114
- if f is not None:
115
- return aliased_decorator(f)
116
- return aliased_decorator
117
-
118
- def group(self, aliases=None, *args,
119
- **kwargs) -> t.Callable[[t.Callable[..., t.Any]], click.Group]:
120
- cmd_decorator = super().group(*args, **kwargs)
121
- if aliases is None:
122
- aliases = []
123
-
124
- def aliased_decorator(f):
125
- cmd = cmd_decorator(f)
126
- if cmd.name:
127
- self.add_alias(cmd, cmd.name)
128
- for alias in aliases:
129
- self.add_alias(cmd, alias)
130
- return cmd
131
-
132
- f = None
133
- if args and callable(args[0]):
134
- (f,) = args
135
- if f is not None:
136
- return aliased_decorator(f)
137
- return aliased_decorator
138
-
139
- def get_command(self, ctx: click.Context, cmd_name: str) -> t.Optional[click.Command]:
140
- rv = click.Group.get_command(self, ctx, cmd_name)
141
- if rv is not None:
142
- return rv
143
- return self.alias_map.get(cmd_name)
144
-
145
- def format_commands(self, ctx, formatter):
146
- sub_commands = self.list_commands(ctx)
147
-
148
- rows = []
149
- for sub_command in sub_commands:
150
- cmd = self.get_command(ctx, sub_command)
151
- if cmd is None or getattr(cmd, 'hidden', False):
152
- continue
153
- if cmd in self.command_to_aliases:
154
- aliases = ', '.join(self.command_to_aliases[cmd])
155
- sub_command = f'{sub_command} ({aliases})'
156
- cmd_help = cmd.help
157
- rows.append((sub_command, cmd_help))
158
-
159
- if rows:
160
- with formatter.section("Commands"):
161
- formatter.write_dl(rows)
162
-
163
-
164
- def validate_context(ctx):
165
- from clarifai.utils.logging import logger
166
- if ctx.obj == {}:
167
- logger.error("CLI config file missing. Run `clarifai login` to set up the CLI config.")
168
- sys.exit(1)
56
+ console = Console()
57
+ panel = Panel(
58
+ Text(f"List of {resource_type}s", justify="center"),
59
+ title="",
60
+ style=Style(color="blue", bold=True),
61
+ border_style="green",
62
+ width=60)
63
+ console.print(panel)
64
+ for indx, item in enumerate(list(response)):
65
+ panel = Panel(
66
+ "\n".join([f"{'ID'}: {item.id}", f"{'Description'}: {item.description}"]),
67
+ title=f"{resource_type} {(indx + 1)}",
68
+ border_style="green",
69
+ width=60)
70
+ console.print(panel)
@@ -1,7 +1,3 @@
1
- import os
2
-
3
1
  CLARIFAI_PAT_ENV_VAR = "CLARIFAI_PAT"
4
2
  CLARIFAI_SESSION_TOKEN_ENV_VAR = "CLARIFAI_SESSION_TOKEN"
5
3
  CLARIFAI_USER_ID_ENV_VAR = "CLARIFAI_USER_ID"
6
-
7
- DEFAULT_CONFIG = f'{os.environ["HOME"]}/.config/clarifai/config'
clarifai/utils/logging.py CHANGED
@@ -10,6 +10,15 @@ import traceback
10
10
  from collections import defaultdict
11
11
  from typing import Any, Dict, List, Optional, Union
12
12
 
13
+ from rich import print as rprint
14
+ from rich.console import Console
15
+ from rich.logging import RichHandler
16
+ from rich.table import Table
17
+ from rich.traceback import install
18
+ from rich.tree import Tree
19
+
20
+ install()
21
+
13
22
  # The default logger to use throughout the SDK is defined at bottom of this file.
14
23
 
15
24
  # For the json logger.
@@ -20,20 +29,6 @@ FIELD_BLACKLIST = [
20
29
  'msg', 'message', 'account', 'levelno', 'created', 'threadName', 'name', 'processName',
21
30
  'module', 'funcName', 'msecs', 'relativeCreated', 'pathname', 'args', 'thread', 'process'
22
31
  ]
23
- COLORS = {
24
- 'ARGUMENTS': '\033[90m', # Gray
25
- 'DEBUG': '\033[90m', # Gray
26
- 'INFO': '\033[32m', # Green
27
- 'WARNING': '\033[33m', # Yellow
28
- 'ERROR': '\033[31m', # Red
29
- 'CRITICAL': '\033[31m', # Red
30
- 'TIME': '\033[34m',
31
- 'RESET': '\033[0m'
32
- }
33
- LOG_FORMAT = f"[%(levelname)s] {COLORS.get('TIME')}%(asctime)s{COLORS.get('RESET')} %(message)s |" \
34
- f"{COLORS.get('ARGUMENTS')} " \
35
- f"%(optional_args)s " \
36
- f"thread=%(thread)d {COLORS.get('RESET')}"
37
32
 
38
33
  # Create thread local storage that the format() call below uses.
39
34
  # This is only used by the json_logger in the appropriate CLARIFAI_DEPLOY levels.
@@ -64,9 +59,6 @@ def get_req_id_from_context():
64
59
 
65
60
  def display_workflow_tree(nodes_data: List[Dict]) -> None:
66
61
  """Displays a tree of the workflow nodes."""
67
- from rich import print as rprint
68
- from rich.tree import Tree
69
-
70
62
  # Create a mapping of node_id to the list of node_ids that are connected to it.
71
63
  node_adj_mapping = defaultdict(list)
72
64
  # Create a mapping of node_id to the node data info.
@@ -112,10 +104,8 @@ def display_workflow_tree(nodes_data: List[Dict]) -> None:
112
104
  rprint(tree)
113
105
 
114
106
 
115
- def table_from_dict(data: List[Dict], column_names: List[str],
116
- title: str = "") -> 'rich.Table': #noqa F821
107
+ def table_from_dict(data: List[Dict], column_names: List[str], title: str = "") -> Table:
117
108
  """Use this function for printing tables from a list of dicts."""
118
- from rich.table import Table
119
109
  table = Table(title=title, show_lines=False, show_header=True, header_style="blue")
120
110
  for column_name in column_names:
121
111
  table.add_column(column_name)
@@ -144,18 +134,23 @@ def _configure_logger(name: str, logger_level: Union[int, str] = logging.NOTSET)
144
134
  # If ENABLE_JSON_LOGGER is not set, then use json logger if in k8s.
145
135
  enabled_json = os.getenv('ENABLE_JSON_LOGGER', None)
146
136
  in_k8s = 'KUBERNETES_SERVICE_HOST' in os.environ
147
- handler = logging.StreamHandler()
148
- handler.setLevel(logger_level)
149
137
  if enabled_json == 'true' or (in_k8s and enabled_json != 'false'):
150
138
  # Add the json handler and formatter
139
+ handler = logging.StreamHandler()
151
140
  formatter = JsonFormatter()
152
141
  handler.setFormatter(formatter)
142
+ logger.addHandler(handler)
153
143
  else:
154
- # create formatter and add it to the handlers
155
- formatter = TerminalFormatter(LOG_FORMAT)
144
+ # Add the new rich handler and formatter
145
+ try:
146
+ width, _ = os.get_terminal_size()
147
+ except OSError:
148
+ width = 255
149
+ handler = RichHandler(
150
+ rich_tracebacks=True, log_time_format="%Y-%m-%d %H:%M:%S.%f", console=Console(width=width))
151
+ formatter = logging.Formatter('%(message)s')
156
152
  handler.setFormatter(formatter)
157
- # add the handlers to the logger
158
- logger.addHandler(handler)
153
+ logger.addHandler(handler)
159
154
 
160
155
 
161
156
  def get_logger(logger_level: Union[int, str] = logging.NOTSET,
@@ -212,8 +207,6 @@ def display_concept_relations_tree(relations_dict: Dict[str, Any]) -> None:
212
207
  Args:
213
208
  relations_dict (dict): A dict of concept relations info.
214
209
  """
215
- from rich import print as rprint
216
- from rich.tree import Tree
217
210
  for parent, children in relations_dict.items():
218
211
  tree = Tree(parent)
219
212
  for child in children:
@@ -379,41 +372,5 @@ class JsonFormatter(logging.Formatter):
379
372
  )
380
373
 
381
374
 
382
- class TerminalFormatter(logging.Formatter):
383
- """ If you have fields in your Formatter (see setup_logger where we setup the format strings) then
384
- you can set them on the record using a filter. We do that for req_id here which is a request
385
- specific field. This allows us to find requests easily between services.
386
- """
387
-
388
- def format(self, record):
389
- record.optional_args = []
390
-
391
- user_id = getattr(thread_log_info, 'user_id', None)
392
- if user_id is not None:
393
- record.optional_args.append("user_id=" + user_id)
394
-
395
- app_id = getattr(thread_log_info, 'app_id', None)
396
- if app_id is not None:
397
- record.optional_args.append("app_id=" + app_id)
398
-
399
- req_id = getattr(thread_log_info, 'req_id', None)
400
- if req_id is not None:
401
- record.optional_args.append("req_id=" + req_id)
402
-
403
- record.optional_args = " ".join(record.optional_args)
404
-
405
- color_code = COLORS.get(record.levelname, '')
406
-
407
- record.levelname = f"{color_code}{record.levelname}{COLORS.get('RESET')}"
408
- record.msg = f"{color_code}{str(record.msg)}{COLORS.get('RESET')}"
409
-
410
- return super(TerminalFormatter, self).format(record)
411
-
412
- def formatTime(self, record, datefmt=None):
413
- # Note we didn't go with UTC here as it's easier to understand time in your time zone.
414
- # The json logger leverages UTC though.
415
- return datetime.datetime.fromtimestamp(record.created).strftime('%H:%M:%S.%f')
416
-
417
-
418
375
  # the default logger for the SDK.
419
376
  logger = get_logger(logger_level=os.environ.get("LOG_LEVEL", "INFO"), name="clarifai")
clarifai/utils/misc.py CHANGED
@@ -12,8 +12,6 @@ RETRYABLE_CODES = [
12
12
  status_code_pb2.MODEL_BUSY_PLEASE_RETRY
13
13
  ]
14
14
 
15
- DEFAULT_CONFIG = f'{os.environ["HOME"]}/.config/clarifai/config'
16
-
17
15
 
18
16
  def status_is_retryable(status_code: int) -> bool:
19
17
  """Check if a status code is retryable."""
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.4
1
+ Metadata-Version: 2.2
2
2
  Name: clarifai
3
- Version: 11.2.3
3
+ Version: 11.2.3rc2
4
4
  Summary: Clarifai Python SDK
5
5
  Home-page: https://github.com/Clarifai/clarifai-python
6
6
  Author: Clarifai
@@ -21,7 +21,7 @@ Requires-Python: >=3.8
21
21
  Description-Content-Type: text/markdown
22
22
  License-File: LICENSE
23
23
  Requires-Dist: clarifai-grpc>=11.2.6
24
- Requires-Dist: clarifai-protocol>=0.0.21
24
+ Requires-Dist: clarifai-protocol>=0.0.22
25
25
  Requires-Dist: numpy>=1.22.0
26
26
  Requires-Dist: tqdm>=4.65.0
27
27
  Requires-Dist: rich>=13.4.2
@@ -42,7 +42,6 @@ Dynamic: description
42
42
  Dynamic: description-content-type
43
43
  Dynamic: home-page
44
44
  Dynamic: license
45
- Dynamic: license-file
46
45
  Dynamic: provides-extra
47
46
  Dynamic: requires-dist
48
47
  Dynamic: requires-python