clarifai 11.8.1__py3-none-any.whl → 11.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -116,13 +116,33 @@ class PipelineBuilder:
         return True
 
     def _upload_pipeline_step_with_version_capture(self, step_path: str) -> tuple[bool, str]:
-        """Upload a pipeline step and capture its version ID."""
+        """
+        Upload a pipeline step and capture its version ID.
+        Implements hash-based optimization to only upload modified steps.
+        """
         try:
             # Use the existing pipeline step builder
             from clarifai.runners.pipeline_steps.pipeline_step_builder import PipelineStepBuilder
 
             builder = PipelineStepBuilder(step_path)
 
+            # Check if we should upload based on hash comparison
+            should_upload = builder.should_upload_step()
+
+            if not should_upload:
+                # Load existing version ID from config-lock.yaml
+                config_lock = builder.load_config_lock()
+                if config_lock and config_lock.get("id"):
+                    version_id = config_lock["id"]
+                    logger.info(
+                        f"Using existing pipeline step version {version_id} (no changes detected)"
+                    )
+                    return True, version_id
+                else:
+                    logger.warning(
+                        "Hash indicates no upload needed, but no version ID found in config-lock.yaml. Proceeding with upload."
+                    )
+
             # Create dockerfile if needed
             builder.create_dockerfile()
@@ -139,6 +159,11 @@ class PipelineBuilder:
             success = builder.upload_pipeline_step_version()
 
             if success and builder.pipeline_step_version_id:
+                # Generate config-lock.yaml with the new version ID
+                builder.save_config_lock(builder.pipeline_step_version_id)
+                logger.info(
+                    f"Generated config-lock.yaml for pipeline step with version {builder.pipeline_step_version_id}"
+                )
                 return True, builder.pipeline_step_version_id
             else:
                 logger.error("Failed to get pipeline step version ID after upload")
@@ -148,42 +173,105 @@ class PipelineBuilder:
             logger.error(f"Error uploading pipeline step: {e}")
             return False, ""
 
-    def update_config_with_versions(self) -> None:
-        """Update the config.yaml with uploaded pipeline step versions."""
+    def prepare_lockfile_with_step_versions(self) -> Dict[str, Any]:
+        """Prepare lockfile data with step versions after pipeline step upload."""
         if not self.uploaded_step_versions:
-            logger.info("No pipeline step versions to update in config")
-            return
-
-        logger.info("Updating config.yaml with pipeline step versions...")
+            logger.info("No pipeline step versions for lockfile")
 
-        # Update the orchestration spec
+        # Create a copy of the orchestration spec to modify
         pipeline_config = self.config["pipeline"]
-        orchestration_spec = pipeline_config["orchestration_spec"]
+        orchestration_spec = pipeline_config["orchestration_spec"].copy()
         argo_spec_str = orchestration_spec["argo_orchestration_spec"]
         argo_spec = yaml.safe_load(argo_spec_str)
 
         # Update templateRef names to include versions
         self._update_template_refs_with_versions(argo_spec)
 
-        # Update the config
-        orchestration_spec["argo_orchestration_spec"] = yaml.dump(
-            argo_spec, Dumper=LiteralBlockDumper, default_flow_style=False
-        )
+        # Create the partial lockfile data structure (without pipeline info)
+        lockfile_data = {
+            "pipeline": {
+                "id": self.pipeline_id,
+                "user_id": self.user_id,
+                "app_id": self.app_id,
+                "version_id": None,  # Will be filled in later
+                "orchestration_spec": {
+                    "argo_orchestration_spec": yaml.dump(
+                        argo_spec, Dumper=LiteralBlockDumper, default_flow_style=False
+                    )
+                },
+            }
+        }
+
+        return lockfile_data
+
+    def update_lockfile_with_pipeline_info(
+        self, lockfile_data: Dict[str, Any], pipeline_version_id: str
+    ) -> Dict[str, Any]:
+        """Update the prepared lockfile data with pipeline version information."""
+        lockfile_data["pipeline"]["version_id"] = pipeline_version_id
+        return lockfile_data
+
+    def generate_lockfile_data(
+        self, pipeline_id: str = None, pipeline_version_id: str = None
+    ) -> Dict[str, Any]:
+        """Generate the complete lockfile data structure without modifying config.yaml.
+
+        This method is kept for backward compatibility. The recommended approach is to use
+        prepare_lockfile_with_step_versions() followed by update_lockfile_with_pipeline_info().
+        """
+        if not self.uploaded_step_versions:
+            logger.info("No pipeline step versions for lockfile")
 
-        # Remove uploaded directories from step_directories
-        remaining_dirs = []
-        for step_dir in pipeline_config.get("step_directories", []):
-            if step_dir not in self.uploaded_step_versions:
-                remaining_dirs.append(step_dir)
+        # Create a copy of the orchestration spec to modify
+        pipeline_config = self.config["pipeline"]
+        orchestration_spec = pipeline_config["orchestration_spec"].copy()
+        argo_spec_str = orchestration_spec["argo_orchestration_spec"]
+        argo_spec = yaml.safe_load(argo_spec_str)
 
-        pipeline_config["step_directories"] = remaining_dirs
+        # Update templateRef names to include versions
+        self._update_template_refs_with_versions(argo_spec)
+
+        # Create the lockfile data structure
+        lockfile_data = {
+            "pipeline": {
+                "id": pipeline_id or self.pipeline_id,
+                "user_id": self.user_id,
+                "app_id": self.app_id,
+                "version_id": pipeline_version_id,
+                "orchestration_spec": {
+                    "argo_orchestration_spec": yaml.dump(
+                        argo_spec, Dumper=LiteralBlockDumper, default_flow_style=False
+                    )
+                },
+            }
+        }
+
+        return lockfile_data
+
+    def save_lockfile(self, lockfile_data: Dict[str, Any], lockfile_path: str = None) -> None:
+        """Save lockfile data to config-lock.yaml."""
+        if lockfile_path is None:
+            lockfile_path = os.path.join(self.config_dir, "config-lock.yaml")
 
-        # Save the updated config
-        self._save_config()
-        logger.info("Updated config.yaml with pipeline step versions")
+        try:
+            with open(lockfile_path, 'w', encoding="utf-8") as file:
+                yaml.dump(
+                    lockfile_data,
+                    file,
+                    Dumper=LiteralBlockDumper,
+                    default_flow_style=False,
+                    sort_keys=False,
+                )
+            logger.info(f"Generated lockfile: {lockfile_path}")
+        except Exception as e:
+            raise ValueError(f"Error saving lockfile {lockfile_path}: {e}")
 
     def _update_template_refs_with_versions(self, argo_spec: Dict[str, Any]) -> None:
-        """Update templateRef names in Argo spec to include version information."""
+        """
+        Update templateRef names in Argo spec to include version information.
+        The step versions should be resolved from the corresponding config-lock.yaml
+        file of each pipeline step, located in the step_directories.
+        """
         for template in argo_spec["spec"]["templates"]:
             if "steps" in template:
                 for step_group in template["steps"]:
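
Taken together, the methods above split lockfile generation into two phases so the pipeline version ID, which only exists after create_pipeline() succeeds, can be filled in late. A minimal usage sketch, assuming builder is a PipelineBuilder whose steps are already uploaded and using a placeholder version ID:

    lockfile_data = builder.prepare_lockfile_with_step_versions()  # version_id is still None here
    # ... PostPipelines runs and yields a pipeline version ID ...
    lockfile_data = builder.update_lockfile_with_pipeline_info(lockfile_data, "v-placeholder")
    builder.save_lockfile(lockfile_data)  # writes config-lock.yaml next to config.yaml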
@@ -199,12 +287,19 @@ class PipelineBuilder:
                                 step_name = parts[-1]
                                 # The step name should match the directory name or be derivable from it
                                 version_id = self.uploaded_step_versions.get(step_name, None)
+
+                                # If not found in uploaded_step_versions, try to get from config-lock.yaml
+                                if version_id is None:
+                                    version_id = self._get_version_from_config_lock(step_name)
+
                                 if version_id is not None:
                                     # Update the templateRef to include version
                                     new_name = f"{name}/versions/{version_id}"
                                     template_ref["name"] = new_name
                                     template_ref["template"] = new_name
                                     logger.info(f"Updated templateRef from {name} to {new_name}")
+                                else:
+                                    logger.warning(f"Could not find version for step: {step_name}")
                             elif self.validator.TEMPLATE_REF_WITH_VERSION_PATTERN.match(name):
                                 # strip the /versions/{version_id} from the end of name
                                 # to get the name like above
@@ -215,6 +310,11 @@ class PipelineBuilder:
                                 # if it already has a version, make sure it matches the uploaded
                                 # version
                                 version_id = self.uploaded_step_versions.get(step_name, None)
+
+                                # If not found in uploaded_step_versions, try to get from config-lock.yaml
+                                if version_id is None:
+                                    version_id = self._get_version_from_config_lock(step_name)
+
                                 if version_id is not None:
                                     # Update the templateRef to include version
                                     new_name = f"{name}/versions/{version_id}"
@@ -223,9 +323,51 @@ class PipelineBuilder:
                                     logger.info(
                                         f"Updated templateRef from {orig_name} to {new_name}"
                                     )
+                                else:
+                                    logger.warning(f"Could not find version for step: {step_name}")
+
+    def _get_version_from_config_lock(self, step_name: str) -> str:
+        """
+        Get version ID from config-lock.yaml file in the corresponding step directory.
 
-    def create_pipeline(self) -> bool:
-        """Create the pipeline using PostPipelines RPC."""
+        :param step_name: Name of the pipeline step
+        :return: Version ID if found, None otherwise
+        """
+        pipeline_config = self.config["pipeline"]
+        step_directories = pipeline_config.get("step_directories", [])
+
+        for step_dir in step_directories:
+            # Check if step_dir matches step_name (handle both exact match and derivable cases)
+            if (
+                step_dir == step_name
+                or step_dir.endswith(f"/{step_name}")
+                or step_name in step_dir
+            ):
+                config_lock_path = os.path.join(self.config_dir, step_dir, "config-lock.yaml")
+
+                if os.path.exists(config_lock_path):
+                    try:
+                        with open(config_lock_path, 'r', encoding='utf-8') as f:
+                            config_lock = yaml.safe_load(f)
+                        version_id = config_lock.get("id")
+                        if version_id:
+                            logger.info(
+                                f"Found version {version_id} for step {step_name} in {config_lock_path}"
+                            )
+                            return version_id
+                    except Exception as e:
+                        logger.warning(
+                            f"Failed to read config-lock.yaml at {config_lock_path}: {e}"
+                        )
+
+        return None
+
+    def create_pipeline(self) -> tuple[bool, str]:
+        """Create the pipeline using PostPipelines RPC.
+
+        Returns:
+            tuple[bool, str]: (success, pipeline_version_id)
+        """
         logger.info(f"Creating pipeline {self.pipeline_id}...")
 
         try:
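
For orientation, the rewrite above only appends a version suffix to unversioned templateRef names; a toy example with made-up identifiers:

    # Hypothetical identifiers, for illustration only.
    name = "users/alice/apps/demo/pipeline-steps/stepA"
    version_id = "abc123"
    new_name = f"{name}/versions/{version_id}"
    # -> "users/alice/apps/demo/pipeline-steps/stepA/versions/abc123"
    # Both templateRef["name"] and templateRef["template"] receive new_name.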
@@ -243,6 +385,11 @@ class PipelineBuilder:
             argo_spec = yaml.safe_load(argo_spec_str)
             api_version = argo_spec.get("apiVersion", "argoproj.io/v1alpha1")
 
+            # Ensure that pipeline_config.argo_orchestration_spec_proto has the updated spec.templates.steps.templateRef values
+            # For each step, if the templateRef is missing a version, append the correct version at the end
+            # The step versions should be resolved from the corresponding config-lock.yaml file of each pipeline step, located in the step_directories
+            self._update_template_refs_with_versions(argo_spec)
+
             # Create pipeline version with orchestration spec
             pipeline_version = resources_pb2.PipelineVersion()
             # Create orchestration spec proto
@@ -269,29 +416,32 @@ class PipelineBuilder:
             if response.status.code == status_code_pb2.SUCCESS:
                 logger.info(f"Successfully created pipeline {self.pipeline_id}")
 
+                pipeline_version_id = ""
                 # Log pipeline and version IDs if available in response
                 if response.pipelines:
                     created_pipeline = response.pipelines[0]
                     logger.info(f"Pipeline ID: {created_pipeline.id}")
                     if created_pipeline.pipeline_version and created_pipeline.pipeline_version.id:
-                        logger.info(f"Pipeline version ID: {created_pipeline.pipeline_version.id}")
+                        pipeline_version_id = created_pipeline.pipeline_version.id
+                        logger.info(f"Pipeline version ID: {pipeline_version_id}")
 
-                return True
+                return True, pipeline_version_id
             else:
                 logger.error(f"Failed to create pipeline: {response.status.description}")
                 logger.error(f"Details: {response.status.details}")
-                return False
+                return False, ""
 
         except Exception as e:
             logger.error(f"Error creating pipeline: {e}")
-            return False
+            return False, ""
 
 
-def upload_pipeline(path: str):
+def upload_pipeline(path: str, no_lockfile: bool = False):
     """
     Upload a pipeline with associated pipeline steps to Clarifai.
 
     :param path: Path to the pipeline configuration file or directory containing config.yaml
+    :param no_lockfile: If True, skip creating config-lock.yaml
     """
     try:
         # Determine if path is a directory or file
@@ -311,15 +461,27 @@ def upload_pipeline(path: str):
             logger.error("Failed to upload pipeline steps")
             sys.exit(1)
 
-        # Step 2: Update config with version information
-        builder.update_config_with_versions()
+        # Step 2: Generate the lockfile data (unless --no-lockfile is specified)
+        # This records the versions of the pipeline steps that were just uploaded in Step 1
+        lockfile_data = None
+        if not no_lockfile:
+            lockfile_data = builder.prepare_lockfile_with_step_versions()
 
         # Step 3: Create the pipeline
-        if not builder.create_pipeline():
+        success, pipeline_version_id = builder.create_pipeline()
+        if not success:
             logger.error("Failed to create pipeline")
             sys.exit(1)
 
-        logger.info("Pipeline upload completed successfully!")
+        # Step 4: Update the lockfile (unless --no-lockfile is specified)
+        if not no_lockfile and lockfile_data:
+            lockfile_data = builder.update_lockfile_with_pipeline_info(
+                lockfile_data, pipeline_version_id
+            )
+            builder.save_lockfile(lockfile_data)
+            logger.info("Pipeline upload completed successfully with lockfile!")
+        else:
+            logger.info("Pipeline upload completed successfully (lockfile skipped)!")
 
     except Exception as e:
         logger.error(f"Pipeline upload failed: {e}")
@@ -290,6 +290,7 @@ class ModelServer:
                 model_id=context.model_id,
                 deployment_id=context.deployment_id,
                 base_url=context.api_base,
+                colorize=True,
             )
             logger.info(
                 "✅ Your model is running locally and is ready for requests from the API...\n"
@@ -33,6 +33,7 @@ def generate_client_script(
     compute_cluster_id: str = None,
     nodepool_id: str = None,
     use_ctx: bool = False,
+    colorize: bool = False,
 ) -> str:
     url_helper = ClarifaiUrlHelper()
 
@@ -91,7 +92,7 @@ response = client.chat.completions.create(
             "content": "How do I check if a Python object is an instance of a class?",
         }},
     ],
-    temperature=0.7,
+    temperature=1.0,
     stream=False,  # stream=True also works, just iterate over the response
 )
 print(response)
@@ -203,6 +204,16 @@ model = Model.from_current_context()
     script_lines.append(method_signatures_str)
     script_lines.append("")
     script = "\n".join(script_lines)
+    if colorize:
+        try:
+            from pygments import highlight  # type: ignore
+            from pygments.formatters import TerminalFormatter  # type: ignore
+            from pygments.lexers import PythonLexer  # type: ignore
+
+            return highlight(script, PythonLexer(), TerminalFormatter())
+        except Exception:
+            # Fall back to plain text if pygments is unavailable
+            return script
     return script
 
clarifai/utils/cli.py CHANGED
@@ -298,6 +298,20 @@ def check_ollama_installed():
     return False
 
 
+def check_lmstudio_installed():
+    """Check if the LM Studio CLI is installed."""
+    try:
+        import subprocess
+
+        result = subprocess.run(['lms', 'version'], capture_output=True, text=True, check=False)
+        if result.returncode == 0:
+            return True
+        else:
+            return False
+    except FileNotFoundError:
+        return False
+
+
 def _is_package_installed(package_name):
     """Helper function to check if a single package in requirements.txt is installed."""
     import importlib.metadata
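
The probe follows the usual pattern for optional CLI toolchains: run the tool's version command, and treat a missing binary (FileNotFoundError) or a nonzero exit code as not installed. A generalized sketch of the same idea:

    import subprocess

    def cli_available(cmd) -> bool:
        """Return True if the command runs and exits 0; False if absent or failing."""
        try:
            return subprocess.run(cmd, capture_output=True, text=True, check=False).returncode == 0
        except FileNotFoundError:
            return False

    print(cli_available(['lms', 'version']))  # same probe as check_lmstudio_installed()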
@@ -389,3 +403,51 @@ def convert_timestamp_to_string(timestamp: Timestamp) -> str:
     datetime_obj = timestamp.ToDatetime()
 
     return datetime_obj.strftime('%Y-%m-%dT%H:%M:%SZ')
+
+
+def customize_huggingface_model(model_path, model_name):
+    config_path = os.path.join(model_path, 'config.yaml')
+    if os.path.exists(config_path):
+        with open(config_path, 'r') as f:
+            config = yaml.safe_load(f)
+
+        # Update the repo_id in the checkpoints section
+        if 'checkpoints' not in config:
+            config['checkpoints'] = {}
+        config['checkpoints']['repo_id'] = model_name
+
+        with open(config_path, 'w') as f:
+            yaml.dump(config, f, default_flow_style=False, sort_keys=False)
+
+        logger.info(f"Updated Hugging Face model repo_id to: {model_name}")
+    else:
+        logger.warning(f"config.yaml not found at {config_path}, skipping model configuration")
+
+
+def customize_lmstudio_model(model_path, model_name, port, context_length):
+    """Customize the LM Studio model configuration in the cloned template files.
+
+    Args:
+        model_path: Path to the cloned model directory
+        model_name: The model name to set (e.g., 'qwen/qwen3-4b-thinking-2507') - optional
+        port: Port for the LM Studio server - optional
+        context_length: Context length for the model - optional
+    """
+    config_path = os.path.join(model_path, 'config.yaml')
+
+    if os.path.exists(config_path):
+        with open(config_path, 'r') as f:
+            config = yaml.safe_load(f)
+        if 'toolkit' not in config or config['toolkit'] is None:
+            config['toolkit'] = {}
+        if model_name is not None:
+            config['toolkit']['model'] = model_name
+        if port is not None:
+            config['toolkit']['port'] = port
+        if context_length is not None:
+            config['toolkit']['context_length'] = context_length
+        with open(config_path, 'w') as f:
+            yaml.dump(config, f, default_flow_style=False, sort_keys=False)
+        logger.info(f"Updated LM Studio model configuration in: {config_path}")
+    else:
+        logger.warning(f"config.yaml not found at {config_path}, skipping model configuration")
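
For illustration, a call like the following would leave the template's config.yaml with a toolkit section shaped as shown (all values hypothetical):

    customize_lmstudio_model('./my-model', 'qwen/qwen3-4b-thinking-2507', 1234, 4096)
    # Resulting config.yaml fragment:
    #   toolkit:
    #     model: qwen/qwen3-4b-thinking-2507
    #     port: 1234
    #     context_length: 4096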
@@ -21,8 +21,7 @@ DEFAULT_LOCAL_RUNNER_DEPLOYMENT_ID = "local-runner-deployment"
 DEFAULT_LOCAL_RUNNER_MODEL_ID = "local-runner-model"
 DEFAULT_LOCAL_RUNNER_APP_ID = "local-runner-app"
 
-# FIXME: should have any-to-any for these cases.
-DEFAULT_LOCAL_RUNNER_MODEL_TYPE = "text-to-text"
+DEFAULT_LOCAL_RUNNER_MODEL_TYPE = "any-to-any"
 
 DEFAULT_LOCAL_RUNNER_COMPUTE_CLUSTER_CONFIG = {
     "compute_cluster": {
@@ -62,8 +61,11 @@ DEFAULT_LOCAL_RUNNER_NODEPOOL_CONFIG = {
         "max_instances": 1,
     }
 }
-DEFAULT_OLLAMA_MODEL_REPO = "https://github.com/Clarifai/runners-examples"
+DEFAULT_TOOLKIT_MODEL_REPO = "https://github.com/Clarifai/runners-examples"
 DEFAULT_OLLAMA_MODEL_REPO_BRANCH = "ollama"
+DEFAULT_HF_MODEL_REPO_BRANCH = "huggingface"
+DEFAULT_LMSTUDIO_MODEL_REPO_BRANCH = "lmstudio"
+DEFAULT_VLLM_MODEL_REPO_BRANCH = "vllm"
 
 STATUS_OK = "200 OK"
 STATUS_MIXED = "207 MIXED"
@@ -0,0 +1,117 @@
+"""
+Hashing utilities for the Clarifai Python SDK.
+
+This module provides functions for computing stable hashes of directories and files,
+commonly used for change detection in pipeline steps and other components.
+"""
+
+import hashlib
+import os
+from typing import List, Optional
+
+
+def hash_directory(
+    directory: str, algo: str = "md5", exclude_files: Optional[List[str]] = None
+) -> str:
+    """
+    Compute a stable hash of all files in a directory.
+
+    This function computes a hash that accounts for:
+    - File relative paths (to detect renames)
+    - File sizes (to detect empty files)
+    - File contents (read in chunks for large files)
+
+    :param directory: Directory to hash
+    :param algo: Hash algorithm ('md5', 'sha1', 'sha256', etc.)
+    :param exclude_files: List of file names to exclude from the hash calculation.
+        If None, defaults to ['config-lock.yaml'] for backward compatibility.
+    :return: Hash as a lowercase hex digest string
+    """
+    if exclude_files is None:
+        exclude_files = ['config-lock.yaml']
+
+    # Ensure the directory exists
+    if not os.path.exists(directory):
+        raise ValueError(f"Directory does not exist: {directory}")
+
+    if not os.path.isdir(directory):
+        raise ValueError(f"Path is not a directory: {directory}")
+
+    hash_func = hashlib.new(algo)
+
+    for root, _, files in os.walk(directory):
+        for name in sorted(files):
+            # Skip files in the exclusion list
+            if name in exclude_files:
+                continue
+
+            filepath = os.path.join(root, name)
+            relative_path = os.path.relpath(filepath, directory)
+
+            # Hash the relative path to detect renames
+            hash_func.update(relative_path.encode("utf-8"))
+
+            # Hash the file size to detect empty files
+            file_size = os.path.getsize(filepath)
+            hash_func.update(str(file_size).encode("utf-8"))
+
+            # Hash the file contents (read in chunks for large files)
+            try:
+                with open(filepath, "rb") as f:
+                    for chunk in iter(lambda: f.read(8192), b""):
+                        hash_func.update(chunk)
+            except (IOError, OSError) as e:
+                # If we can't read the file, include the error in the hash.
+                # This ensures the hash changes if file permissions change.
+                hash_func.update(f"ERROR_READING_FILE: {e}".encode("utf-8"))
+
+    return hash_func.hexdigest()
+
+
+def hash_file(filepath: str, algo: str = "md5") -> str:
+    """
+    Compute a hash of a single file.
+
+    :param filepath: Path to the file to hash
+    :param algo: Hash algorithm ('md5', 'sha1', 'sha256', etc.)
+    :return: Hash as a lowercase hex digest string
+    """
+    if not os.path.exists(filepath):
+        raise ValueError(f"File does not exist: {filepath}")
+
+    if not os.path.isfile(filepath):
+        raise ValueError(f"Path is not a file: {filepath}")
+
+    hash_func = hashlib.new(algo)
+
+    try:
+        with open(filepath, "rb") as f:
+            for chunk in iter(lambda: f.read(8192), b""):
+                hash_func.update(chunk)
+    except (IOError, OSError) as e:
+        raise ValueError(f"Error reading file {filepath}: {e}")
+
+    return hash_func.hexdigest()
+
+
+def verify_hash_algorithm(algo: str) -> bool:
+    """
+    Verify that a hash algorithm is supported.
+
+    :param algo: Hash algorithm name
+    :return: True if the algorithm is supported, False otherwise
+    """
+    try:
+        hashlib.new(algo)
+        return True
+    except ValueError:
+        return False
+
+
+def get_available_algorithms() -> List[str]:
+    """
+    Get the list of available hash algorithms.
+
+    :return: List of available algorithm names
+    """
+    return list(hashlib.algorithms_available)
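
A short usage sketch for the new helpers; the import path is assumed, since the diff omits the new file's header (clarifai/utils/hashing.py fits the package layout):

    from clarifai.utils.hashing import (
        get_available_algorithms,
        hash_directory,
        hash_file,
        verify_hash_algorithm,
    )

    assert verify_hash_algorithm("sha256")
    print(hash_file("config.yaml", algo="sha256"))
    # config-lock.yaml is excluded by default, so regenerating the lockfile
    # leaves the directory hash unchanged:
    print(hash_directory("./my_pipeline_step"))
    print(len(get_available_algorithms()))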
clarifai/utils/secrets.py CHANGED
@@ -1,3 +1,5 @@
+# NOTE(alan): Most of this file is used to support hot reloading of secrets, which has been disabled for now.
+
 import os
 import time
 from contextlib import contextmanager
@@ -149,8 +151,11 @@ def inject_secrets(request: Optional[service_pb2.PostModelOutputsRequest]) -> No
         # Since only env type secrets are injected into the shared volume, we can read them directly.
         variables = get_env_variable(secrets_path)
     else:
-        # If no secrets path is set, assume no secrets and return the request as is.
-        return
+        # If no secrets path is set, use variables from the current environment
+        variables = {}
+        for key, value in os.environ.items():
+            if not key.startswith("CLARIFAI"):
+                variables[key] = value
 
     if not request.HasField("model"):
         request.model.CopyFrom(resources_pb2.Model())
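
The new fallback is equivalent to filtering the process environment with a dict comprehension, excluding Clarifai's own variables (anything prefixed "CLARIFAI"):

    import os

    variables = {k: v for k, v in os.environ.items() if not k.startswith("CLARIFAI")}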
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: clarifai
-Version: 11.8.1
+Version: 11.8.3
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
 Author-email: support@clarifai.com
@@ -19,8 +19,8 @@ Classifier: Operating System :: OS Independent
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: clarifai-grpc>=11.7.9
-Requires-Dist: clarifai-protocol>=0.0.30
+Requires-Dist: clarifai-grpc>=11.8.2
+Requires-Dist: clarifai-protocol>=0.0.32
 Requires-Dist: numpy>=1.22.0
 Requires-Dist: tqdm>=4.65.0
 Requires-Dist: PyYAML>=6.0.1
@@ -34,6 +34,7 @@ Requires-Dist: aiohttp>=3.10.0
 Requires-Dist: uv==0.7.12
 Requires-Dist: ruff==0.11.4
 Requires-Dist: psutil==7.0.0
+Requires-Dist: pygments>=2.19.2
 Requires-Dist: pydantic_core==2.33.2
 Requires-Dist: packaging==25.0
 Provides-Extra: all