clarifai 11.4.8.tar.gz → 11.4.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {clarifai-11.4.8/clarifai.egg-info → clarifai-11.4.9}/PKG-INFO +6 -1
- {clarifai-11.4.8 → clarifai-11.4.9}/README.md +5 -0
- clarifai-11.4.9/clarifai/__init__.py +1 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/model.py +80 -0
- clarifai-11.4.9/clarifai/cli/model_templates.py +243 -0
- {clarifai-11.4.8 → clarifai-11.4.9/clarifai.egg-info}/PKG-INFO +6 -1
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai.egg-info/SOURCES.txt +1 -0
- clarifai-11.4.8/clarifai/__init__.py +0 -1
- {clarifai-11.4.8 → clarifai-11.4.9}/LICENSE +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/MANIFEST.in +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/README.md +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/__main__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/base.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/compute_cluster.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/deployment.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/nodepool.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/app.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/auth/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/auth/helper.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/auth/register.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/auth/stub.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/base.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/compute_cluster.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/dataset.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/deployment.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/input.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/lister.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/model.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/model_client.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/module.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/nodepool.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/runner.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/search.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/user.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/client/workflow.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/base.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/dataset.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/input.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/model.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/rag.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/search.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/constants/workflow.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/export/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/export/inputs_annotations.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/base.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/features.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/image.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/README.md +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/coco_captions.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/coco_detection.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/imagenet_classification.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/loaders/xview_detection.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/multimodal.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/text.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/datasets/upload/utils.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/errors.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/models/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/models/api.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/modules/README.md +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/modules/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/modules/css.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/modules/pages.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/modules/style.css +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/rag/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/rag/rag.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/rag/utils.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/dockerfile_template/Dockerfile.template +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/dummy_openai_model.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/mcp_class.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/model_builder.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/model_class.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/model_run_locally.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/model_runner.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/model_servicer.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/openai_class.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/visual_classifier_class.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/models/visual_detector_class.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/server.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/code_script.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/const.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/data_types/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/data_types/data_types.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/data_utils.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/loader.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/method_signatures.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/openai_convertor.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/serializers.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/runners/utils/url_fetcher.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/schema/search.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/urls/helper.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/cli.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/config.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/constants.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/evaluation/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/evaluation/helpers.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/evaluation/main.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/evaluation/testset_annotation_parser.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/logging.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/misc.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/model_train.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/utils/protobuf.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/versions.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/workflows/__init__.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/workflows/export.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/workflows/utils.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai/workflows/validate.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai.egg-info/dependency_links.txt +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai.egg-info/entry_points.txt +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai.egg-info/requires.txt +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/clarifai.egg-info/top_level.txt +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/pyproject.toml +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/requirements.txt +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/setup.cfg +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/setup.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_app.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_auth.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_data_upload.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_eval.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_misc.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_model_predict.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_model_train.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_modules.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_rag.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_search.py +0 -0
- {clarifai-11.4.8 → clarifai-11.4.9}/tests/test_stub.py +0 -0
{clarifai-11.4.8/clarifai.egg-info → clarifai-11.4.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: clarifai
-Version: 11.4.8
+Version: 11.4.9
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
 Author-email: support@clarifai.com
@@ -608,3 +608,8 @@ rag_agent = RAG(workflow_url="WORKFLOW_URL")
 
 See many more code examples in this [repo](https://github.com/Clarifai/examples).
 Also see the official [Python SDK docs](https://clarifai-python.readthedocs.io/en/latest/index.html)
+
+## :open_file_folder: Model Upload
+
+Examples for uploading models and runners have been moved to this [repo](https://github.com/Clarifai/runners-examples).
+Find our official documentation at [docs.clarifai.com/compute/models/upload](https://docs.clarifai.com/compute/models/upload).
{clarifai-11.4.8 → clarifai-11.4.9}/README.md

@@ -561,3 +561,8 @@ rag_agent = RAG(workflow_url="WORKFLOW_URL")
 
 See many more code examples in this [repo](https://github.com/Clarifai/examples).
 Also see the official [Python SDK docs](https://clarifai-python.readthedocs.io/en/latest/index.html)
+
+## :open_file_folder: Model Upload
+
+Examples for uploading models and runners have been moved to this [repo](https://github.com/Clarifai/runners-examples).
+Find our official documentation at [docs.clarifai.com/compute/models/upload](https://docs.clarifai.com/compute/models/upload).
clarifai-11.4.9/clarifai/__init__.py

@@ -0,0 +1 @@
+__version__ = "11.4.9"
{clarifai-11.4.8 → clarifai-11.4.9}/clarifai/cli/model.py

@@ -25,6 +25,86 @@ def model():
     """Manage models: upload, test, local dev, predict, etc"""
 
 
+@model.command()
+@click.argument(
+    "model_path",
+    type=click.Path(),
+    required=False,
+    default=".",
+)
+@click.option(
+    '--model-type-id',
+    type=click.Choice(['mcp', 'openai'], case_sensitive=False),
+    required=False,
+    help='Model type: "mcp" for MCPModelClass, "openai" for OpenAIModelClass, or leave empty for default ModelClass.',
+)
+def init(model_path, model_type_id):
+    """Initialize a new model directory structure.
+
+    Creates the following structure in the specified directory:
+    ├── 1/
+    │   └── model.py
+    ├── requirements.txt
+    └── config.yaml
+
+    MODEL_PATH: Path where to create the model directory structure. If not specified, the current directory is used by default.
+    """
+    from clarifai.cli.model_templates import (
+        get_config_template,
+        get_model_template,
+        get_requirements_template,
+    )
+
+    # Resolve the absolute path
+    model_path = os.path.abspath(model_path)
+
+    # Create the model directory if it doesn't exist
+    os.makedirs(model_path, exist_ok=True)
+
+    # Create the 1/ subdirectory
+    model_version_dir = os.path.join(model_path, "1")
+    os.makedirs(model_version_dir, exist_ok=True)
+
+    # Create model.py
+    model_py_path = os.path.join(model_version_dir, "model.py")
+    if os.path.exists(model_py_path):
+        logger.warning(f"File {model_py_path} already exists, skipping...")
+    else:
+        model_template = get_model_template(model_type_id)
+        with open(model_py_path, 'w') as f:
+            f.write(model_template)
+        logger.info(f"Created {model_py_path}")
+
+    # Create requirements.txt
+    requirements_path = os.path.join(model_path, "requirements.txt")
+    if os.path.exists(requirements_path):
+        logger.warning(f"File {requirements_path} already exists, skipping...")
+    else:
+        requirements_template = get_requirements_template(model_type_id)
+        with open(requirements_path, 'w') as f:
+            f.write(requirements_template)
+        logger.info(f"Created {requirements_path}")
+
+    # Create config.yaml
+    config_path = os.path.join(model_path, "config.yaml")
+    if os.path.exists(config_path):
+        logger.warning(f"File {config_path} already exists, skipping...")
+    else:
+        config_model_type_id = "text-to-text" # default
+
+        config_template = get_config_template(config_model_type_id)
+        with open(config_path, 'w') as f:
+            f.write(config_template)
+        logger.info(f"Created {config_path}")
+
+    logger.info(f"Model initialization complete in {model_path}")
+    logger.info("Next steps:")
+    logger.info("1. Search for '# TODO: please fill in' comments in the generated files")
+    logger.info("2. Update the model configuration in config.yaml")
+    logger.info("3. Add your model dependencies to requirements.txt")
+    logger.info("4. Implement your model logic in 1/model.py")
+
+
 @model.command()
 @click.argument("model_path", type=click.Path(exists=True), required=False, default=".")
 @click.option(
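The hunk above adds a `clarifai model init` scaffolding command that only writes template files to disk, so it can be exercised in-process with click's test runner. The following is a minimal sketch, assuming clarifai 11.4.9 is installed and that the `model` group shown above is importable from clarifai.cli.model; it is an illustration, not part of the released package.

# Sketch: drive the new `init` command via click's CliRunner (assumes clarifai 11.4.9).
import os
import tempfile

from click.testing import CliRunner

from clarifai.cli.model import model  # the click group that now carries `init`

runner = CliRunner()
with tempfile.TemporaryDirectory() as target:
    # Scaffold an MCP-flavoured model; omit --model-type-id to get the default ModelClass template.
    result = runner.invoke(model, ["init", target, "--model-type-id", "mcp"])
    assert result.exit_code == 0, result.output
    # Per the command's docstring, the target should now contain 1/model.py, requirements.txt, config.yaml.
    print(sorted(os.listdir(target)))
    print(os.listdir(os.path.join(target, "1")))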
clarifai-11.4.9/clarifai/cli/model_templates.py

@@ -0,0 +1,243 @@
+"""Templates for model initialization."""
+
+from clarifai import __version__
+
+
+def get_model_class_template() -> str:
+    """Return the template for a basic ModelClass-based model."""
+    return '''from typing import Iterator, List
+from clarifai.runners.models.model_class import ModelClass
+from clarifai.runners.util.data_utils import Param
+
+class MyModel(ModelClass):
+    """A custom model implementation using ModelClass."""
+
+    def load_model(self):
+        """Load the model here.
+        # TODO: please fill in
+        # Add your model loading logic here
+        """
+        pass
+
+    @ModelClass.method
+    def predict(
+        self,
+        prompt: str = "",
+        chat_history: List[dict] = None,
+        max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+        temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+        top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+    ) -> str:
+        """This is the method that will be called when the runner is run. It takes in an input and returns an output."""
+        # TODO: please fill in
+        # Implement your prediction logic here
+        pass # Replace with your actual logic
+
+    @ModelClass.method
+    def generate(
+        self,
+        prompt: str = "",
+        chat_history: List[dict] = None,
+        max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+        temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+        top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+    ) -> Iterator[str]:
+        """Example yielding a streamed response."""
+        # TODO: please fill in
+        # Implement your generation logic here
+        pass # Replace with your actual logic
+'''
+
+
+def get_mcp_model_class_template() -> str:
+    """Return the template for an MCPModelClass-based model."""
+    return '''from typing import Any
+
+from fastmcp import FastMCP # use fastmcp v2 not the built in mcp
+from pydantic import Field
+
+from clarifai.runners.models.mcp_class import MCPModelClass
+
+# TODO: please fill in
+# Configure your FastMCP server
+server = FastMCP("my-mcp-server", instructions="", stateless_http=True)
+
+
+# TODO: please fill in
+# Add your tools, resources, and prompts here
+@server.tool("example_tool", description="An example tool")
+def example_tool(input_param: Any = Field(description="Example input parameter")):
+    """Example tool implementation."""
+    # TODO: please fill in
+    # Implement your tool logic here
+    return f"Processed: {input_param}"
+
+
+# Static resource example
+@server.resource("config://version")
+def get_version():
+    """Example static resource."""
+    # TODO: please fill in
+    # Return your resource data
+    return "1.0.0"
+
+
+@server.prompt()
+def example_prompt(text: str) -> str:
+    """Example prompt template."""
+    # TODO: please fill in
+    # Define your prompt template
+    return f"Process this text: {text}"
+
+
+class MyModel(MCPModelClass):
+    """A custom model implementation using MCPModelClass."""
+
+    def get_server(self) -> FastMCP:
+        """Return the FastMCP server instance."""
+        return server
+'''
+
+
+def get_openai_model_class_template() -> str:
+    """Return the template for an OpenAIModelClass-based model."""
+    return '''from typing import List
+from openai import OpenAI
+from clarifai.runners.models.openai_class import OpenAIModelClass
+from clarifai.runners.util.data_utils import Param
+from clarifai.runners.utils.openai_convertor import build_openai_messages
+
+class MyModel(OpenAIModelClass):
+    """A custom model implementation using OpenAIModelClass."""
+
+    # TODO: please fill in
+    # Configure your OpenAI-compatible client for local model
+    client = OpenAI(
+        api_key="local-key", # TODO: please fill in - use your local API key
+        base_url="http://localhost:8000/v1", # TODO: please fill in - your local model server endpoint
+    )
+
+    # TODO: please fill in
+    # Specify the model name to use
+    model = "my-local-model" # TODO: please fill in - replace with your local model name
+
+    def load_model(self):
+        """Optional: Add any additional model loading logic here."""
+        # TODO: please fill in (optional)
+        # Add any initialization logic if needed
+        pass
+
+    @OpenAIModelClass.method
+    def predict(
+        self,
+        prompt: str = "",
+        chat_history: List[dict] = None,
+        max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+        temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+        top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+    ) -> str:
+        """Run a single prompt completion using the OpenAI client."""
+        # TODO: please fill in
+        # Implement your prediction logic here
+        messages = build_openai_messages(prompt, chat_history)
+        response = self.client.chat.completions.create(
+            model=self.model,
+            messages=messages,
+            max_completion_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+        )
+        return response.choices[0].message.content
+
+    @OpenAIModelClass.method
+    def generate(
+        self,
+        prompt: str = "",
+        chat_history: List[dict] = None,
+        max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+        temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+        top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+    ):
+        """Stream a completion response using the OpenAI client."""
+        # TODO: please fill in
+        # Implement your streaming logic here
+        messages = build_openai_messages(prompt, chat_history)
+        stream = self.client.chat.completions.create(
+            model=self.model,
+            messages=messages,
+            max_completion_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+            stream=True,
+        )
+        for chunk in stream:
+            if chunk.choices:
+                text = (chunk.choices[0].delta.content
+                        if (chunk and chunk.choices[0].delta.content) is not None else '')
+                yield text
+'''
+
+
+def get_config_template(model_type_id: str = "text-to-text") -> str:
+    """Return the template for config.yaml."""
+    return f'''# Configuration file for your Clarifai model
+
+model:
+  id: "my-model" # TODO: please fill in - replace with your model ID
+  user_id: "user_id" # TODO: please fill in - replace with your user ID
+  app_id: "app_id" # TODO: please fill in - replace with your app ID
+  model_type_id: "{model_type_id}" # TODO: please fill in - replace if different model type ID
+
+build_info:
+  python_version: "3.12"
+
+# TODO: please fill in - adjust compute requirements for your model
+inference_compute_info:
+  cpu_limit: "1" # TODO: please fill in - Amount of CPUs to use as a limit
+  cpu_memory: "1Gi" # TODO: please fill in - Amount of CPU memory to use as a limit
+  cpu_requests: "0.5" # TODO: please fill in - Amount of CPUs to use as a minimum
+  cpu_memory_requests: "512Mi" # TODO: please fill in - Amount of CPU memory to use as a minimum
+  num_accelerators: 1 # TODO: please fill in - Amount of GPU/TPUs to use
+  accelerator_type: ["NVIDIA-*"] # TODO: please fill in - type of accelerators requested
+  accelerator_memory: "1Gi" # TODO: please fill in - Amount of accelerator/GPU memory to use as a minimum
+
+# TODO: please fill in (optional) - add checkpoints section if needed
+# checkpoints:
+#   type: "huggingface" # supported type
+#   repo_id: "your-model-repo" # for huggingface
+#   when: "build" # or "runtime", "upload"
+'''
+
+
+def get_requirements_template(model_type_id: str = None) -> str:
+    """Return the template for requirements.txt."""
+    requirements = f'''# Clarifai SDK - required
+clarifai>={__version__}
+'''
+    if model_type_id == "mcp":
+        requirements += "fastmcp\n"
+    elif model_type_id == "openai":
+        requirements += "openai\n"
+    requirements += '''
+# TODO: please fill in - add your model's dependencies here
+# Examples:
+# torch>=2.0.0
+# transformers>=4.30.0
+# numpy>=1.21.0
+# pillow>=9.0.0
+'''
+    return requirements
+
+
+# Mapping of model type IDs to their corresponding templates
+MODEL_TYPE_TEMPLATES = {
+    "mcp": get_mcp_model_class_template,
+    "openai": get_openai_model_class_template,
+}
+
+
+def get_model_template(model_type_id: str = None) -> str:
+    """Get the appropriate model template based on model_type_id."""
+    if model_type_id in MODEL_TYPE_TEMPLATES:
+        return MODEL_TYPE_TEMPLATES[model_type_id]()
+    return get_model_class_template()
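The template helpers in the new module can also be rendered directly, which is a convenient way to preview what `init` will write before touching the filesystem. A short sketch, again assuming clarifai 11.4.9 is installed; it is illustrative only and not part of the package diff.

# Sketch: render the scaffolding templates without going through the CLI.
from clarifai.cli.model_templates import (
    get_config_template,
    get_model_template,
    get_requirements_template,
)

# An unknown or missing model_type_id falls back to the plain ModelClass template.
print(get_model_template("openai").splitlines()[0])  # first line of the OpenAIModelClass template
print(get_model_template(None).splitlines()[0])      # first line of the default ModelClass template

# config.yaml is rendered with the given model_type_id ("text-to-text" by default).
print(get_config_template())

# requirements.txt pins clarifai>=<installed version> and appends fastmcp or openai when relevant.
print(get_requirements_template("mcp"))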
{clarifai-11.4.8 → clarifai-11.4.9/clarifai.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: clarifai
-Version: 11.4.8
+Version: 11.4.9
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
 Author-email: support@clarifai.com
@@ -608,3 +608,8 @@ rag_agent = RAG(workflow_url="WORKFLOW_URL")
 
 See many more code examples in this [repo](https://github.com/Clarifai/examples).
 Also see the official [Python SDK docs](https://clarifai-python.readthedocs.io/en/latest/index.html)
+
+## :open_file_folder: Model Upload
+
+Examples for uploading models and runners have been moved to this [repo](https://github.com/Clarifai/runners-examples).
+Find our official documentation at [docs.clarifai.com/compute/models/upload](https://docs.clarifai.com/compute/models/upload).
clarifai-11.4.8/clarifai/__init__.py

@@ -1 +0,0 @@
-__version__ = "11.4.8"