clarifai 11.5.2__py3-none-any.whl → 11.5.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/model.py +33 -1
- clarifai/cli/pipeline.py +137 -0
- clarifai/cli/pipeline_step.py +104 -0
- clarifai/cli/templates/__init__.py +1 -0
- clarifai/cli/templates/pipeline_step_templates.py +64 -0
- clarifai/cli/templates/pipeline_templates.py +150 -0
- clarifai/client/auth/helper.py +23 -0
- clarifai/client/auth/register.py +5 -0
- clarifai/client/auth/stub.py +116 -12
- clarifai/client/base.py +9 -0
- clarifai/client/model.py +111 -7
- clarifai/client/model_client.py +355 -6
- clarifai/client/user.py +81 -0
- clarifai/runners/models/model_builder.py +52 -9
- clarifai/runners/pipeline_steps/__init__.py +0 -0
- clarifai/runners/pipeline_steps/pipeline_step_builder.py +510 -0
- clarifai/runners/pipelines/__init__.py +0 -0
- clarifai/runners/pipelines/pipeline_builder.py +313 -0
- clarifai/runners/utils/code_script.py +40 -7
- clarifai/runners/utils/const.py +2 -2
- clarifai/runners/utils/model_utils.py +135 -0
- clarifai/runners/utils/pipeline_validation.py +153 -0
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/METADATA +1 -1
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/RECORD +30 -19
- /clarifai/cli/{model_templates.py → templates/model_templates.py} +0 -0
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/WHEEL +0 -0
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/entry_points.txt +0 -0
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/licenses/LICENSE +0 -0
- {clarifai-11.5.2.dist-info → clarifai-11.5.3.dist-info}/top_level.txt +0 -0
clarifai/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "11.5.2"
+__version__ = "11.5.3"
clarifai/cli/model.py
CHANGED
@@ -49,7 +49,7 @@ def init(model_path, model_type_id):
 
     MODEL_PATH: Path where to create the model directory structure. If not specified, the current directory is used by default.
     """
-    from clarifai.cli.model_templates import (
+    from clarifai.cli.templates.model_templates import (
         get_config_template,
         get_model_template,
         get_requirements_template,
@@ -819,3 +819,35 @@ def predict(
         output_config=output_config,
     )  ## TO DO: Add support for input_id
     click.echo(model_prediction)
+
+
+@model.command(name="list")
+@click.argument(
+    "user_id",
+    required=False,
+    default=None,
+)
+@click.option(
+    '--app_id',
+    '-a',
+    type=str,
+    default=None,
+    show_default=True,
+    help="Get all models of an app",
+)
+@click.pass_context
+def list_model(ctx, user_id, app_id):
+    """List models of user/community
+
+    USER_ID: User id. If not specified, the current user is used by default. Set "all" to get all public models in Clarifai platform.
+    """
+    from clarifai.client import User
+
+    try:
+        pat = ctx.obj.contexts["default"]["env"]["CLARIFAI_PAT"]
+    except Exception as e:
+        pat = None
+
+    User(pat=pat).list_models(
+        user_id=user_id, app_id=app_id, show=True, return_clarifai_model=False
+    )
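For reference, the new `clarifai model list` command is a thin wrapper over the client call shown above; a minimal sketch of the equivalent programmatic usage (the PAT and user_id values are placeholders, not part of the release):

```python
from clarifai.client import User

# Mirrors what the new `list_model` CLI command does: pat may also be None if no
# CLI context is configured; "all" lists public community models per the docstring.
User(pat="YOUR_PAT").list_models(
    user_id="all",
    app_id=None,
    show=True,
    return_clarifai_model=False,
)
```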
clarifai/cli/pipeline.py
ADDED
@@ -0,0 +1,137 @@
+import os
+import shutil
+
+import click
+
+from clarifai.cli.base import cli
+from clarifai.utils.logging import logger
+
+
+@cli.group(
+    ['pipeline', 'pl'],
+    context_settings={'max_content_width': shutil.get_terminal_size().columns - 10},
+)
+def pipeline():
+    """Manage pipelines: upload, init, etc"""
+
+
+@pipeline.command()
+@click.argument("path", type=click.Path(exists=True), required=False, default=".")
+def upload(path):
+    """Upload a pipeline with associated pipeline steps to Clarifai.
+
+    PATH: Path to the pipeline configuration file or directory containing config.yaml. If not specified, the current directory is used by default.
+    """
+    from clarifai.runners.pipelines.pipeline_builder import upload_pipeline
+
+    upload_pipeline(path)
+
+
+@pipeline.command()
+@click.argument(
+    "pipeline_path",
+    type=click.Path(),
+    required=False,
+    default=".",
+)
+def init(pipeline_path):
+    """Initialize a new pipeline project structure.
+
+    Creates the following structure in the specified directory:
+    ├── config.yaml              # Pipeline configuration
+    ├── stepA/                   # First pipeline step
+    │   ├── config.yaml          # Step A configuration
+    │   ├── requirements.txt     # Step A dependencies
+    │   └── 1/
+    │       └── pipeline_step.py # Step A implementation
+    ├── stepB/                   # Second pipeline step
+    │   ├── config.yaml          # Step B configuration
+    │   ├── requirements.txt     # Step B dependencies
+    │   └── 1/
+    │       └── pipeline_step.py # Step B implementation
+    └── README.md                # Documentation
+
+    PIPELINE_PATH: Path where to create the pipeline project structure. If not specified, the current directory is used by default.
+    """
+    from clarifai.cli.templates.pipeline_templates import (
+        get_pipeline_config_template,
+        get_pipeline_step_config_template,
+        get_pipeline_step_requirements_template,
+        get_pipeline_step_template,
+        get_readme_template,
+    )
+
+    # Resolve the absolute path
+    pipeline_path = os.path.abspath(pipeline_path)
+
+    # Create the pipeline directory if it doesn't exist
+    os.makedirs(pipeline_path, exist_ok=True)
+
+    # Create pipeline config.yaml
+    config_path = os.path.join(pipeline_path, "config.yaml")
+    if os.path.exists(config_path):
+        logger.warning(f"File {config_path} already exists, skipping...")
+    else:
+        config_template = get_pipeline_config_template()
+        with open(config_path, 'w', encoding='utf-8') as f:
+            f.write(config_template)
+        logger.info(f"Created {config_path}")
+
+    # Create README.md
+    readme_path = os.path.join(pipeline_path, "README.md")
+    if os.path.exists(readme_path):
+        logger.warning(f"File {readme_path} already exists, skipping...")
+    else:
+        readme_template = get_readme_template()
+        with open(readme_path, 'w', encoding='utf-8') as f:
+            f.write(readme_template)
+        logger.info(f"Created {readme_path}")
+
+    # Create pipeline steps (stepA and stepB)
+    for step_id in ["stepA", "stepB"]:
+        step_dir = os.path.join(pipeline_path, step_id)
+        os.makedirs(step_dir, exist_ok=True)
+
+        # Create the 1/ subdirectory for the step version
+        step_version_dir = os.path.join(step_dir, "1")
+        os.makedirs(step_version_dir, exist_ok=True)
+
+        # Create step config.yaml
+        step_config_path = os.path.join(step_dir, "config.yaml")
+        if os.path.exists(step_config_path):
+            logger.warning(f"File {step_config_path} already exists, skipping...")
+        else:
+            step_config_template = get_pipeline_step_config_template(step_id)
+            with open(step_config_path, 'w', encoding='utf-8') as f:
+                f.write(step_config_template)
+            logger.info(f"Created {step_config_path}")
+
+        # Create step requirements.txt
+        step_requirements_path = os.path.join(step_dir, "requirements.txt")
+        if os.path.exists(step_requirements_path):
+            logger.warning(f"File {step_requirements_path} already exists, skipping...")
+        else:
+            step_requirements_template = get_pipeline_step_requirements_template()
+            with open(step_requirements_path, 'w', encoding='utf-8') as f:
+                f.write(step_requirements_template)
+            logger.info(f"Created {step_requirements_path}")
+
+        # Create step pipeline_step.py
+        step_py_path = os.path.join(step_version_dir, "pipeline_step.py")
+        if os.path.exists(step_py_path):
+            logger.warning(f"File {step_py_path} already exists, skipping...")
+        else:
+            step_py_template = get_pipeline_step_template(step_id)
+            with open(step_py_path, 'w', encoding='utf-8') as f:
+                f.write(step_py_template)
+            logger.info(f"Created {step_py_path}")
+
+    logger.info(f"Pipeline initialization complete in {pipeline_path}")
+    logger.info("Next steps:")
+    logger.info("1. Search for '# TODO: please fill in' comments in the generated files")
+    logger.info("2. Update your user_id and app_id in all config.yaml files")
+    logger.info(
+        "3. Implement your pipeline step logic in stepA/1/pipeline_step.py and stepB/1/pipeline_step.py"
+    )
+    logger.info("4. Add dependencies to requirements.txt files as needed")
+    logger.info("5. Run 'clarifai pipeline upload config.yaml' to upload your pipeline")
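A rough sketch of exercising the new `pipeline init` command in-process with click's test runner; this is illustrative only, assuming the `pipeline` group can be invoked directly, and the target path is hypothetical:

```python
from click.testing import CliRunner

from clarifai.cli.pipeline import pipeline  # the command group added in this release

runner = CliRunner()
# Scaffolds config.yaml, README.md, stepA/ and stepB/ under ./my-pipeline.
result = runner.invoke(pipeline, ["init", "./my-pipeline"])
print(result.output)
```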
clarifai/cli/pipeline_step.py
ADDED
@@ -0,0 +1,104 @@
+import os
+import shutil
+
+import click
+
+from clarifai.cli.base import cli
+from clarifai.utils.logging import logger
+
+
+@cli.group(
+    ['pipelinestep'],
+    context_settings={'max_content_width': shutil.get_terminal_size().columns - 10},
+)
+def pipeline_step():
+    """Manage pipeline steps: upload, test, etc"""
+
+
+@pipeline_step.command()
+@click.argument("pipeline_step_path", type=click.Path(exists=True), required=False, default=".")
+@click.option(
+    '--skip_dockerfile',
+    is_flag=True,
+    help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.',
+)
+def upload(pipeline_step_path, skip_dockerfile):
+    """Upload a pipeline step to Clarifai.
+
+    PIPELINE_STEP_PATH: Path to the pipeline step directory. If not specified, the current directory is used by default.
+    """
+    from clarifai.runners.pipeline_steps.pipeline_step_builder import upload_pipeline_step
+
+    upload_pipeline_step(pipeline_step_path, skip_dockerfile)
+
+
+@pipeline_step.command()
+@click.argument(
+    "pipeline_step_path",
+    type=click.Path(),
+    required=False,
+    default=".",
+)
+def init(pipeline_step_path):
+    """Initialize a new pipeline step directory structure.
+
+    Creates the following structure in the specified directory:
+    ├── 1/
+    │   └── pipeline_step.py
+    ├── requirements.txt
+    └── config.yaml
+
+    PIPELINE_STEP_PATH: Path where to create the pipeline step directory structure. If not specified, the current directory is used by default.
+    """
+    from clarifai.cli.templates.pipeline_step_templates import (
+        get_config_template,
+        get_pipeline_step_template,
+        get_requirements_template,
+    )
+
+    # Resolve the absolute path
+    pipeline_step_path = os.path.abspath(pipeline_step_path)
+
+    # Create the pipeline step directory if it doesn't exist
+    os.makedirs(pipeline_step_path, exist_ok=True)
+
+    # Create the 1/ subdirectory
+    pipeline_step_version_dir = os.path.join(pipeline_step_path, "1")
+    os.makedirs(pipeline_step_version_dir, exist_ok=True)
+
+    # Create pipeline_step.py
+    pipeline_step_py_path = os.path.join(pipeline_step_version_dir, "pipeline_step.py")
+    if os.path.exists(pipeline_step_py_path):
+        logger.warning(f"File {pipeline_step_py_path} already exists, skipping...")
+    else:
+        pipeline_step_template = get_pipeline_step_template()
+        with open(pipeline_step_py_path, 'w') as f:
+            f.write(pipeline_step_template)
+        logger.info(f"Created {pipeline_step_py_path}")
+
+    # Create requirements.txt
+    requirements_path = os.path.join(pipeline_step_path, "requirements.txt")
+    if os.path.exists(requirements_path):
+        logger.warning(f"File {requirements_path} already exists, skipping...")
+    else:
+        requirements_template = get_requirements_template()
+        with open(requirements_path, 'w') as f:
+            f.write(requirements_template)
+        logger.info(f"Created {requirements_path}")
+
+    # Create config.yaml
+    config_path = os.path.join(pipeline_step_path, "config.yaml")
+    if os.path.exists(config_path):
+        logger.warning(f"File {config_path} already exists, skipping...")
+    else:
+        config_template = get_config_template()
+        with open(config_path, 'w') as f:
+            f.write(config_template)
+        logger.info(f"Created {config_path}")
+
+    logger.info(f"Pipeline step initialization complete in {pipeline_step_path}")
+    logger.info("Next steps:")
+    logger.info("1. Search for '# TODO: please fill in' comments in the generated files")
+    logger.info("2. Update the pipeline step configuration in config.yaml")
+    logger.info("3. Add your pipeline step dependencies to requirements.txt")
+    logger.info("4. Implement your pipeline step logic in 1/pipeline_step.py")
clarifai/cli/templates/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Template files for CLI init commands."""
clarifai/cli/templates/pipeline_step_templates.py
ADDED
@@ -0,0 +1,64 @@
+"""Templates for initializing pipeline step projects."""
+
+from clarifai.versions import CLIENT_VERSION
+
+
+def get_config_template():
+    """Get the config.yaml template for pipeline steps."""
+    return """pipeline_step:
+  id: "text-classifier-train-upload-step" # TODO: please fill in
+  user_id: "your_user_id" # TODO: please fill in
+  app_id: "your_app_id" # TODO: please fill in
+
+pipeline_step_input_params:
+  - name: param_a
+  - name: param_b
+    default: "param_b_allowed_value1"
+    description: "param_b is the second parameter of the pipeline step"
+    accepted_values: # list of accepted values for param_b
+      - "param_b_allowed_value1"
+      - "param_b_allowed_value2"
+      - "param_b_allowed_value3"
+
+build_info:
+  python_version: "3.12"
+
+pipeline_step_compute_info:
+  cpu_limit: "500m"
+  cpu_memory: "500Mi"
+  num_accelerators: 0
+"""
+
+
+def get_pipeline_step_template():
+    """Get the pipeline_step.py template."""
+    return '''import argparse
+
+import clarifai
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Concatenate two strings.')
+    parser.add_argument('--param_a', type=str, required=True, help='First string to concatenate')
+    parser.add_argument('--param_b', type=str, required=True, help='Second string to concatenate')
+
+    args = parser.parse_args()
+
+    print(clarifai.__version__)
+
+    print(f"Concatenation Output: {args.param_a + args.param_b}")
+
+
+if __name__ == "__main__":
+    main()
+'''
+
+
+def get_requirements_template():
+    """Get the requirements.txt template."""
+    return f'''clarifai=={CLIENT_VERSION}
+# Add your pipeline step dependencies here
+# Example:
+# torch>=1.9.0
+# transformers>=4.20.0
+'''
clarifai/cli/templates/pipeline_templates.py
ADDED
@@ -0,0 +1,150 @@
+"""Templates for initializing pipeline projects."""
+
+from clarifai.versions import CLIENT_VERSION
+
+
+def get_pipeline_config_template():
+    """Get the config.yaml template for pipelines."""
+    return """pipeline:
+  id: "hello-world-pipeline" # TODO: please fill in
+  user_id: "your_user_id" # TODO: please fill in
+  app_id: "your_app_id" # TODO: please fill in
+  step_directories:
+    - stepA
+    - stepB
+  orchestration_spec:
+    argo_orchestration_spec: |
+      apiVersion: argoproj.io/v1alpha1
+      kind: Workflow
+      metadata:
+        generateName: hello-world-pipeline-
+      spec:
+        entrypoint: sequence
+        templates:
+          - name: sequence
+            steps:
+              - - name: step-a
+                  templateRef:
+                    name: users/your_user_id/apps/your_app_id/pipeline-steps/stepA # TODO: please fill in
+                    template: users/your_user_id/apps/your_app_id/pipeline-steps/stepA # TODO: please fill in
+              - - name: step-b
+                  templateRef:
+                    name: users/your_user_id/apps/your_app_id/pipeline-steps/stepB # TODO: please fill in
+                    template: users/your_user_id/apps/your_app_id/pipeline-steps/stepB # TODO: please fill in
+"""
+
+
+def get_pipeline_step_config_template(step_id: str):
+    """Get the config.yaml template for a pipeline step."""
+    return f"""pipeline_step:
+  id: "{step_id}" # TODO: please fill in
+  user_id: "your_user_id" # TODO: please fill in
+  app_id: "your_app_id" # TODO: please fill in
+
+pipeline_step_input_params:
+  - name: input_text
+    description: "Text input for processing"
+
+build_info:
+  python_version: "3.12"
+
+pipeline_step_compute_info:
+  cpu_limit: "500m"
+  cpu_memory: "500Mi"
+  num_accelerators: 0
+"""
+
+
+def get_pipeline_step_template(step_id: str):
+    """Get the pipeline_step.py template for a pipeline step."""
+    return f'''import argparse
+
+import clarifai
+
+
+def main():
+    parser = argparse.ArgumentParser(description='{step_id} processing step.')
+    parser.add_argument('--input_text', type=str, required=True, help='Text input for processing')
+
+    args = parser.parse_args()
+
+    print(clarifai.__version__)
+
+    # TODO: Implement your pipeline step logic here
+    print(f"{step_id} processed: {{args.input_text}}")
+
+
+if __name__ == "__main__":
+    main()
+'''
+
+
+def get_pipeline_step_requirements_template():
+    """Get the requirements.txt template for pipeline steps."""
+    return f'''clarifai=={CLIENT_VERSION}
+# Add your pipeline step dependencies here
+# Example:
+# torch>=1.9.0
+# transformers>=4.20.0
+'''
+
+
+def get_readme_template():
+    """Get the README.md template for the pipeline project."""
+    return """# Pipeline Project
+
+This project contains a Clarifai pipeline with associated pipeline steps.
+
+## Structure
+
+```
+├── config.yaml              # Pipeline configuration
+├── stepA/                   # First pipeline step
+│   ├── config.yaml          # Step A configuration
+│   ├── requirements.txt     # Step A dependencies
+│   └── 1/
+│       └── pipeline_step.py # Step A implementation
+├── stepB/                   # Second pipeline step
+│   ├── config.yaml          # Step B configuration
+│   ├── requirements.txt     # Step B dependencies
+│   └── 1/
+│       └── pipeline_step.py # Step B implementation
+└── README.md                # This file
+```
+
+## Getting Started
+
+1. **Configure the pipeline**: Edit `config.yaml` and update the TODO fields:
+   - Set your `user_id` and `app_id`
+   - Update the pipeline `id`
+   - Modify the Argo orchestration spec as needed
+
+2. **Configure pipeline steps**: For each step directory (stepA, stepB):
+   - Edit `config.yaml` and fill in the TODO fields
+   - Update `requirements.txt` with your dependencies
+   - Implement your logic in `1/pipeline_step.py`
+
+3. **Upload the pipeline**:
+   ```bash
+   clarifai pipeline upload config.yaml
+   ```
+
+   This will:
+   - Upload the pipeline steps from the `step_directories`
+   - Create the pipeline with proper orchestration
+   - Link all components together
+
+## Pipeline Steps
+
+### stepA
+TODO: Describe what stepA does
+
+### stepB
+TODO: Describe what stepB does
+
+## Customization
+
+- Add more pipeline steps by creating new directories and adding them to `step_directories` in `config.yaml`
+- Modify the Argo orchestration spec to change the execution flow
+- Update compute resources in each step's `config.yaml` as needed
+"""
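Taken together, the templates above are what `clarifai pipeline init` writes to disk; uploading the scaffolded project can also be done programmatically through the builder entry point the CLI imports in this release (the path shown is a hypothetical example):

```python
from clarifai.runners.pipelines.pipeline_builder import upload_pipeline

# Accepts a pipeline config.yaml or a directory containing one, per the CLI docstring.
upload_pipeline("./my-pipeline/config.yaml")
```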
clarifai/client/auth/helper.py
CHANGED
@@ -312,6 +312,29 @@ class ClarifaiAuthHelper:
         stub = service_pb2_grpc.V2Stub(channel)
         return stub
 
+    def get_async_stub(self) -> service_pb2_grpc.V2Stub:
+        """Get the API gRPC async stub using the right channel based on the API endpoint base.
+        Returns:
+          stub: The service_pb2_grpc.V2Stub stub for the API.
+        """
+        if self._base not in base_https_cache:
+            raise Exception("Cannot determine if base %s is https" % self._base)
+
+        https = base_https_cache[self._base]
+        if https:
+            channel = ClarifaiChannel.get_aio_grpc_channel(
+                base=self._base, root_certificates_path=self._root_certificates_path
+            )
+        else:
+            if self._base.find(":") >= 0:
+                host, port = self._base.split(":")
+            else:
+                host = self._base
+                port = 80
+            channel = ClarifaiChannel.get_aio_insecure_grpc_channel(base=host, port=port)
+        stub = service_pb2_grpc.V2Stub(channel)
+        return stub
+
     @property
     def ui(self) -> str:
         """Return the domain for the UI."""
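A minimal sketch of consuming the new async stub; only `get_async_stub()` comes from this diff, while the constructor arguments and the `metadata`/`get_user_app_id_proto()` helpers are assumed to match the existing synchronous `get_stub()` path:

```python
import asyncio

from clarifai_grpc.grpc.api import service_pb2
from clarifai.client.auth.helper import ClarifaiAuthHelper

helper = ClarifaiAuthHelper(user_id="me", app_id="my-app", pat="YOUR_PAT")
stub = helper.get_async_stub()  # V2Stub bound to a grpc.aio channel

async def list_models():
    # Methods on an aio-backed stub return awaitables instead of blocking.
    return await stub.ListModels(
        service_pb2.ListModelsRequest(user_app_id=helper.get_user_app_id_proto()),
        metadata=helper.metadata,
    )

response = asyncio.run(list_models())
```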
clarifai/client/auth/register.py
CHANGED
@@ -18,6 +18,11 @@ def _register_classes():
     for name in dir(grpc):
         if name.endswith('Callable'):
             RpcCallable.register(getattr(grpc, name))
+    # add grpc aio classes as subclasses of the abcs, so they also succeed in isinstance calls
+    # This is needed for calling AuthorizedRPCCallable in the async stub with metadata headers
+    for name in dir(grpc.aio):
+        if name.endswith('Callable'):
+            RpcCallable.register(getattr(grpc.aio, name))
 
 
 _register_classes()