pinexq-cli 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pinexq/cli/__init__.py ADDED
File without changes
pinexq/cli/__main__.py ADDED
@@ -0,0 +1,28 @@
1
+ from typing import Optional
2
+ import signal
3
+ import logging
4
+
5
+ from dotenv import load_dotenv
6
+
7
+ from pinexq.cli.cmd.cli import pinexq
8
+
9
# Module-level logger for the CLI entry point.
log = logging.getLogger(__name__)

# Load variables from a local .env file into the environment before the CLI
# parses options, so envvar-backed flags (e.g. PINEXQ_API_KEY) pick them up.
load_dotenv()
12
+
13
def signal_handler(_sig, _frame):
    """SIGINT (Ctrl-C) handler: print a short notice and terminate with exit code 0."""
    print('Aborting')
    exit(0)
16
+
17
def main(argv: Optional[list[str]] = None) -> None:
    """CLI entry point.

    Installs the Ctrl-C handler and delegates argument parsing to the Typer
    app. When ``argv`` is ``None`` Typer reads ``sys.argv`` on its own.
    """
    signal.signal(signal.SIGINT, signal_handler)
    if argv is None:
        pinexq(prog_name="pinexq")
        return
    pinexq(args=argv, prog_name="pinexq")


if __name__ == "__main__":
    main()
File without changes
pinexq/cli/cmd/cli.py ADDED
@@ -0,0 +1,122 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from typing import Optional
5
+ import importlib.metadata
6
+
7
+ import typer.rich_utils # important: import the module, not individual names
8
+
9
+ from .deploy import deploy as deploy_impl, DeployOptions
10
+ from .register import register as register_impl, RegisterOptions
11
+ from .generate import generate_app
12
+ from .initialization import init_project
13
+ from ..docker_tools.client import load_docker_client
14
+ from ..pinexq_tools.project import get_project_meta
15
+
16
+
17
@dataclass
class CLIContext:
    """Root Typer context object (``ctx.obj``) shared by all sub-commands."""
    # Endpoint override from --endpoint / PINEXQ_ENDPOINT; None means "use project config".
    pinexq_endpoint: Optional[str] = None
    # True when --verbose was passed; enables debug output.
    verbose: bool = False
21
+
22
+
23
# Brand the Typer/rich help output (dark orange accents). These are module-level
# attributes of typer.rich_utils, hence the module import above.
typer.rich_utils.STYLE_METAVAR = 'bold dark_orange'
typer.rich_utils.STYLE_USAGE = 'dark_orange'
typer.rich_utils.STYLE_OPTION_ENVVAR = 'dim dark_orange'

# Root CLI application. no_args_is_help shows help when invoked bare;
# invoke_without_command lets the root callback run for e.g. --version.
app = typer.Typer(
    name="pinexq",
    invoke_without_command=True,
    add_completion=False,
    no_args_is_help=True,
    suggest_commands=True,
    rich_markup_mode="rich",
)
34
+
35
+
36
def version_callback(value: bool):
    """Eager callback for ``--version``: print the installed version and exit.

    Only acts when the flag was actually given (Typer invokes option callbacks
    with the flag's value). Raises ``typer.Exit`` to stop further processing.
    """
    if value:
        try:
            # Version comes from the installed distribution metadata of "pinexq-cli".
            version = importlib.metadata.version("pinexq-cli")
            print(f"CLI Version: {version}")
        except importlib.metadata.PackageNotFoundError:
            # Running from a source tree without an installed distribution.
            print("Version not found (package might not be installed)")
        raise typer.Exit()
45
+
46
+
47
@app.callback()
def _pinexq_callback(
    ctx: typer.Context,
    endpoint: Optional[str] = typer.Option(None, "--endpoint", envvar="PINEXQ_ENDPOINT", help="Pinexq endpoint"),
    version: bool = typer.Option(False, "--version", help="Prints the version", callback=version_callback),
    verbose: bool = typer.Option(False, "--verbose", help="Debug output"),
):
    """Root callback: collect global options into the shared CLIContext.

    Sub-commands (and helpers like get_project_meta) read these values via
    ``click.get_current_context().obj``.
    """
    ctx.obj = CLIContext(pinexq_endpoint=endpoint, verbose=verbose)
55
+
56
+
57
@app.command(name="deploy", rich_help_panel="Deploy functions to Pinexq")
def deploy(
    dockerfile: str = typer.Option("./Dockerfile", "-D", "--dockerfile", show_default=True),
    context_dir: str = typer.Option("./", "--context-dir", show_default=True),
    api_key: str = typer.Option(None, "--api-key", envvar="PINEXQ_API_KEY", help="Pinexq API key"),
    functions: list[str] = typer.Option(None, "-f", "--function", help="Select functions to deploy"),
    secrets: list[str] = typer.Option(None, '--secret', help="Secrets to be passed to the docker build"),
):
    """
    Deploy functions to [dark_orange]Pinexq[/dark_orange]. This builds and pushes an OCI-compatible image to Pinexq and registers the function with the API.
    """
    # TODO: use overrides for project_meta
    # Normalize optional CLI values into the options dataclass up front.
    options = DeployOptions(
        dockerfile=dockerfile,
        context_dir=context_dir,
        api_key=api_key or "",
        functions=list(functions or []),
        secrets=list(secrets or []),
    )
    docker_client = load_docker_client()
    project_meta = get_project_meta()
    deploy_impl(options, docker_client, project_meta)
78
+
79
+
80
@app.command(name="init", help="Initialize a pinexq project")
def init(
    path: str = typer.Argument("./"),
    template: str = typer.Option("gh:data-cybernetics/pinexq-project-starter.git", "--template", show_default=True),
    version: str = typer.Option("latest", "--template-version", show_default=True),
    project_name: str = typer.Option(None, "--project-name", help="Project name"),
    pinexq_endpoint: str = typer.Option(None, "--pinexq-endpoint", help="Pinexq endpoint"),
):
    """Scaffold a new pinexq project from the copier starter template.

    Thin wrapper around init_project; optional name/endpoint answers are
    forwarded so copier does not prompt for them.
    """
    init_project(
        path,
        template,
        version,
        project_name=project_name,
        pinexq_endpoint=pinexq_endpoint
    )
95
+
96
@app.command(name="register", help="Register functions in Pinexq")
def register(
    dockerfile: str = typer.Option("./Dockerfile", "-D", "--dockerfile", show_default=True),
    context_dir: str = typer.Option("./", "--context-dir", show_default=True),
    api_key: str = typer.Option(None, "--api-key", envvar="PINEXQ_API_KEY", help="Pinexq API key"),
    functions: list[str] = typer.Option(None, "-f", "--function", help="Select functions to deploy"),
    secrets: list[str] = typer.Option(None, '--secret', help="Secrets to be passed to the docker build"),
):
    """Register processing steps for the project's functions without deploying them."""
    # Normalize optional CLI values into the options dataclass up front.
    options = RegisterOptions(
        dockerfile=dockerfile,
        context_dir=context_dir,
        api_key=api_key or "",
        functions=list(functions or []),
        secrets=list(secrets or []),
    )
    docker_client = load_docker_client()
    project_meta = get_project_meta()
    register_impl(options, docker_client, project_meta)
113
+
114
# Register sub-apps
app.add_typer(generate_app, name="generate")

# Backwards-compatibility for type hints imported in other modules
# Export name `CLI` to represent the root context type
CLI = CLIContext

# For compatibility with the existing import name (pinexq.cli.__main__ imports `pinexq`)
pinexq = app
@@ -0,0 +1,98 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass, field
4
+
5
+ from pinexq_client.job_management import enter_jma, EntryPointHco
6
+ from pinexq_client.job_management.model import ConfigureDeploymentParameters, \
7
+ ScalingConfiguration, ScalingBehaviours, AssignCodeHashParameters, DeploymentStates
8
+
9
+ from pinexq.cli.cmd.register import register_processing_step
10
+ from pinexq.cli.docker_tools.client import ContainerClient
11
+ from pinexq.cli.pinexq_tools.client import get_client
12
+ from pinexq.cli.pinexq_tools.info import get_info
13
+ from pinexq.cli.pinexq_tools.manifest import generate_manifests, BuildOptions
14
+ from pinexq.cli.pinexq_tools.project import PinexqProjectConfig
15
+ from pinexq.cli.utils.console import console, err_console
16
+ from pinexq.cli.utils.const import PINEXQ_PREFIX as PREFIX, PINEXQ_ERROR_PREFIX as ERROR_PREFIX
17
+ from pinexq.cli.utils.uv_utils import is_uv_lockfile_up_to_date
18
+
19
+
20
@dataclass
class DeployOptions:
    """Options for the `pinexq deploy` command, normalized from CLI flags."""
    # Path to the Dockerfile used for both local and amd64 builds.
    dockerfile: str = "./Dockerfile"
    # Docker build context directory.
    context_dir: str = "./"
    # Pinexq API key (empty string when not provided; get_client rejects it).
    api_key: str = ""
    # Function names selected via -f/--function; empty means "all functions".
    functions: list[str] = field(default_factory=list)
    # Raw --secret values forwarded to `docker build --secret`.
    secrets: list[str] = field(default_factory=list)
27
+
28
+
29
def deploy(deploy_command: DeployOptions, container_client: ContainerClient, config: PinexqProjectConfig):
    """Build, push and deploy the project's functions to Pinexq.

    Flow: permission + uv-lockfile checks, manifest generation (local-arch
    build), amd64 base-image build, then per function: register the processing
    step, push the image and configure the deployment. Calls ``exit()`` with a
    non-zero code on any failure; any unexpected exception is reported and
    also terminates the process.
    """
    try:
        pinexq_client = get_client(config.project.endpoint, deploy_command.api_key)
        info = get_info(pinexq_client)
        # Deploying requires the codeContributor grant (or the admin role).
        if 'grant:codeContributor' not in info.user_grants and 'role:admin' not in info.user_grants:
            err_console.print(
                f'{ERROR_PREFIX} You do not have permission to deploy functions. Please ask your administrator or support to grant you the permission.')
            exit(1)
        if not is_uv_lockfile_up_to_date():
            err_console.print(f'{ERROR_PREFIX} uv lockfile is not up to date. Please run `uv lock` to update it.')
            exit(1)

        manifests, functions = generate_manifests(container_client, deploy_command.functions, BuildOptions(
            dockerfile=deploy_command.dockerfile,
            context_dir=deploy_command.context_dir,
            tag=f'{config.project.name}:{config.project.version}',
            secrets=deploy_command.secrets,
            entrypoint=config.project.entrypoint
        ))
        if not manifests:
            err_console.print(f'{ERROR_PREFIX} Failed to list functions in procon.')
        console.print(f'{PREFIX} Deploying following functions: {functions}')

        # This will build the base image again for the destination architecture amd64
        base_image = container_client.build_base_image(deploy_command.context_dir, deploy_command.dockerfile,
                                                       f'{config.project.name}:{config.project.version}',
                                                       secrets=deploy_command.secrets)
        if not base_image:
            console.print(f'{ERROR_PREFIX} Failed to build base image.')
            exit(1)

        # Start registering the PS
        entrypoint: EntryPointHco = enter_jma(pinexq_client)
        console.print(f'{PREFIX} Registering function{"s" if len(functions) > 1 else ""} at {config.project.endpoint}')
        for function_name in functions:
            version = manifests[function_name]['version']
            console.print(f'{PREFIX} start deploying function {function_name}:{version}')
            processing_step = register_processing_step(entrypoint, function_name, version, manifests[function_name])

            # BUG FIX: the original used `or` here, which is true for EVERY state
            # (a value always differs from at least one of two distinct constants),
            # so deployment was skipped unconditionally. Skip only when the step is
            # in an actual deployed state, i.e. neither undefined nor not_deployed.
            if (processing_step.deployment_state != DeploymentStates.undefined
                    and processing_step.deployment_state != DeploymentStates.not_deployed):
                console.print(f'{PREFIX} Processing step for function {function_name}:{version} is already deployed. Skipping deployment.')
                continue
            # NOTE(review): reaches into the private `_entity` attribute — confirm
            # no public accessor for the code hash exists on the HCO.
            if processing_step._entity.properties.code_hash is not None:
                console.print(f'{PREFIX} Processing step for function {function_name}:{version} is already registered with an code artifact. Skipping deployment.')
                continue

            # Push image to registry
            if not container_client.tag_base_image_as_function(base_image, info, function_name, version):
                exit(1)
            digest = container_client.push_function_image(info, function_name, version)
            if not digest:
                console.print(f'{ERROR_PREFIX} Failed to push function image for {function_name}:{version}')
                exit(1)
            # Attach the pushed image digest and the deployment configuration.
            deployment = config.get_function_deployment(function_name)
            processing_step.self_link.navigate().assign_code_hash_action.execute(
                AssignCodeHashParameters(CodeHash=digest))
            processing_step.self_link.navigate().configure_deployment_action.execute(
                ConfigureDeploymentParameters(
                    ResourcePreset=deployment.resource_preset,
                    Entrypoint=config.project.entrypoint,
                    Scaling=ScalingConfiguration(
                        MaxReplicas=deployment.max_replicas,
                        Behaviour=ScalingBehaviours.balanced
                    ),
                )
            )

    except Exception as e:
        err_console.print(f'{ERROR_PREFIX} Error: {e}')
        exit(1)
@@ -0,0 +1,60 @@
1
+ import typer
2
+ from copier import run_copy, CopierAnswersInterrupt
3
+ from copier.errors import CopierError
4
+ from rich.console import Console
5
+
6
# Sub-app mounted under `pinexq generate`.
generate_app = typer.Typer(name="generate", no_args_is_help=True)

# REUSABLE OPTIONS — shared typer.Option declarations for the generate commands.
Template = typer.Option("gh:data-cybernetics/pinexq-project-starter.git", "--template", show_default=True)
Version = typer.Option("latest", "--template-version", show_default=True)
Path = typer.Option("./", "--path", show_default=True)

# Error output goes to stderr so stdout stays clean for generated content.
err_console = Console(stderr=True)
14
+
15
+
16
@generate_app.command(name="project-toml", help="Generate project.toml")
def generate_project_toml(
    template: str = Template,
    version: str = Version,
    path: str = Path,
):
    """Render only pinexq.toml from the starter template into *path*.

    The data dict pre-answers the template's python-version questions so
    copier does not prompt for them.
    """
    data = {'python_versions': ['3.14'], 'default_python_version': '3.14'}
    generate_project_file(path, template, version, target_files=["pinexq.toml"], data=data)
24
+
25
+
26
@generate_app.command(name="dockerfile", help="Generate Dockerfile")
def generate_dockerfile(
    template: str = Template,
    version: str = Version,
    path: str = Path,
    python_version: str = typer.Option(None, "--python-version", help="Python version to use in Dockerfile")
):
    """Render Dockerfile and .dockerignore from the starter template into *path*.

    Raises typer.BadParameter when --python-version is outside the supported set.
    """
    allowed_versions = ['3.14', '3.13', '3.12', '3.11']
    # 'dummy' answers satisfy template questions whose values do not affect the
    # generated Dockerfile (those files are excluded from the render anyway).
    data = {'project_name': 'dummy', 'pinexq_endpoint': 'dummy', 'python_versions': allowed_versions}
    if python_version:
        if python_version not in allowed_versions:
            raise typer.BadParameter(f'Provided version ({python_version}) is not allowed. Allowed versions are: {allowed_versions}')
        data['default_python_version'] = python_version
    generate_project_file(
        path,
        template,
        version,
        target_files=["Dockerfile", ".dockerignore"],
        data=data
    )
46
+
47
+
48
def generate_project_file(path: str, template: str, template_version: str, target_files: list[str], data: dict | None = None):
    """Render only *target_files* from a copier template into *path*.

    Args:
        path: Destination directory.
        template: Copier template source (e.g. a gh: URL).
        template_version: Template ref; "latest" means copier's default HEAD.
        target_files: File names to render; everything else is excluded.
        data: Pre-supplied answers for the template's questions.

    Copier errors and user interrupts are reported to stderr, not raised.
    """
    try:
        run_copy(
            template,
            path,
            vcs_ref=template_version if template_version != "latest" else None,
            # Exclude everything, then re-include just the requested files.
            # FIX: the comprehension variable previously shadowed the
            # `target_files` parameter (confusing, though it happened to work).
            exclude=["*", *[f"!{target_file}" for target_file in target_files]],
            data=data
        )
    except CopierAnswersInterrupt:
        err_console.print("Project generation aborted")
    except CopierError as e:
        err_console.print(f"Error during project generation: {e}")
@@ -0,0 +1,24 @@
1
+ from __future__ import annotations
2
+ from rich.console import Console
3
+ from copier import run_copy, CopierAnswersInterrupt
4
+ from copier.errors import CopierError
5
+
6
# stderr for errors, plain (non-highlighting) stdout console for status text
err_console = Console(stderr=True)
console = Console(highlight=False)


def init_project(path: str, template: str, version: str, project_name: str | None = None, pinexq_endpoint: str | None = None) -> None:
    """Bootstrap a pinexq project in *path* from a copier template.

    Pre-answers ``project_name`` / ``pinexq_endpoint`` when given so copier
    does not prompt for them. Copier errors and user interrupts are reported
    instead of raised.
    """
    answers: dict[str, str] = {}
    if project_name:
        answers["project_name"] = project_name
    if pinexq_endpoint:
        answers["pinexq_endpoint"] = pinexq_endpoint
    try:
        run_copy(
            template,
            path,
            vcs_ref=None if version == "latest" else version,
            data=answers,
        )
    except CopierAnswersInterrupt:
        console.print("Project generation aborted")
    except CopierError as e:
        err_console.print(f"Error during project generation: {e}")
@@ -0,0 +1,91 @@
1
+ from dataclasses import dataclass, field
2
+
3
+ from pinexq_client.core import ApiException
4
+ from pinexq_client.core.hco.upload_action_hco import UploadParameters
5
+ from pinexq_client.job_management.model import ProcessingStepQueryParameters, ProcessingStepFilterParameter, CreateProcessingStepParameters, Pagination, FunctionNameMatchTypes
6
+ from rich.console import Console
7
+
8
+ from pinexq_client.job_management import enter_jma, EntryPointHco, ProcessingStepsRootHco
9
+
10
+ from pinexq.cli.pinexq_tools.info import get_info
11
+ from pinexq.cli.pinexq_tools.manifest import generate_manifests, BuildOptions
12
+ from pinexq.cli.utils.const import PINEXQ_PREFIX as PREFIX, PINEXQ_ERROR_PREFIX as ERROR_PREFIX
13
+ from pinexq.cli.docker_tools.client import ContainerClient
14
+ from pinexq.cli.pinexq_tools.client import get_client
15
+ from pinexq.cli.pinexq_tools.project import PinexqProjectConfig
16
+ from pinexq.cli.utils.uv_utils import is_uv_lockfile_up_to_date
17
+
18
# stderr for errors, plain (non-highlighting) stdout console for status text
err_console = Console(stderr=True)
console = Console(highlight=False)


@dataclass
class RegisterOptions:
    """Options for the `pinexq register` command, normalized from CLI flags."""
    # Path to the Dockerfile used for the local build.
    dockerfile: str = "./Dockerfile"
    # Docker build context directory.
    context_dir: str = "./"
    # Pinexq API key (empty string when not provided; get_client rejects it).
    api_key: str = ""
    # Function names selected via -f/--function; empty means "all functions".
    functions: list[str] = field(default_factory=list)
    # Raw --secret values forwarded to `docker build --secret`.
    secrets: list[str] = field(default_factory=list)
29
+
30
+
31
def register(options: RegisterOptions, container_client: ContainerClient, config: PinexqProjectConfig):
    """Register the project's functions as processing steps in Pinexq.

    Performs permission and uv-lockfile checks, generates manifests from a
    local image build, then registers one processing step per function.
    Terminates the process with exit code 1 on any failure.
    """
    try:
        client = get_client(config.project.endpoint, options.api_key)
        info = get_info(client)
        allowed = 'grant:codeContributor' in info.user_grants or 'role:admin' in info.user_grants
        if not allowed:
            err_console.print(
                f'{ERROR_PREFIX} You do not have permission to register or deploy functions. Please ask your administrator or support to grant you the permission.')
            exit(1)
        if not is_uv_lockfile_up_to_date():
            err_console.print(f'{ERROR_PREFIX} uv lockfile is not up to date. Please run `uv lock` to update it.')
            exit(1)

        build_options = BuildOptions(
            dockerfile=options.dockerfile,
            context_dir=options.context_dir,
            tag=f'{config.project.name}:{config.project.version}',
            secrets=options.secrets,
            entrypoint=config.project.entrypoint,
        )
        manifests, functions = generate_manifests(container_client, options.functions, build_options)
        if not manifests:
            err_console.print(f'{ERROR_PREFIX} Failed to list functions in procon.')
        console.print(f'{PREFIX} Register following functions: {functions}')

        # Start registering the PS
        entry: EntryPointHco = enter_jma(client)
        console.print(f'{PREFIX} Registering function at {config.project.endpoint}')
        for name in functions:
            fn_version = manifests[name]['version']
            console.print(f'{PREFIX} Deploying function {name}:{fn_version}')
            register_processing_step(entry, name, fn_version, manifests[name])
    except Exception as e:
        err_console.print(f'{ERROR_PREFIX} Error: {e}')
        exit(1)
64
+
65
+
66
def register_processing_step(entrypoint: EntryPointHco, function_name: str, version: str, manifest: dict[str, dict]):
    """Ensure a processing step exists for ``function_name``/``version``.

    Queries for an exact-name match first; creates the step and uploads the
    manifest as its configuration when none exists. Returns the processing
    step HCO (callers such as deploy inspect its deployment state). Exits the
    process on registration errors.
    """
    processing_step_root: ProcessingStepsRootHco = entrypoint.processing_step_root_link.navigate()
    ps_filter = ProcessingStepFilterParameter(FunctionName=function_name, Version=version, ShowDeprecated=False, FunctionNameMatchType=FunctionNameMatchTypes.match_exact)
    existing_steps = processing_step_root.query_action.execute(ProcessingStepQueryParameters(Filter=ps_filter, IncludeRemainingTags=False, Pagination=Pagination(PageSize=1, PageOffset=None)))
    if existing_steps.current_entities_count == 0:
        console.print(f'{PREFIX} Processing step for function {function_name}:{version} does not exist. Creating new one.')
        try:
            params = CreateProcessingStepParameters(FunctionName=function_name, Title=function_name,
                                                    Version=version)
            processing_step = processing_step_root.register_new_action.execute(params)
            processing_step.upload_configuration_action.execute(
                UploadParameters(filename='UploadFile', mediatype='application/json',
                                 json=manifest))
            # BUG FIX: the original never returned the newly created step, so
            # callers that use the return value (e.g. deploy) got None or hit an
            # index error on the empty query result.
            return processing_step
        except ApiException as e:
            if e.problem_details.detail == 'A processing step with the same function name already exists.':
                console.print(
                    f'{PREFIX} Processing step for function {function_name}:{version} in version [bold dark_orange]{version}[/bold dark_orange] already exists. Skipping deployment.')
                exit(0)
            else:
                err_console.print(
                    f'{ERROR_PREFIX} Error registering processing step for function {function_name}:{version}:')
                # BUG FIX: the message above ends with ':' but the original printed
                # no detail at all — include the API error.
                err_console.print(str(e))
                exit(1)
    else:
        console.print(
            f'{PREFIX} Processing step for function {function_name}:{version} in version [bold dark_orange]{version}[/bold dark_orange] already exists.')
        return existing_steps.processing_steps[0]
File without changes
@@ -0,0 +1,229 @@
1
+ import json
2
+ import os
3
+ import subprocess
4
+ from typing import Optional, Union
5
+
6
+ import docker
7
+ from docker import DockerClient
8
+ from docker.errors import APIError
9
+ from docker.models.images import Image
10
+ from rich.progress import Progress, BarColumn, TextColumn, TaskProgressColumn, DownloadColumn, TransferSpeedColumn, \
11
+ TaskID
12
+ from rich.console import Console
13
+
14
+ from pinexq.cli.pinexq_tools.info import Info
15
+ from pinexq.cli.utils.const import DOCKER_PREFIX as PREFIX
16
+
17
# stderr for errors, plain (non-highlighting) stdout console for status text
err_console = Console(stderr=True)
console = Console(highlight=False)


def print_stream_logs(logs):
    """Print docker build 'stream' log chunks, skipping blank lines.

    Best-effort: any malformed chunk is ignored rather than aborting the build.
    """
    try:
        if not logs:
            return
        for chunk in logs:
            text = chunk.get('stream')
            if text and text != '\n':
                console.print(f"{text.rstrip()}")
    except Exception:
        # Deliberately swallow: log rendering must never break the build flow.
        pass
29
+
30
+
31
def print_push_logs(logs):
    """Render docker push logs using rich progress bars per layer id.

    ``logs`` is the decoded streaming output of docker-py's ``images.push``:
    a sequence of dicts with optional ``status``, ``id`` (layer id) and
    ``progressDetail`` (``current``/``total`` byte counts). One progress task
    is maintained per layer; the whole rendering is best-effort and reports
    (but otherwise ignores) any error.
    """
    try:
        # One rich task per docker layer id, plus the set of layers that finished.
        layer_tasks: dict[str, TaskID] = {}
        finished_layers: set[str] = set()

        progress = Progress(
            TextColumn("{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            transient=True,  # clear the bars once the push is done
        )

        with progress:
            for chunk in logs or []:
                status = chunk.get('status')
                layer_id = chunk.get('id')
                detail = chunk.get('progressDetail') or {}
                total = detail.get('total')
                current = detail.get('current')

                # If no layer id, print general status when not mid-push UI
                if not layer_id:
                    if status:
                        print(f"{status.rstrip()}")
                    continue

                # Ensure a task exists for this layer
                if layer_id not in layer_tasks:
                    desc = f"Waiting {layer_id}"
                    task_id = progress.add_task(desc, total=total if total else 1, completed=0)
                    layer_tasks[layer_id] = task_id

                task_id = layer_tasks[layer_id]

                if status == 'Waiting':
                    progress.update(task_id, description=f"Waiting {layer_id}")
                    continue

                if status == 'Pushing':
                    if total and current is not None:
                        progress.update(task_id, total=total, completed=current, description=f"Pushing {layer_id}")
                    elif current is not None:
                        # Unknown total; treat as indeterminate with growing total
                        progress.update(task_id, total=None, completed=current, description=f"Pushing {layer_id}")
                    else:
                        progress.update(task_id, description=f"Pushing {layer_id}")
                    continue

                if status == 'Pushed':
                    # Mark the layer complete (fall back to a 1/1 bar when no byte total was seen).
                    if total:
                        progress.update(task_id, total=total, completed=total, description=f"Pushed {layer_id}")
                    else:
                        progress.update(task_id, total=1, completed=1, description=f"Pushed {layer_id}")
                    finished_layers.add(layer_id)
                    continue

                # Layers already present in the registry (or cross-repo mounted) need no push.
                if status == 'Layer already exists' or (isinstance(status, str) and status.startswith('Mounted from')):
                    progress.update(task_id, total=1, completed=1, description=f"Exists {layer_id}")
                    finished_layers.add(layer_id)
                    continue

                # Fallback update to show any other status
                if status:
                    progress.update(task_id, description=f"{status} {layer_id}")
    except Exception as e:
        # Best-effort rendering: report the error but never fail the push.
        err_console.print(e)
        pass
101
+
102
+
103
class ContainerClient:
    """Thin wrapper around docker-py plus the `docker` CLI for BuildKit builds.

    Builds use ``subprocess`` with the docker CLI because docker-py does not
    support BuildKit; push/tag/run operations go through the docker-py client.
    """

    def __init__(self, client: DockerClient):
        # docker-py client obtained from docker.from_env()
        self.client = client

    def pre_build_image(self, context_dir: str, dockerfile: str, tag: str, verbose=True, secrets=None) -> \
            Optional[Image]:
        """Build the image for the LOCAL architecture (used for manifest generation).

        Returns the built Image, or None when the build failed.
        """
        if secrets is None:
            secrets = []
        # Expand each secret into a '--secret <value>' CLI pair.
        secrets_arg = [arg for secret in secrets for arg in ('--secret', secret)]
        command = ['docker', 'build', '--progress', 'plain', '-t', tag, '-f', dockerfile, *secrets_arg, context_dir]
        return self.build_image(command, tag, verbose)

    def build_base_image(self, context_dir: str, dockerfile: str, tag: str, verbose=True, secrets=None) -> Optional[
        Image]:
        """Build the image for the deployment target platform (linux/amd64)."""
        # We need to directly build the docker image since buildkit is not supported by docker-py
        # https://github.com/docker/docker-py/issues/2230
        if secrets is None:
            secrets = []
        secrets_arg = [arg for secret in secrets for arg in ('--secret', secret)]
        command = ['docker', 'build', '--progress', 'plain',
                   '--platform', 'linux/amd64',
                   '-t', tag, '-f', dockerfile, *secrets_arg, context_dir]
        return self.build_image(command, tag, verbose)

    def build_image(self, command: list[str], tag: str, verbose=True) -> Optional[Image]:
        """Run a `docker build` command, streaming its output into a spinner.

        In verbose mode all build output is collected and re-printed after the
        spinner closes; otherwise only the latest line is shown. Returns the
        tagged Image on success, None on a non-zero exit code.
        """
        console.print(f"{PREFIX} Running docker build command: {' '.join(command)}")
        with subprocess.Popen(
                command,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,  # interleave stderr into the same stream
                text=True,
                bufsize=1,  # line-buffered so lines arrive as docker emits them
                universal_newlines=True,
                env=os.environ,
        ) as process:
            import time
            message = f"{PREFIX} [bold dodger_blue1]Waiting for docker...[/]"
            collected = [message]
            with console.status(message, spinner="dots") as status:
                def verbose_log():
                    # Accumulate every line and re-render the whole status text.
                    for line in process.stdout:
                        collected.append(line.strip())
                        status.update('\n'.join(collected))
                        time.sleep(0.01)

                def log():
                    # Only show the most recent build line next to the spinner.
                    for line in process.stdout:
                        status.update(f'{message}: {line.strip()}')
                        time.sleep(0.01)

                verbose_log() if verbose else log()
            if verbose:
                console.print('\n'.join(collected))
            return_code = process.poll()
            if return_code == 0:
                return self.client.images.get(tag)
            else:
                err_console.print(f"{PREFIX} Docker build failed with return code {return_code}")
                return None

    @staticmethod
    def tag_base_image_as_function(image: Image, info: Info, function_name: str, function_version: str) -> bool:
        """Tag the base image as <registry>/<context>/<function>:<version>; True on success."""
        dist_tag = f"{info.registry_endpoint}/{info.get_context_id()}/{function_name}:{function_version}"
        try:
            image.tag(repository=dist_tag)
            console.print(f"{PREFIX} Tagged image {image.id} as {dist_tag}")
            return True
        except APIError as e:
            err_console.print(f"{PREFIX} Docker API error: {e}")
            return False

    def push_function_image(self, info: Info, function_name: str, function_version: str) -> Union[str, None]:
        """Push the function image to the Pinexq registry.

        Returns the pushed image's digest (see get_image_digest), or None on a
        docker API error.
        """
        repo = f"{info.registry_endpoint}/{info.get_context_id()}/{function_name}"
        try:
            push_logs = self.client.images.push(repo, tag=function_version, auth_config=info.get_docker_auth(),
                                                stream=True, decode=True)
            print_push_logs(push_logs)
            console.print(f"{PREFIX} Pushed function image {repo}:{function_version}")
            # Re-fetch the image: RepoDigests is only populated after the push.
            image = self.client.images.get(f'{repo}:{function_version}')
            return self.get_image_digest(image)
        except APIError as e:
            err_console.print(f"{PREFIX} Docker API error: {e}")
            return None

    def run_function_list(self, image: Image, entrypoint: str = 'main.py') -> list[str] | None:
        """Run `<entrypoint> list -j` in the image and parse its JSON output.

        Returns the list of function names, or None when the container fails
        (errors are printed, not raised).
        """
        try:
            output_bytes = self.client.containers.run(image.id, f'{entrypoint} list -j', detach=False, remove=True)
            output_str = output_bytes.decode("utf-8").strip()
            return json.loads(output_str)
        except docker.errors.ContainerError as e:
            err_console.print(f"Container exited with non-zero exit code: {e.exit_status}")
            err_console.print(f"Stderr: {e.stderr}")
        except docker.errors.ImageNotFound:
            err_console.print("Image not found locally.")
        except docker.errors.APIError as e:
            err_console.print(f"Docker API Error: {e}")

    def run_manifest(self, image: Image, function: str, entrypoint: str = 'main.py') -> dict | None:
        """Run `<entrypoint> signature --function <name> -j` and parse the manifest JSON.

        Returns the manifest dict, or None when the container fails.
        """
        try:
            output_bytes = self.client.containers.run(image.id, f'{entrypoint} signature --function {function} -j',
                                                      detach=False, remove=True)
            output_str = output_bytes.decode("utf-8").strip()
            return json.loads(output_str)
        except docker.errors.ContainerError as e:
            err_console.print(f"Container exited with non-zero exit code: {e.exit_status}")
            err_console.print(f"Stderr: {e.stderr}")
        except docker.errors.ImageNotFound:
            err_console.print("Image not found locally.")
        except docker.errors.APIError as e:
            err_console.print(f"Docker API Error: {e}")

    @staticmethod
    def get_image_digest(image: Image) -> str | None:
        """Return the registry digest (sha256:...) of the image, or its local id.

        Falls back to image.id when no RepoDigests entry exists (image never pushed).
        """
        # Hack to get the correct image digest for docker v2 manifests version
        repo_digests = image.attrs.get('RepoDigests')
        if not repo_digests:
            return image.id
        return repo_digests[0].split('@')[-1] if len(repo_digests) > 0 else image.id
221
+
222
+
223
def load_docker_client() -> ContainerClient:
    """Connect to the local docker daemon and wrap it in a ContainerClient.

    Terminates the process with exit code 1 when the docker socket is not
    reachable.
    """
    try:
        return ContainerClient(docker.from_env())
    except Exception as e:
        err_console.print(f'cannot connect to docker socket: {e}')
        exit(1)
File without changes
@@ -0,0 +1,8 @@
1
+ from httpx import Client
2
+
3
+
4
+ def get_client(endpoint: str, api_key: str):
5
+ if api_key is None or api_key == "":
6
+ raise Exception("PINEXQ_API_KEY is required. Provide it by the environment variable `PINEXQ_API_KEY` globally or in .env or using --api-key flag.")
7
+ pinexq_endpoint = f'https://{endpoint.replace("https://", "").replace("http://", "")}'
8
+ return Client(base_url=pinexq_endpoint, headers={'x-api-key': api_key})
@@ -0,0 +1,37 @@
1
+ from typing import Optional
2
+
3
+ from httpx import Client
4
+ from pinexq_client.job_management import EntryPointHco, enter_jma
5
+ from pydantic import BaseModel
6
+
7
+
8
class Info(BaseModel):
    """User and registry information resolved from the JMA entry point."""

    user_id: str
    # Organization id when the user belongs to one; None for personal accounts.
    org_id: Optional[str] = None
    # Grant/role strings, e.g. 'grant:codeContributor', 'role:admin'.
    user_grants: list[str]
    # OCI registry endpoint used for function images.
    registry_endpoint: str
    # API key echoed back from the client headers; doubles as registry password.
    api_key: str

    def get_context_id(self) -> str:
        """Return the registry namespace: the organization id if set, else the user id."""
        if not self.org_id:
            return self.user_id
        else:
            return self.org_id

    def get_docker_auth(self) -> dict[str, str]:
        """Return a docker auth_config dict (registry login = user id + API key)."""
        return {
            "username": self.user_id,
            "password": self.api_key,
        }
26
+
27
+
28
def get_info(client: Client) -> Info:
    """Fetch current-user and registry info from the JMA entry point.

    The API key is taken back out of the client's request headers so it can be
    reused as the docker registry password.
    """
    entry: EntryPointHco = enter_jma(client)
    raw_info = entry.info_link.navigate()
    return Info(
        user_id=str(raw_info.current_user.user_id),
        org_id=raw_info.organization_id,
        user_grants=raw_info.current_user.user_grants,
        registry_endpoint=str(raw_info.deployment_registry_endpoint.get_url()),
        api_key=client.headers.get("x-api-key"),
    )
@@ -0,0 +1,55 @@
1
+ from dataclasses import dataclass
2
+ import json
3
+ import os
4
+ from pathlib import Path
5
+ from typing import Tuple
6
+
7
+ from docker.models.images import Image
8
+
9
+ from pinexq.cli.docker_tools.client import ContainerClient
10
+ from pinexq.cli.utils.console import console, err_console
11
+ from pinexq.cli.utils.const import PINEXQ_PREFIX as PREFIX, PINEXQ_ERROR_PREFIX as ERROR_PREFIX
12
+
13
+
14
def generate_manifest_signature(container_client: ContainerClient, base_image: Image, function_name: str,
                                entrypoint: str) -> dict:
    """Run the signature command inside *base_image* and return the manifest dict.

    Thin wrapper over ContainerClient.run_manifest; note that run_manifest may
    return None on container failure even though this is annotated as dict.
    """
    return container_client.run_manifest(base_image, function_name, entrypoint=entrypoint)
17
+
18
+
19
@dataclass
class BuildOptions:
    """Parameters for the local manifest-generation image build."""
    # Path to the Dockerfile.
    dockerfile: str
    # Docker build context directory.
    context_dir: str
    # Raw --secret values forwarded to `docker build --secret`.
    secrets: list[str]
    # Image tag, e.g. '<project>:<version>'.
    tag: str
    # Entrypoint script executed inside the container (e.g. 'main.py').
    entrypoint: str
26
+
27
+
28
def generate_manifests(container_client: ContainerClient, functions: list[str], build_options: BuildOptions) -> Tuple[dict, list]:
    """Build the project image locally and generate one manifest per function.

    Args:
        container_client: Docker wrapper used to build and run the image.
        functions: Function names selected on the command line; empty means
            "all functions found in the container".
        build_options: Local build parameters.

    Returns:
        (manifests, functions): manifests maps function name -> signature dict
        (each also written to .manifests/<name>.json), functions is the final
        selection. Exits the process when the image build fails.
    """
    # Generate manifests
    # We are building the image for the current execution context to generate manifests
    # This will build for the local architecture to make sure the manifest generation can be executed locally
    base_image = container_client.pre_build_image(build_options.context_dir, build_options.dockerfile,
                                                  build_options.tag,
                                                  secrets=build_options.secrets)
    if not base_image:
        console.print(f'{ERROR_PREFIX} Failed to build base image.')
        exit(1)
    # NOTE(review): run_function_list may return None; the loop below would then
    # raise — presumably acceptable since the error was already printed. Confirm.
    container_functions = container_client.run_function_list(base_image, entrypoint=build_options.entrypoint)
    if not container_functions:
        err_console.print(f'{ERROR_PREFIX} Failed to list functions in procon.')
    console.print(f'{PREFIX} Found following functions in container: {container_functions}')
    if len(functions) == 0:
        functions = container_functions
    else:
        console.print(f'{PREFIX} Functions specified in command line: {functions}')
        # Keep only requested functions that actually exist in the container.
        functions = [f for f in container_functions if f in functions]
    manifests = {}
    # Hoisted out of the loop: the directory only needs to be created once.
    os.makedirs('.manifests', exist_ok=True)
    for function_name in functions:
        signature = generate_manifest_signature(container_client, base_image, function_name, build_options.entrypoint)
        manifests[function_name] = signature
        manifest_path = Path('.manifests') / f"{signature['function_name']}.json"
        with open(manifest_path, 'w') as f:
            json.dump(signature, f, indent=2)
    return manifests, functions
@@ -0,0 +1,87 @@
1
+ from typing import Tuple, Union
2
+ import click
3
+ import tomli
4
+ from pinexq_client.job_management.model import DeploymentResourcePresets
5
+ from pydantic import BaseModel, Field, field_validator
6
+
7
+ from pinexq.cli.utils.const import PINEXQ_ERROR_PREFIX as ERROR_PREFIX
8
+
9
+
10
class PinexqDeploymentSetting(BaseModel):
    """Deployment settings for a worker: resource preset and replica cap.

    Used both as the project-wide default and as a per-function override
    (see PinexqProjectConfig.get_function_deployment).
    """

    # Preset enum from pinexq_client (CPU/memory tier for the deployment).
    resource_preset: DeploymentResourcePresets
    # Upper bound on the number of replicas to scale to.
    max_replicas: int
13
+
14
+
15
class PinexqProjectDetails(BaseModel):
    """`[project]` section of pinexq.toml, enriched with pyproject.toml metadata.

    Populated via get_project_meta(), which merges name/version from
    pyproject.toml with the pinexq.toml project table and any CLI override.
    """

    # NOTE(review): Field(None) on non-optional str fields means a missing
    # value silently becomes None without validation (pydantic does not
    # validate defaults by default) — confirm callers always supply these.
    name: str = Field(None, alias="name")
    # TOML key is 'pinexq_endpoint'; may be overridden by the CLI flag.
    endpoint: str = Field(None, alias="pinexq_endpoint")
    # Worker entry script executed inside the container.
    entrypoint: str = Field('main.py')
    version: str = Field(None)

    @field_validator('entrypoint')
    @classmethod
    def validate_entrypoint(cls, v: str) -> str:
        """Reject entrypoints that are not python files (.py / .pyc)."""
        if not v.endswith(('.py', '.pyc')):
            raise ValueError("entrypoint must have a python extension")
        return v
27
+
28
+
29
class PinexqProjectConfig(BaseModel):
    """Aggregated CLI configuration assembled from pinexq.toml and pyproject.toml."""

    # read from pinexq.toml
    project: PinexqProjectDetails = Field(None)
    deployment: PinexqDeploymentSetting = Field(None)
    functions: dict[str, PinexqDeploymentSetting] = Field({}, alias="function")

    # read from pyproject.toml

    def get_function_deployment(self, function_name: str) -> PinexqDeploymentSetting:
        """Resolve deployment settings for *function_name*.

        A per-function `[function.<name>]` entry takes precedence; otherwise
        the project-wide `[deployment]` defaults are used.
        """
        override = self.functions.get(function_name)
        source = override if override else self.deployment
        return PinexqDeploymentSetting(
            resource_preset=source.resource_preset,
            max_replicas=source.max_replicas,
        )
42
+
43
+
44
def get_project_meta() -> PinexqProjectConfig:
    """Assemble the project configuration for the current working directory.

    Merges, in increasing precedence: name/version read from pyproject.toml,
    the `[project]` table of pinexq.toml, and an endpoint override supplied
    on the command line (stored on the click context object).

    Exits the process with status 1 when pinexq.toml cannot be parsed into
    a valid configuration.
    """
    py_project_name, version = _read_pyproject_toml()
    ctx = click.get_current_context()
    try:
        pinexq_config = _read_pinexq_toml()
        # The CLI flag wins over the endpoint configured in pinexq.toml.
        project_override = {}
        if ctx.obj.pinexq_endpoint:
            project_override = {'pinexq_endpoint': ctx.obj.pinexq_endpoint}
        merged_project = {'version': version, 'name': py_project_name}
        merged_project.update(pinexq_config['project'])
        merged_project.update(project_override)
        config = PinexqProjectConfig(**{**pinexq_config, 'project': merged_project})
    except ValueError as e:
        print(f'{ERROR_PREFIX} Error reading pinexq.toml: {str(e)}')
        exit(1)
    return config
58
+
59
+
60
def _read_pinexq_toml() -> dict[str, Union[str, int]]:
    """Parse pinexq.toml from the working directory.

    Exits the process with status 1 when the file is missing or unparsable.
    """
    try:
        with open("pinexq.toml", "rb") as config_file:
            return tomli.load(config_file)
    except FileNotFoundError:
        print(f'{ERROR_PREFIX} pinexq.toml file not found in current directory')
        exit(1)
    except Exception as e:
        print(f'{ERROR_PREFIX} Error reading pinexq.toml: {str(e)}')
        exit(1)
71
+
72
+
73
def _read_pyproject_toml() -> Tuple[str, str]:
    """Return (project name, version) read from pyproject.toml.

    Missing keys default to "" and "0.0.0" respectively. Exits the
    process with status 1 when the file is missing or unparsable.
    """
    try:
        with open("pyproject.toml", "rb") as f:
            parsed = tomli.load(f)
    except FileNotFoundError:
        print(f'{ERROR_PREFIX} pyproject.toml file not found in current directory')
        exit(1)
    except Exception as e:
        print(f'{ERROR_PREFIX} Error reading pyproject.toml: {str(e)}')
        exit(1)
    project_table = parsed.get("project", {})
    return project_table.get("name", ""), project_table.get("version", "0.0.0")
File without changes
@@ -0,0 +1,5 @@
1
# Shared rich console instances for all CLI output.
from rich.console import Console

# Error output goes to stderr so stdout remains clean for piping.
err_console = Console(stderr=True)
# highlight=False stops rich from auto-styling numbers/paths in normal output.
console = Console(highlight=False)
5
+
@@ -0,0 +1,4 @@
1
# Rich-markup message prefixes used throughout the CLI console output.
PINEXQ_PREFIX = '🌲 [bold dark_orange]pinexq:[/bold dark_orange]'
PINEXQ_ERROR_PREFIX = '🚨 [bold red]pinexq:[/bold red]'
DOCKER_PREFIX = '🐋 [bold dodger_blue1]docker:[/bold dodger_blue1]'
4
+
@@ -0,0 +1,14 @@
1
import subprocess


def is_uv_lockfile_up_to_date() -> bool:
    """Return True when uv.lock in the current directory matches pyproject.toml.

    Runs ``uv lock --check``, which exits 0 iff the lockfile is up to date.

    Returns:
        True if the lockfile is current; False when it is stale or when the
        check could not be performed (e.g. the ``uv`` binary is not installed).
    """
    try:
        result = subprocess.run(
            ['uv', 'lock', '--check'],
            cwd='.',
            capture_output=True,  # equivalent to stdout=PIPE, stderr=PIPE
            text=True,
        )
    except FileNotFoundError:
        # BUG FIX: previously an uncaught FileNotFoundError crashed the CLI
        # when uv was not on PATH; treat "cannot verify" as "not up to date".
        return False
    return result.returncode == 0
13
+
14
+
@@ -0,0 +1,34 @@
1
+ Metadata-Version: 2.3
2
+ Name: pinexq-cli
3
+ Version: 0.1.5
4
+ Summary: pinexq command-line interface
5
+ Author: Sebastian Höfer, Sebastian Boerakker
6
+ Author-email: Sebastian Höfer <hoefer@data-cybernetics.com>, Sebastian Boerakker <boerakker@data-cybernetics.com>
7
+ License: MIT
8
+ Requires-Dist: typer>=0.12.5
9
+ Requires-Dist: copier>=9.10.2
10
+ Requires-Dist: docker>=7.1.0
11
+ Requires-Dist: dotenv>=0.9.9
12
+ Requires-Dist: pinexq-client>=0.10.4rc1
13
+ Requires-Dist: pydantic>=2.12.0
14
+ Requires-Dist: rich>=14.2.0
15
+ Requires-Dist: tomli>=2.3.0
16
+ Requires-Dist: importlib>=1.0.4
17
+ Maintainer: Sebastian Höfer, Sebastian Boerakker
18
+ Maintainer-email: Sebastian Höfer <hoefer@data-cybernetics.com>, Sebastian Boerakker <boerakker@data-cybernetics.com>
19
+ Requires-Python: >=3.13
20
+ Description-Content-Type: text/markdown
21
+
22
+ # pinexq CLI
23
+
24
+ A minimal command-line interface for creating and running pinexq workers.
25
+
26
+ Install (editable):
27
+
28
+ - Using uv or pip, from the project root:
29
+ - `uv pip install -e .` or `pip install -e .`
30
+
31
+ Usage:
32
+
33
+ - Global options must precede the subcommand.
34
+ - Authentication: Provide `--api-key` or set the `PINEXQ_API_KEY` environment variable. The CLI prefers the flag over the env var.
@@ -0,0 +1,23 @@
1
+ pinexq/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ pinexq/cli/__main__.py,sha256=qhDROx55ETVxBT1OutXvTmmAKMwV7C_Wp1Rad6Xj7sg,586
3
+ pinexq/cli/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ pinexq/cli/cmd/cli.py,sha256=vJxAjqQwQylHDnSAJf4Gxpb58kex2jxzKzWCHbre_8A,4760
5
+ pinexq/cli/cmd/deploy.py,sha256=s7uzltvbZZoSFc7GnIO8k4LUPogB84w1kBPK3Yqscs0,5336
6
+ pinexq/cli/cmd/generate.py,sha256=PFLuh36cqHP6C0D8mpFaeYR2MHs8VDVwDTV-JInofxQ,2310
7
+ pinexq/cli/cmd/initialization.py,sha256=F0Jde8uP77dg0pI0wJzaOvQbLhXxzTCTfQ8AyQ3GDxY,856
8
+ pinexq/cli/cmd/register.py,sha256=B9oTkvbz-0vO9LYve5LmBWvyrL-omLu5k_kygMWEDSs,5089
9
+ pinexq/cli/docker_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ pinexq/cli/docker_tools/client.py,sha256=i7zrsx_UPrNaivUhlJkyyONyB5veL8Y_VOFRDrL_n5U,9830
11
+ pinexq/cli/pinexq_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
+ pinexq/cli/pinexq_tools/client.py,sha256=oyzNPxR8K5We344A_IxqvuL0t0TwgqZ23NZLlGUKREo,436
13
+ pinexq/cli/pinexq_tools/info.py,sha256=mjpXrR2l_kgZvMKYm9wI_T9hwwzfs9oot7wM4NvW7Vw,996
14
+ pinexq/cli/pinexq_tools/manifest.py,sha256=UuxD7I3vdoBLZmdYmDXS1AXwj2vDA2s65P55uJAIQUw,2485
15
+ pinexq/cli/pinexq_tools/project.py,sha256=-HOKfuyNKeVAhGpaKZo_-B5vVHYm9teuvDe1JcB0ZLQ,3228
16
+ pinexq/cli/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
+ pinexq/cli/utils/console.py,sha256=xJ_p6spPCYiiBGXnV37kZJufO-MveByETX1EcGHbn1U,105
18
+ pinexq/cli/utils/const.py,sha256=efk6g2WIYjGcRQ96dZzgRMDNmvToL51rXFFAZNqmsms,197
19
+ pinexq/cli/utils/uv_utils.py,sha256=6X-jExlOD_OvpE8mprEuUaV-B6KLSUHWs_ZAVFl_c5c,267
20
+ pinexq_cli-0.1.5.dist-info/WHEEL,sha256=KSLUh82mDPEPk0Bx0ScXlWL64bc8KmzIPNcpQZFV-6E,79
21
+ pinexq_cli-0.1.5.dist-info/entry_points.txt,sha256=I_w5x5YdnwgZhuJK3tnZbqMNwMMIeCwqC0KDTK5eXpo,53
22
+ pinexq_cli-0.1.5.dist-info/METADATA,sha256=dj6lfwEn3P2yWeGAv-bx71ICHbAWL6HujZgG7roAn_Y,1163
23
+ pinexq_cli-0.1.5.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: uv 0.9.22
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ pinexq = pinexq.cli.__main__:main
3
+