llamactl 0.3.15__tar.gz → 0.3.17__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llamactl-0.3.15 → llamactl-0.3.17}/PKG-INFO +5 -6
- {llamactl-0.3.15 → llamactl-0.3.17}/README.md +2 -3
- {llamactl-0.3.15 → llamactl-0.3.17}/pyproject.toml +3 -3
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/__init__.py +2 -1
- llamactl-0.3.17/src/llama_deploy/cli/commands/pkg.py +122 -0
- llamactl-0.3.17/src/llama_deploy/cli/pkg/__init__.py +10 -0
- llamactl-0.3.17/src/llama_deploy/cli/pkg/defaults.py +11 -0
- llamactl-0.3.17/src/llama_deploy/cli/pkg/options.py +84 -0
- llamactl-0.3.17/src/llama_deploy/cli/pkg/utils.py +46 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/app.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/auth/client.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/client.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/aliased_group.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/auth.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/deployment.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/env.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/init.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/commands/serve.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/_config.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/_migrations.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/auth_service.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/env_service.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/migrations/0001_init.sql +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/migrations/0002_add_auth_fields.sql +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/migrations/__init__.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/schema.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/debug.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/env.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/interactive_prompts/session_utils.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/interactive_prompts/utils.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/options.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/py.typed +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/styles.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/deployment_form.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/deployment_help.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/deployment_monitor.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/git_validation.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/github_callback_server.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/llama_loader.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/secrets_form.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/textual/styles.tcss +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/utils/env_inject.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/utils/redact.py +0 -0
- {llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/utils/version.py +0 -0
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: llamactl
|
|
3
|
-
Version: 0.3.15
|
|
3
|
+
Version: 0.3.17
|
|
4
4
|
Summary: A command-line interface for managing LlamaDeploy projects and deployments
|
|
5
5
|
Author: Adrian Lyjak
|
|
6
6
|
Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
|
|
7
7
|
License: MIT
|
|
8
|
-
Requires-Dist: llama-deploy-core[client]>=0.3.
|
|
9
|
-
Requires-Dist: llama-deploy-appserver>=0.3.
|
|
8
|
+
Requires-Dist: llama-deploy-core[client]>=0.3.17,<0.4.0
|
|
9
|
+
Requires-Dist: llama-deploy-appserver>=0.3.17,<0.4.0
|
|
10
10
|
Requires-Dist: vibe-llama-core>=0.1.0
|
|
11
11
|
Requires-Dist: rich>=13.0.0
|
|
12
12
|
Requires-Dist: questionary>=2.0.0
|
|
@@ -22,12 +22,11 @@ Description-Content-Type: text/markdown
|
|
|
22
22
|
|
|
23
23
|
# llamactl
|
|
24
24
|
|
|
25
|
-
> [!WARNING]
|
|
26
|
-
> This repository contains pre-release software. It is unstable, incomplete, and subject to breaking changes. Not recommended for use.
|
|
27
|
-
|
|
28
25
|
|
|
29
26
|
A command-line interface for managing LlamaDeploy projects and deployments.
|
|
30
27
|
|
|
28
|
+
For an end-to-end introduction, see [Getting started with LlamaAgents](https://developers.llamaindex.ai/python/cloud/llamaagents/getting-started).
|
|
29
|
+
|
|
31
30
|
## Installation
|
|
32
31
|
|
|
33
32
|
Install from PyPI:
|
|
@@ -1,11 +1,10 @@
|
|
|
1
1
|
# llamactl
|
|
2
2
|
|
|
3
|
-
> [!WARNING]
|
|
4
|
-
> This repository contains pre-release software. It is unstable, incomplete, and subject to breaking changes. Not recommended for use.
|
|
5
|
-
|
|
6
3
|
|
|
7
4
|
A command-line interface for managing LlamaDeploy projects and deployments.
|
|
8
5
|
|
|
6
|
+
For an end-to-end introduction, see [Getting started with LlamaAgents](https://developers.llamaindex.ai/python/cloud/llamaagents/getting-started).
|
|
7
|
+
|
|
9
8
|
## Installation
|
|
10
9
|
|
|
11
10
|
Install from PyPI:
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "llamactl"
|
|
3
|
-
version = "0.3.15"
|
|
3
|
+
version = "0.3.17"
|
|
4
4
|
description = "A command-line interface for managing LlamaDeploy projects and deployments"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
license = { text = "MIT" }
|
|
@@ -9,8 +9,8 @@ authors = [
|
|
|
9
9
|
]
|
|
10
10
|
requires-python = ">=3.11, <4"
|
|
11
11
|
dependencies = [
|
|
12
|
-
"llama-deploy-core[client]>=0.3.
|
|
13
|
-
"llama-deploy-appserver>=0.3.
|
|
12
|
+
"llama-deploy-core[client]>=0.3.17,<0.4.0",
|
|
13
|
+
"llama-deploy-appserver>=0.3.17,<0.4.0",
|
|
14
14
|
"vibe-llama-core>=0.1.0",
|
|
15
15
|
"rich>=13.0.0",
|
|
16
16
|
"questionary>=2.0.0",
|
|
@@ -4,6 +4,7 @@ from llama_deploy.cli.commands.auth import auth
|
|
|
4
4
|
from llama_deploy.cli.commands.deployment import deployments
|
|
5
5
|
from llama_deploy.cli.commands.env import env_group
|
|
6
6
|
from llama_deploy.cli.commands.init import init
|
|
7
|
+
from llama_deploy.cli.commands.pkg import pkg
|
|
7
8
|
from llama_deploy.cli.commands.serve import serve
|
|
8
9
|
|
|
9
10
|
from .app import app
|
|
@@ -22,7 +23,7 @@ def main() -> None:
|
|
|
22
23
|
app()
|
|
23
24
|
|
|
24
25
|
|
|
25
|
-
__all__ = ["app", "deployments", "auth", "serve", "init", "env_group"]
|
|
26
|
+
__all__ = ["app", "deployments", "auth", "serve", "init", "env_group", "pkg"]
|
|
26
27
|
|
|
27
28
|
|
|
28
29
|
if __name__ == "__main__":
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
import click
|
|
4
|
+
from llama_deploy.cli.pkg import (
|
|
5
|
+
DEFAULT_DOCKER_IGNORE,
|
|
6
|
+
build_dockerfile_content,
|
|
7
|
+
infer_python_version,
|
|
8
|
+
pkg_container_options,
|
|
9
|
+
)
|
|
10
|
+
from llama_deploy.core.deployment_config import (
|
|
11
|
+
read_deployment_config_from_git_root_or_cwd,
|
|
12
|
+
)
|
|
13
|
+
from rich import print as rprint
|
|
14
|
+
|
|
15
|
+
from ..app import app
|
|
16
|
+
|
|
17
|
+
# Container tooling the `pkg` group can currently target; rendered into the
# group's --help text below.
SUPPORTED_FORMATS = ["Docker", "Podman"]
SUPPORTED_FORMATS_STR = ", ".join(SUPPORTED_FORMATS)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@app.group(
    help=f"Package your application in different formats. Currently supported: {SUPPORTED_FORMATS_STR}",
    no_args_is_help=True,
    context_settings={"max_content_width": None},
)
def pkg() -> None:
    """Package application in different formats (Dockerfile, Podman config, Nixpack...)"""
    # Group body is intentionally empty: it only dispatches to subcommands
    # (e.g. `container` below); `no_args_is_help` shows usage when bare.
    pass
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@pkg.command(
    "container",
    help="Generate a minimal, build-ready file to containerize your workflows through Docker or Podman (currently frontend is not supported).",
)
@pkg_container_options
def create_container_file(
    deployment_file: Path,
    python_version: str | None = None,
    port: int = 4501,
    exclude: tuple[str, ...] | None = None,
    output_file: str = "Dockerfile",
    dockerignore_path: str = ".dockerignore",
    overwrite: bool = False,
) -> None:
    """CLI entry point for ``llamactl pkg container``.

    Thin wrapper around `_create_file_for_container`: every option collected
    by `pkg_container_options` is forwarded unchanged so the logic stays
    testable outside of click.
    """
    _create_file_for_container(
        deployment_file=deployment_file,
        python_version=python_version,
        port=port,
        exclude=exclude,
        output_file=output_file,
        dockerignore_path=dockerignore_path,
        overwrite=overwrite,
    )
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _check_deployment_config(deployment_file: Path) -> Path:
    """Validate that *deployment_file* points at a containerizable project.

    Args:
        deployment_file: Path to the deployment config file or its directory.

    Returns:
        The config directory (the path itself when it is a directory,
        otherwise its parent).

    Raises:
        click.Abort: if the path is missing, no ``pyproject.toml`` exists in
            the config directory, the deployment config cannot be parsed, or
            the config declares a UI (containerized UI builds unsupported).
    """
    if not deployment_file.exists():
        rprint(f"[red]Deployment file '{deployment_file}' not found[/red]")
        raise click.Abort()

    # Early check: appserver requires a pyproject.toml in the config directory
    config_dir = deployment_file if deployment_file.is_dir() else deployment_file.parent
    if not (config_dir / "pyproject.toml").exists():
        rprint(
            "[red]No pyproject.toml found at[/red] "
            f"[bold]{config_dir}[/bold].\n"
            # Fixed: previous message told the user to re-run 'llamactl serve',
            # which is the wrong command for this packaging flow.
            "Add a pyproject.toml to your project and re-run 'llamactl pkg container'."
        )
        raise click.Abort()

    try:
        config = read_deployment_config_from_git_root_or_cwd(
            Path.cwd(), deployment_file
        )
    except Exception:
        rprint(
            "[red]Error: Could not read a deployment config. This doesn't appear to be a valid llama-deploy project.[/red]"
        )
        raise click.Abort()
    if config.ui:
        rprint(
            "[bold red]Containerized UI builds are currently not supported. Please remove the UI configuration from your deployment file if you wish to proceed.[/]"
        )
        raise click.Abort()
    return config_dir
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _create_file_for_container(
    deployment_file: Path,
    output_file: str = "Dockerfile",
    python_version: str | None = None,
    port: int = 4501,
    exclude: tuple[str, ...] | None = None,
    dockerignore_path: str = ".dockerignore",
    overwrite: bool = False,
) -> None:
    """Generate a Dockerfile and .dockerignore for a llama-deploy project.

    Args:
        deployment_file: Path to the deployment config (file or directory).
        output_file: Destination path for the generated Dockerfile.
        python_version: Base-image Python version; inferred from the project
            (.python-version / pyproject.toml) when not given.
        port: Port the containerized appserver listens on.
        exclude: Extra patterns appended to the default .dockerignore content.
        dockerignore_path: Destination path for the generated .dockerignore.
        overwrite: Allow clobbering existing output files.

    Raises:
        click.Abort: if validation fails or an output file already exists
            and *overwrite* is not set.
    """
    config_dir = _check_deployment_config(deployment_file=deployment_file)

    if not python_version:
        python_version = infer_python_version(config_dir)

    dockerignore_content = DEFAULT_DOCKER_IGNORE
    if exclude:
        # Append user-specified patterns, one per line, after the defaults.
        dockerignore_content += "\n" + "\n".join(exclude)

    dockerfile_content = build_dockerfile_content(python_version, port)

    # Fixed: validate BOTH destinations before writing either. Previously the
    # Dockerfile was written first, then the .dockerignore collision aborted,
    # leaving a half-written result behind.
    for target in (output_file, dockerignore_path):
        if Path(target).exists() and not overwrite:
            rprint(
                f"[red bold]Error: {target} already exists. If you wish to overwrite the file, pass `--overwrite` as a flag to the command.[/]"
            )
            raise click.Abort()

    with open(output_file, "w") as f:
        f.write(dockerfile_content)
    with open(dockerignore_path, "w") as f:
        f.write(dockerignore_content)
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from .defaults import DEFAULT_DOCKER_IGNORE
|
|
2
|
+
from .options import pkg_container_options
|
|
3
|
+
from .utils import build_dockerfile_content, infer_python_version
|
|
4
|
+
|
|
5
|
+
__all__ = [
|
|
6
|
+
"infer_python_version",
|
|
7
|
+
"build_dockerfile_content",
|
|
8
|
+
"DEFAULT_DOCKER_IGNORE",
|
|
9
|
+
"pkg_container_options",
|
|
10
|
+
]
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from typing import Callable, ParamSpec, TypeVar
|
|
3
|
+
|
|
4
|
+
import click
|
|
5
|
+
from llama_deploy.core.config import DEFAULT_DEPLOYMENT_FILE_PATH
|
|
6
|
+
|
|
7
|
+
P = ParamSpec("P")
|
|
8
|
+
R = TypeVar("R")
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _deployment_file_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
12
|
+
return click.argument(
|
|
13
|
+
"deployment_file",
|
|
14
|
+
required=False,
|
|
15
|
+
default=DEFAULT_DEPLOYMENT_FILE_PATH,
|
|
16
|
+
type=click.Path(dir_okay=True, resolve_path=True, path_type=Path),
|
|
17
|
+
)(f)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _python_version_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
21
|
+
return click.option(
|
|
22
|
+
"--python-version",
|
|
23
|
+
help="Python version for the base image. Default is inferred from the uv project configuration (.python-version or pyproject.toml). If no version can be inferred, python 3.12 is used.",
|
|
24
|
+
required=False,
|
|
25
|
+
default=None,
|
|
26
|
+
)(f)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _port_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
30
|
+
return click.option(
|
|
31
|
+
"--port",
|
|
32
|
+
help="The port to run the API server on. Defaults to 4501.",
|
|
33
|
+
required=False,
|
|
34
|
+
default=4501,
|
|
35
|
+
type=int,
|
|
36
|
+
)(f)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _dockerignore_path_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
40
|
+
return click.option(
|
|
41
|
+
"--dockerignore-path",
|
|
42
|
+
help="Path for the output .dockerignore file. Defaults to .dockerignore",
|
|
43
|
+
required=False,
|
|
44
|
+
default=".dockerignore",
|
|
45
|
+
)(f)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _output_file_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
49
|
+
return click.option(
|
|
50
|
+
"--output-file",
|
|
51
|
+
help="Path for the output file to build the image. Defaults to Dockerfile",
|
|
52
|
+
required=False,
|
|
53
|
+
default="Dockerfile",
|
|
54
|
+
)(f)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _overwrite_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
58
|
+
return click.option(
|
|
59
|
+
"--overwrite",
|
|
60
|
+
help="Overwrite output files",
|
|
61
|
+
is_flag=True,
|
|
62
|
+
)(f)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _exclude_option(f: Callable[P, R]) -> Callable[P, R]:
|
|
66
|
+
return click.option(
|
|
67
|
+
"--exclude",
|
|
68
|
+
help="Path to exclude from the build (will be appended to .dockerignore). Can be used multiple times.",
|
|
69
|
+
multiple=True,
|
|
70
|
+
required=False,
|
|
71
|
+
default=None,
|
|
72
|
+
)(f)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def pkg_container_options(f: Callable[P, R]) -> Callable[P, R]:
|
|
76
|
+
return _deployment_file_option(
|
|
77
|
+
_python_version_option(
|
|
78
|
+
_port_option(
|
|
79
|
+
_dockerignore_path_option(
|
|
80
|
+
_overwrite_option(_exclude_option(_output_file_option(f)))
|
|
81
|
+
)
|
|
82
|
+
)
|
|
83
|
+
)
|
|
84
|
+
)
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from tomllib import load as load_toml
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def _get_min_py_version(requires_python: str):
|
|
6
|
+
min_v = requires_python.split(",")[0].strip()
|
|
7
|
+
return (
|
|
8
|
+
min_v.replace("=", "")
|
|
9
|
+
.replace(">", "")
|
|
10
|
+
.replace("<", "")
|
|
11
|
+
.replace("~", "")
|
|
12
|
+
.strip()
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def infer_python_version(config_dir: Path) -> str:
    """Determine the project's Python version from uv-style metadata.

    A non-empty ``.python-version`` file in *config_dir* wins; otherwise the
    minimum of ``project.requires-python`` in ``pyproject.toml`` is used,
    defaulting to "3.12" when the field is absent.
    """
    version_file = config_dir / ".python-version"
    if version_file.exists():
        with open(version_file, "r") as vf:
            pinned = vf.read().strip()
        if pinned:
            return pinned
    with open(config_dir / "pyproject.toml", "rb") as pf:
        project_meta = load_toml(pf)
    requires = project_meta.get("project", {}).get("requires-python", "3.12")
    return _get_min_py_version(requires)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def build_dockerfile_content(python_version: str | None = None, port: int = 4501):
|
|
29
|
+
return f"""
|
|
30
|
+
FROM python:{python_version}-slim-trixie
|
|
31
|
+
|
|
32
|
+
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
|
33
|
+
|
|
34
|
+
WORKDIR /app
|
|
35
|
+
|
|
36
|
+
COPY . /app/
|
|
37
|
+
|
|
38
|
+
ENV PATH=/root/.local/bin:$PATH
|
|
39
|
+
|
|
40
|
+
RUN uv sync --locked
|
|
41
|
+
RUN uv tool install llamactl
|
|
42
|
+
|
|
43
|
+
EXPOSE {port}
|
|
44
|
+
|
|
45
|
+
ENTRYPOINT [ "uv", "run", "llamactl", "serve", "--host", "0.0.0.0", "--port", "{port}" ]
|
|
46
|
+
"""
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/config/migrations/0002_add_auth_fields.sql
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llamactl-0.3.15 → llamactl-0.3.17}/src/llama_deploy/cli/interactive_prompts/session_utils.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|