shipit-cli 0.13.4__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
shipit/generator.py CHANGED
@@ -1,16 +1,13 @@
- from __future__ import annotations
-
+ import json
  from pathlib import Path
- from typing import Dict, List, Optional
+ from typing import List, Optional, Union

  from shipit.providers.base import (
      DependencySpec,
      Provider,
      ProviderPlan,
      DetectResult,
-     MountSpec,
-     VolumeSpec,
-     CustomCommands,
+     Config,
  )
  from shipit.providers.registry import providers as registry_providers

@@ -20,10 +17,10 @@ def _providers() -> list[type[Provider]]:
      return registry_providers()


- def detect_provider(path: Path, custom_commands: CustomCommands) -> Provider:
+ def detect_provider(path: Path, base_config: Config) -> Provider:
      matches: list[tuple[type[Provider], DetectResult]] = []
      for provider_cls in _providers():
-         res = provider_cls.detect(path, custom_commands)
+         res = provider_cls.detect(path, base_config)
          if res:
              matches.append((provider_cls, res))
      if not matches:
@@ -64,39 +61,56 @@ def _emit_dependencies_declarations(
          declared.add(alias)

          version_var = None
-         architecture_var = None
-         if dep.env_var:
-             default = f' or "{dep.default_version}"' if dep.default_version else ""
-             version_key = alias + "_version"
-             lines.append(f'{version_key} = getenv("{dep.env_var}"){default}')
-             version_var = version_key
-         if dep.architecture_var:
-             architecture_key = alias + "_architecture"
-             lines.append(f'{architecture_key} = getenv("{dep.architecture_var}")')
-             architecture_var = architecture_key
+         architecture_env_var = None
+         if dep.var_name:
+             version_var = dep.var_name
+         if dep.architecture_var_name:
+             architecture_env_var = dep.architecture_var_name
          vars = [f'"{dep.name}"']
          if version_var:
              vars.append(version_var)
-         if architecture_var:
-             vars.append(f"architecture={architecture_var}")
+         if architecture_env_var:
+             vars.append(f"architecture={architecture_env_var}")
          lines.append(f"{alias} = dep({', '.join(vars)})")

      return "\n".join(lines), serve_vars, build_vars


- def generate_shipit(path: Path, custom_commands: CustomCommands, use_provider: Optional[str] = None) -> str:
+ def load_provider(
+     path: Path, base_config: Config, use_provider: Optional[str] = None
+ ) -> type[Provider]:
      provider_cls = None
      if use_provider:
-         provider_cls = next((p for p in _providers() if p.name().lower() == use_provider.lower()), None)
+         provider_cls = next(
+             (p for p in _providers() if p.name().lower() == use_provider.lower()), None
+         )
      if not provider_cls:
-         provider_cls = detect_provider(path, custom_commands)
-     provider = provider_cls(path, custom_commands)
+         provider_cls = detect_provider(path, base_config)
+     return provider_cls
+
+
+ def load_provider_config(
+     provider_cls: type[Provider],
+     path: Path,
+     base_config: Config,
+     config: Optional[Union[dict, str]] = None,
+ ) -> dict:
+     provider_config = provider_cls.load_config(path, base_config)
+     if config:
+         if isinstance(config, str):
+             config = json.loads(config)
+         assert isinstance(config, dict), "Config must be a dictionary, got %s" % type(config)
+         provider_config = provider_config.__class__.model_validate({**(provider_config.model_dump() | config)})
+     return provider_config
+
+
+ def generate_shipit(path: Path, provider: Provider) -> str:
+     default_serve_name = path.absolute().name

      # Collect parts
      plan = ProviderPlan(
-         serve_name=provider.serve_name(),
+         serve_name=provider.serve_name() or default_serve_name,
          provider=provider.name(),
-         platform=provider.platform(),
          mounts=provider.mounts(),
          volumes=provider.volumes(),
          declarations=provider.declarations(),
@@ -121,8 +135,12 @@ def generate_shipit(path: Path, custom_commands: CustomCommands, use_provider: O

      build_steps_block = ",\n".join([f" {s}" for s in build_steps])
      deps_array = ", ".join(serve_dep_vars)
+
+     def format_command(k: str, v: str) -> str:
+         return f' "{k}": {v}'
+
      commands_lines = ",\n".join(
-         [f' "{k}": {v}.replace("$PORT", PORT)' for k, v in plan.commands.items()]
+         [format_command(k, v) for k, v in plan.commands.items()]
      )
      env_lines = None
      if plan.env is not None:
@@ -150,8 +168,9 @@ def generate_shipit(path: Path, custom_commands: CustomCommands, use_provider: O
          out.append(dep_block)
      out.append("")

-     for m in plan.mounts:
-         out.append(f'{m.name} = mount("{m.name}")')
+     if plan.mounts:
+         for m in plan.mounts:
+             out.append(f'{m.name} = mount("{m.name}")')
      out.append("")

      if plan.volumes:
@@ -166,18 +185,16 @@ def generate_shipit(path: Path, custom_commands: CustomCommands, use_provider: O
      )
      out.append("")

-     out.append('PORT = getenv("PORT") or "8080"')
-
      if plan.declarations:
          out.append(plan.declarations)
+         out.append("")

-     out.append("")
      out.append("serve(")
      out.append(f' name="{plan.serve_name}",')
      out.append(f' provider="{plan.provider}",')
      # If app is mounted for serve, set cwd to the app serve path
      if "app" in attach_serve_names:
-         out.append(' cwd=app["serve"],')
+         out.append(' cwd=app.serve_path,')
      out.append(" build=[")
      out.append(build_steps_block)
      out.append(" ],")
@@ -194,9 +211,12 @@ def generate_shipit(path: Path, custom_commands: CustomCommands, use_provider: O
          out.append(" env = {")
          out.append(env_lines)
          out.append(" },")
-     out.append(" commands = {")
-     out.append(commands_lines)
-     out.append(" },")
+     if commands_lines:
+         out.append(" commands = {")
+         out.append(commands_lines)
+         out.append(" },")
+     else:
+         out.append(" commands = {},")
      if plan.services:
          out.append(" services=[")
          for s in plan.services:
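Taken together, 0.15.0 splits the old generate_shipit entry point into three steps: resolve a provider class (load_provider), layer provider configuration on top of the base Config (load_provider_config), and render the plan for an already-constructed provider (generate_shipit). A minimal sketch of how these pieces appear to compose, assuming the provider class is instantiated as provider_cls(path, provider_config) the way the provider modules below do; the call site and the override dict are illustrative, not taken from the package:

from pathlib import Path

from shipit.generator import generate_shipit, load_provider, load_provider_config
from shipit.providers.base import Config, CustomCommands

path = Path(".")
# Base config; enrich_from_path() (see base.py below) can pull a start command from a Procfile.
base_config = Config(commands=CustomCommands().enrich_from_path(path))

provider_cls = load_provider(path, base_config)                      # detect, or pass use_provider="hugo"
provider_config = load_provider_config(
    provider_cls, path, base_config, config={"static_dir": "dist"}   # dict or JSON string of overrides
)
provider = provider_cls(path, provider_config)
print(generate_shipit(path, provider))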
shipit/procfile.py CHANGED
@@ -1,33 +1,8 @@
  # File forked from https://github.com/nickstenning/honcho/blob/main/honcho/environ.py
  # MIT License
- import os
  import re
- from collections import defaultdict, namedtuple

- PROCFILE_LINE = re.compile(r'^([A-Za-z0-9_-]+):\s*(.+)$')
-
-
- class Env(object):
-
-     def __init__(self, config):
-         self._c = config
-
-     @property
-     def port(self):
-         try:
-             return int(self._c['port'])
-         except ValueError:
-             raise ValueError(f"invalid value for port: '{self._c['port']}'")
-
-     @property
-     def procfile(self):
-         return os.path.join(self._c['app_root'], self._c['procfile'])
-
-     def load_procfile(self):
-         with open(self.procfile) as f:
-             content = f.read()
-
-         return parse_procfile(content)
+ PROCFILE_LINE = re.compile(r"^([A-Za-z0-9_-]+):\s*(.+)$")


  class Procfile(object):
@@ -46,10 +21,11 @@ class Procfile(object):
          return p

      def add_process(self, name, command):
-         assert name not in self.processes, \
+         assert name not in self.processes, (
              "process names must be unique within a Procfile"
+         )
          self.processes[name] = command
-
+
      def get_start_command(self):
          if "web" in self.processes:
              return self.processes["web"]
@@ -60,47 +36,3 @@ class Procfile(object):
          elif len(self.processes) == 1:
              return list(self.processes.values())[0]
          return None
-
-
- ProcessParams = namedtuple("ProcessParams", "name cmd quiet env")
-
-
- def expand_processes(processes, concurrency=None, env=None, quiet=None, port=None):
-     """
-     Get a list of the processes that need to be started given the specified
-     list of process types, concurrency, environment, quietness, and base port
-     number.
-
-     Returns a list of ProcessParams objects, which have `name`, `cmd`, `env`,
-     and `quiet` attributes, corresponding to the parameters to the constructor
-     of `honcho.process.Process`.
-     """
-     if env is not None and env.get("PORT") is not None:
-         port = int(env.get("PORT"))
-
-     if quiet is None:
-         quiet = []
-
-     con = defaultdict(lambda: 1)
-     if concurrency is not None:
-         con.update(concurrency)
-
-     out = []
-
-     for name, cmd in processes.items():
-         for i in range(con[name]):
-             n = "{0}.{1}".format(name, i + 1)
-             c = cmd
-             q = name in quiet
-             e = {'SHIPIT_PROCESS_NAME': n}
-             if env is not None:
-                 e.update(env)
-             if port is not None:
-                 e['PORT'] = str(port + i)
-
-             params = ProcessParams(n, c, q, e)
-             out.append(params)
-         if port is not None:
-             port += 100
-
-     return out
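With the honcho-derived Env and expand_processes helpers removed, the module is reduced to parsing a Procfile and choosing a start command. A rough usage sketch, assuming Procfile.loads parses Procfile text into a Procfile instance (that is how base.py calls it below):

from shipit.procfile import Procfile

procfile = Procfile.loads("web: gunicorn app:app\nworker: python worker.py\n")
print(procfile.processes)             # {'web': 'gunicorn app:app', 'worker': 'python worker.py'}
print(procfile.get_start_command())   # 'gunicorn app:app' -- the "web" process wins when present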
shipit/providers/base.py CHANGED
@@ -1,8 +1,9 @@
- from __future__ import annotations
-
  from dataclasses import dataclass, field
  from pathlib import Path
- from typing import Dict, List, Optional, Protocol, Literal
+ from typing import Any, Dict, List, Optional, Protocol, Literal
+ from shipit.procfile import Procfile
+ from pydantic import BaseModel, Field
+ from pydantic_settings import SettingsConfigDict


  @dataclass
@@ -11,24 +12,62 @@ class DetectResult:
      score: int # Higher score wins when multiple providers match


- @dataclass
- class CustomCommands:
+ class CustomCommands(BaseModel):
+     model_config = SettingsConfigDict(extra="ignore")
+
      install: Optional[str] = None
      build: Optional[str] = None
      start: Optional[str] = None
      after_deploy: Optional[str] = None

+     # if (path / "Dockerfile").exists():
+     # # We get the start command from the Dockerfile
+     # with open(path / "Dockerfile", "r") as f:
+     # cmd = None
+     # for line in f:
+     # if line.startswith("CMD "):
+     # cmd = line[4:].strip()
+     # cmd = json.loads(cmd)
+     # # We get the last command
+     # if cmd:
+     # if isinstance(cmd, list):
+     # cmd = " ".join(cmd)
+     # custom_commands.start = cmd
+
+     def enrich_from_path(self, path: Path, use_procfile: bool = True) -> "CustomCommands":
+         if use_procfile:
+             procfile_path = path / "Procfile"
+             if procfile_path.exists():
+                 try:
+                     procfile = Procfile.loads(procfile_path.read_text())
+                     self.start = procfile.get_start_command()
+                 except Exception:
+                     pass
+         return self
+
+
+ class Config(BaseModel):
+     model_config = SettingsConfigDict(extra="ignore", env_prefix="SHIPIT_")
+
+     port: Optional[int] = 8080
+     commands: CustomCommands = Field(default_factory=CustomCommands)
+
+     def __getattr__(self, name: str) -> Any:
+         return getattr(self.commands, name, None)
+

  class Provider(Protocol):
      def __init__(self, path: Path): ...
      @classmethod
      def name(cls) -> str: ...
      @classmethod
-     def detect(cls, path: Path, custom_commands: CustomCommands) -> Optional[DetectResult]: ...
-     def initialize(self) -> None: ...
+     def load_config(cls, path: Path, config: Config) -> Config: ...
+     @classmethod
+     def detect(
+         cls, path: Path, config: Config
+     ) -> Optional[DetectResult]: ...
      # Structured plan steps (no path args; use self.path)
-     def serve_name(self) -> str: ...
-     def platform(self) -> Optional[str]: ...
+     def serve_name(self) -> Optional[str]: ...
      def dependencies(self) -> list["DependencySpec"]: ...
      def declarations(self) -> Optional[str]: ...
      def build_steps(self) -> list[str]: ...
@@ -44,9 +83,9 @@ class Provider(Protocol):
  @dataclass
  class DependencySpec:
      name: str
-     env_var: Optional[str] = None
+     var_name: Optional[str] = None
      default_version: Optional[str] = None
-     architecture_var: Optional[str] = None
+     architecture_var_name: Optional[str] = None
      alias: Optional[str] = None # Variable name in Shipit plan
      use_in_build: bool = False
      use_in_serve: bool = False
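CustomCommands and Config are now pydantic models instead of dataclasses, and Config forwards unknown attribute lookups to its commands field via __getattr__. A small sketch of the behaviour these definitions imply (the values are made up):

from pathlib import Path

from shipit.providers.base import Config, CustomCommands

config = Config(port=3000, commands=CustomCommands(build="hugo --minify"))
print(config.port)    # 3000
print(config.build)   # 'hugo --minify' -- resolved through __getattr__ on commands
print(config.start)   # None, until something sets it

# enrich_from_path mutates the commands in place (and returns them),
# pulling the start command from a Procfile when one exists.
config.commands.enrich_from_path(Path("."), use_procfile=True)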
shipit/providers/hugo.py CHANGED
@@ -1,19 +1,70 @@
- from __future__ import annotations
-
  from pathlib import Path
  from typing import Dict, Optional

- from .base import DetectResult, DependencySpec, Provider, _exists, ServiceSpec, VolumeSpec, CustomCommands, MountSpec
- from .staticfile import StaticFileProvider
+ from .base import (
+     DetectResult,
+     DependencySpec,
+     Provider,
+     _exists,
+     ServiceSpec,
+     VolumeSpec,
+     CustomCommands,
+     MountSpec,
+     Config,
+ )
+ from .staticfile import StaticFileProvider, StaticFileConfig
+ from pydantic_settings import SettingsConfigDict
+ import toml
+ import json
+ import yaml
+
+
+ class HugoConfig(StaticFileConfig):
+     model_config = SettingsConfigDict(extra="ignore", env_prefix="SHIPIT_")
+
+     hugo_version: Optional[str] = "0.149.0"
+     static_dir: Optional[str] = "public"
+

  class HugoProvider(StaticFileProvider):
+     def __init__(self, path: Path, config: HugoConfig):
+         super().__init__(path, config)
+
+     @classmethod
+     def load_config(cls, path: Path, base_config: Config) -> HugoConfig:
+         config = super().load_config(path, base_config)
+         if _exists(path, "hugo.toml"):
+             config_dict = toml.load(open(path / "hugo.toml"))
+         elif _exists(path, "hugo.json"):
+             config_dict = json.load(open(path / "hugo.json"))
+         elif _exists(path, "hugo.yaml"):
+             config_dict = yaml.safe_load(open(path / "hugo.yaml"))
+         elif _exists(path, "hugo.yml"):
+             config_dict = yaml.safe_load(open(path / "hugo.yml"))
+         else:
+             config_dict = {}
+
+         config = HugoConfig(**config.model_dump())
+         if not config.static_dir:
+             hugo_publish_dir = None
+             if isinstance(config_dict, dict):
+                 hugo_publish_dir = config_dict.get("publishDir")
+                 if not hugo_publish_dir:
+                     # Use destination as fallback
+                     hugo_publish_dir = config_dict.get("destination")
+             hugo_publish_dir = hugo_publish_dir or "public"
+             assert isinstance(hugo_publish_dir, str), "publishDir in hugo config must be a string"
+             config.static_dir = hugo_publish_dir
+         return config

      @classmethod
      def name(cls) -> str:
          return "hugo"

      @classmethod
-     def detect(cls, path: Path, custom_commands: CustomCommands) -> Optional[DetectResult]:
+     def detect(
+         cls, path: Path, config: Config
+     ) -> Optional[DetectResult]:
          if _exists(path, "hugo.toml", "hugo.json", "hugo.yaml", "hugo.yml"):
              return DetectResult(cls.name(), 80)
          if (
@@ -22,20 +73,18 @@ class HugoProvider(StaticFileProvider):
              and (_exists(path, "static") or _exists(path, "themes"))
          ):
              return DetectResult(cls.name(), 40)
+         if config.commands.build and config.commands.build.startswith("hugo "):
+             return DetectResult(cls.name(), 80)
          return None

-     def serve_name(self) -> str:
-         return self.path.name
-
-     def platform(self) -> Optional[str]:
-         return "hugo"
+     def serve_name(self) -> Optional[str]:
+         return None

      def dependencies(self) -> list[DependencySpec]:
          return [
              DependencySpec(
                  "hugo",
-                 env_var="SHIPIT_HUGO_VERSION",
-                 default_version="0.149.0",
+                 var_name="config.hugo_version",
                  use_in_build=True,
              ),
              *super().dependencies(),
@@ -43,9 +92,10 @@ class HugoProvider(StaticFileProvider):

      def build_steps(self) -> list[str]:
          return [
-             'workdir(temp["build"])',
+             'workdir(temp.path)',
              'copy(".", ".", ignore=[".git"])',
-             'run("hugo build --destination={}".format(app["build"]), group="build")',
+             'run("hugo build", group="build")',
+             'run("cp -R {}/* {}/".format(config.static_dir, static_app.path))'
          ]

      def mounts(self) -> list[MountSpec]:
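The Hugo dependency no longer carries env_var/default_version; its var_name points at the provider config, so the dependency declaration in the generated plan changes shape. An illustrative before/after of the emitted fragment, reconstructed from _emit_dependencies_declarations and the DependencySpec above (it assumes the dependency alias defaults to the dependency name):

# 0.13.4
hugo_version = getenv("SHIPIT_HUGO_VERSION") or "0.149.0"
hugo = dep("hugo", hugo_version)

# 0.15.0
hugo = dep("hugo", config.hugo_version)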
@@ -0,0 +1,123 @@
+ from pathlib import Path
+ from typing import Dict, Optional
+ import yaml
+
+ from .base import (
+     DetectResult,
+     DependencySpec,
+     Provider,
+     _exists,
+     MountSpec,
+     ServiceSpec,
+     VolumeSpec,
+     CustomCommands,
+     Config,
+ )
+ from .staticfile import StaticFileProvider, StaticFileConfig
+ from pydantic_settings import SettingsConfigDict
+
+
+ class JekyllConfig(StaticFileConfig):
+     model_config = SettingsConfigDict(extra="ignore", env_prefix="SHIPIT_")
+
+     ruby_version: Optional[str] = "3.4.7"
+     jekyll_version: Optional[str] = "4.3.0"
+
+     static_dir: Optional[str] = "_site"
+
+
+ class JekyllProvider(StaticFileProvider):
+     def __init__(self, path: Path, config: JekyllConfig):
+         self.path = path
+         self.config = config
+
+     @classmethod
+     def load_config(
+         cls, path: Path, base_config: Config
+     ) -> JekyllConfig:
+         config = super().load_config(path, base_config)
+         config = JekyllConfig(**config.model_dump())
+         if not config.static_dir:
+             jekyll_static_dir = None
+             if _exists(path, "_config.yml"):
+                 config_dict = yaml.safe_load(open(path / "_config.yml"))
+             elif _exists(path, "_config.yaml"):
+                 config_dict = yaml.safe_load(open(path / "_config.yaml"))
+             else:
+                 config_dict = {}
+             if config_dict and isinstance(config_dict, dict):
+                 jekyll_static_dir = config_dict.get("destination")
+             jekyll_static_dir = jekyll_static_dir or "_site"
+             assert isinstance(jekyll_static_dir, str), "destination in Jekyll config must be a string"
+             config.static_dir = jekyll_static_dir
+         return config
+
+     @classmethod
+     def name(cls) -> str:
+         return "jekyll"
+
+     @classmethod
+     def detect(
+         cls, path: Path, config: Config
+     ) -> Optional[DetectResult]:
+         if _exists(path, "_config.yml", "_config.yaml"):
+             if _exists(path, "Gemfile"):
+                 return DetectResult(cls.name(), 85)
+             return DetectResult(cls.name(), 40)
+         if config.commands.build and config.commands.build.startswith("jekyll "):
+             return DetectResult(cls.name(), 85)
+         return None
+
+     def serve_name(self) -> Optional[str]:
+         return None
+
+     def dependencies(self) -> list[DependencySpec]:
+         return [
+             DependencySpec(
+                 "ruby",
+                 var_name="config.ruby_version",
+                 use_in_build=True,
+                 use_in_serve=False,
+             ),
+             *super().dependencies(),
+         ]
+
+     def build_steps(self) -> list[str]:
+         if _exists(self.path, "Gemfile"):
+             install_deps = ["Gemfile"]
+             install_deps_str = ", ".join([f'"{dep}"' for dep in install_deps])
+             install_commands = [
+                 f'run("bundle install", inputs=[{install_deps_str}], group="install")'
+             ]
+             if _exists(self.path, "Gemfile.lock"):
+                 install_commands = [
+                     'copy("Gemfile.lock")',
+                     *install_commands,
+                 ]
+         else:
+             install_commands = [
+                 'run("bundle init", group="install")',
+                 'run("bundle add jekyll -v {}".format(config.jekyll_version), group="install")',
+             ]
+         return [
+             'workdir(temp.path)',
+             *install_commands,
+             'copy(".", ignore=[".git"])',
+             'run("jekyll build", group="build")',
+             'run("cp -R {}/* {}/".format(config.static_dir, static_app.path))'
+         ]
+
+     def prepare_steps(self) -> Optional[list[str]]:
+         return None
+
+     def mounts(self) -> list[MountSpec]:
+         return [MountSpec("temp", attach_to_serve=False), *super().mounts()]
+
+     def volumes(self) -> list[VolumeSpec]:
+         return []
+
+     def env(self) -> Optional[Dict[str, str]]:
+         return None
+
+     def services(self) -> list[ServiceSpec]:
+         return []
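This newly added Jekyll provider plugs into the same score-based detection as the others: _config.yml plus a Gemfile scores 85, a bare _config.yml scores 40, and a custom build command starting with "jekyll " also scores 85. A hedged sketch of how detection would resolve for a Jekyll site (the path is hypothetical, and it assumes the provider is registered in shipit.providers.registry):

from pathlib import Path

from shipit.generator import load_provider
from shipit.providers.base import Config

provider_cls = load_provider(Path("./my-jekyll-site"), Config())
print(provider_cls.name())   # 'jekyll' -- the highest DetectResult score wins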