uv-workspace-codegen 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uv_workspace_codegen/__init__.py +8 -0
- uv_workspace_codegen/__main__.py +11 -0
- uv_workspace_codegen/main.py +454 -0
- uv_workspace_codegen/py.typed +0 -0
- uv_workspace_codegen/templates/package.template.yml +24 -0
- uv_workspace_codegen-0.6.0.dist-info/METADATA +9 -0
- uv_workspace_codegen-0.6.0.dist-info/RECORD +9 -0
- uv_workspace_codegen-0.6.0.dist-info/WHEEL +4 -0
- uv_workspace_codegen-0.6.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Generate individual CI/CD workflows for each library in the workspace.
|
|
3
|
+
|
|
4
|
+
This package scans the libs/ directory and creates individual GitHub Actions
|
|
5
|
+
workflows for each library, allowing parallel testing and better CI/CD isolation.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
__version__ = "0.1.0"
|
|
@@ -0,0 +1,454 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Main module for the uv-workspace-codegen package.
|
|
3
|
+
|
|
4
|
+
This module contains the main function and logic for generating GitHub Actions
|
|
5
|
+
workflows for libraries in the workspace.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import difflib
|
|
9
|
+
import os
|
|
10
|
+
import sys
|
|
11
|
+
import tomllib
|
|
12
|
+
from dataclasses import dataclass, asdict
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Optional
|
|
15
|
+
|
|
16
|
+
import click
|
|
17
|
+
import yaml
|
|
18
|
+
from jinja2 import Environment, Template
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass
class Package:
    """Represents a package with its metadata.

    Built by ``_discover_in_directory`` from a package's
    ``[tool.uv-workspace-codegen]`` pyproject.toml table and rendered into
    workflow templates via ``dataclasses.asdict``.
    """

    # Project name as declared in pyproject.toml (falls back to the dir name).
    name: str
    # Path relative to the workspace root ("." for the workspace root itself).
    path: str
    # Import-style name: the project name with "-" replaced by "_".
    package_name: str
    # Workflow template names to render for this package.
    template_type: list[str]
    # Whether the generated workflow should include the standard pytest step.
    generate_standard_pytest_step: bool
    # Typechecker tool name made available to templates.
    typechecker: str = "mypy"
    # Whether to emit a typechecking step.
    generate_typechecking_step: bool = True
    # Whether to emit an Alembic migration check step.
    generate_alembic_migration_check_step: bool = False
    # Extra workflow steps parsed from embedded YAML; normalized to [] below.
    custom_steps: Optional[list[dict]] = None

    def __post_init__(self) -> None:
        # Normalize a missing custom_steps value so consumers can iterate it.
        if self.custom_steps is None:
            self.custom_steps = []
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def discover_packages(workspace_dir: Path, workspace_config: dict) -> list[Package]:
    """Discover packages with uv-workspace-codegen configuration in their pyproject.toml files."""
    found: list[Package] = []

    # The workspace root itself may carry a codegen configuration.
    found += _discover_in_directory(
        workspace_dir, workspace_dir, workspace_config, check_root=True
    )

    # Walk every subdirectory looking for configured packages.
    for current, subdirs, _files in os.walk(workspace_dir):
        current_path = Path(current)
        rel_parts = current_path.relative_to(workspace_dir).parts

        hidden = any(part.startswith(".") for part in rel_parts)
        if hidden or "__pycache__" in current_path.parts:
            # Prune traversal below hidden/__pycache__ directories.
            subdirs[:] = []
            continue

        if current_path == workspace_dir:
            continue  # already handled above via check_root=True

        found += _discover_in_directory(
            current_path, workspace_dir, workspace_config, check_root=False
        )

    return found
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _discover_in_directory(
    target_dir: Path,
    workspace_dir: Path,
    workspace_config: dict,
    check_root: bool = False,
) -> list[Package]:
    """Discover packages in a specific directory."""
    result: list[Package] = []

    manifest = target_dir / "pyproject.toml"
    if not manifest.exists():
        return result

    try:
        with open(manifest, "rb") as fh:
            data = tomllib.load(fh)

        # Only directories that opt in via [tool.uv-workspace-codegen] count.
        codegen_cfg = data.get("tool", {}).get("uv-workspace-codegen", {})
        if not codegen_cfg.get("generate", False):
            return result

        # Resolve template types with the workspace-level default as fallback,
        # normalizing a single string into a one-element list.
        fallback_types = workspace_config.get("default_template_type", ["package"])
        template_types = codegen_cfg.get("template_type", fallback_types)
        if not isinstance(template_types, list):
            template_types = [template_types]

        # The project name drives both the workflow name and the import name.
        proj_name = data.get("project", {}).get("name", target_dir.name)

        # custom_steps arrives as an embedded YAML string; a parse failure is
        # downgraded to a warning with an empty step list.
        steps: list[dict] = []
        raw_steps = codegen_cfg.get("custom_steps", "")
        if raw_steps:
            try:
                steps = yaml.safe_load(raw_steps) or []
            except yaml.YAMLError as e:
                print(
                    f"Warning: Failed to parse custom_steps YAML in {manifest}: {e}"
                )
                steps = []

        # Path relative to the workspace root ("." for the root itself).
        rel = "." if check_root else str(target_dir.relative_to(workspace_dir))

        result.append(
            Package(
                name=proj_name,
                path=rel,
                package_name=proj_name.replace("-", "_"),
                template_type=template_types,
                generate_standard_pytest_step=codegen_cfg.get(
                    "generate_standard_pytest_step", False
                ),
                typechecker=codegen_cfg.get("typechecker", "mypy"),
                generate_typechecking_step=codegen_cfg.get(
                    "generate_typechecking_step", True
                ),
                generate_alembic_migration_check_step=codegen_cfg.get(
                    "generate_alembic_migration_check_step", False
                ),
                custom_steps=steps,
            )
        )

    except (tomllib.TOMLDecodeError, KeyError) as e:
        print(f"Warning: Failed to parse {manifest}: {e}")

    return result
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def get_workspace_config(workspace_dir: Path) -> dict:
|
|
160
|
+
"""Get workspace-level uv-workspace-codegen configuration."""
|
|
161
|
+
pyproject_path = workspace_dir / "pyproject.toml"
|
|
162
|
+
if not pyproject_path.exists():
|
|
163
|
+
return {}
|
|
164
|
+
|
|
165
|
+
try:
|
|
166
|
+
with open(pyproject_path, "rb") as f:
|
|
167
|
+
pyproject_data = tomllib.load(f)
|
|
168
|
+
|
|
169
|
+
return pyproject_data.get("tool", {}).get("uv-workspace-codegen", {})
|
|
170
|
+
except (tomllib.TOMLDecodeError, KeyError):
|
|
171
|
+
return {}
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def load_template(
    template_type: str,
    workspace_dir: Path,
    workspace_config: dict,
    diff_mode: bool = False,
) -> Template:
    """Load the appropriate template based on template type.

    Args:
        template_type: Template name, resolved to
            ``<template_dir>/<template_type>.template.yml``.
        workspace_dir: Root directory of the workspace.
        workspace_config: Workspace-level ``[tool.uv-workspace-codegen]``
            table; its ``template_dir`` key overrides the default location.
        diff_mode: When True, never write the bundled default template to disk.

    Returns:
        A compiled Jinja2 template.

    Raises:
        FileNotFoundError: If the template is missing and cannot be created.
    """
    # Get template directory from workspace config, with default fallback.
    template_dir_str = workspace_config.get(
        "template_dir", ".github/workflow-templates"
    )
    templates_dir = workspace_dir / template_dir_str
    template_path = templates_dir / f"{template_type}.template.yml"

    # If the requested template does not exist, and the requested type is
    # 'package', attempt to populate it from the bundled template located in
    # this package's `templates/` directory. Only create the workspace
    # templates directory when we actually need to write the default file.
    if not template_path.exists():
        if template_type == "package":
            bundled_template = (
                Path(__file__).parent / "templates" / "package.template.yml"
            )
            if bundled_template.exists():
                try:
                    # In diff mode, we don't want to create the template file.
                    if diff_mode:
                        with open(bundled_template, "r") as src:
                            return create_jinja_environment().from_string(src.read())

                    # Create templates dir now that we will populate it.
                    templates_dir.mkdir(parents=True, exist_ok=True)
                    with (
                        open(bundled_template, "r") as src,
                        open(template_path, "w") as dst,
                    ):
                        dst.write(src.read())
                except Exception as exc:
                    # Raise a clear FileNotFoundError to match previous
                    # behavior for missing templates; chain the original
                    # error so the real cause is visible in the traceback.
                    raise FileNotFoundError(
                        f"Template not found or could not be created: {template_path}"
                    ) from exc
            else:
                raise FileNotFoundError(
                    f"Bundled default template missing: {bundled_template}"
                )
        else:
            raise FileNotFoundError(f"Template not found: {template_path}")

    with open(template_path, "r") as f:
        template_content = f.read()

    env = create_jinja_environment()
    return env.from_string(template_content)
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def generate_workflow(
    package: Package,
    template_type: str,
    template: Template,
    output_dir: Path,
    diff_mode: bool = False,
) -> Optional[Path]:
    """Generate a workflow file for a single package."""

    # Autogenerated banner prepended to every rendered workflow.
    header = (
        "# This file was automatically generated by uv-workspace-codegen\n"
        "# For more information, see: https://github.com/epoch8/uv-workspace-codegen/blob/master/README.md\n"
        "# Do not edit this file manually - changes will be overwritten\n\n"
    )

    rendered = template.render(
        package=asdict(package) | {"template_type": template_type}
    )
    content = header + rendered

    # Workflow filename combines the template type and the package name.
    target = output_dir / f"{template_type}-{package.name}.yml"

    if not diff_mode:
        with open(target, "w") as out:
            out.write(content)

        print(f"Generated workflow: {target}")
        return target

    # Diff mode: compare against the file currently on disk, write nothing.
    old_lines: list[str] = []
    if target.exists():
        with open(target, "r") as existing:
            old_lines = existing.readlines()

    delta = list(
        difflib.unified_diff(
            old_lines,
            content.splitlines(keepends=True),
            fromfile=str(target),
            tofile=str(target),
        )
    )

    if delta:
        click.echo("".join(delta), nl=False)
    return target
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def create_jinja_environment() -> Environment:
    """Build a Jinja2 environment with the Ansible core filters loaded
    (this provides ``to_nice_yaml`` for templates)."""
    # Imported lazily so merely importing this module does not require the
    # jinja2-ansible-filters package to be importable.
    from jinja2_ansible_filters import AnsibleCoreFiltersExtension

    return Environment(extensions=[AnsibleCoreFiltersExtension])
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def is_workspace_root(path: Path) -> bool:
|
|
295
|
+
"""Check if a directory is a workspace root."""
|
|
296
|
+
pyproject_path = path / "pyproject.toml"
|
|
297
|
+
if not pyproject_path.exists():
|
|
298
|
+
return False
|
|
299
|
+
|
|
300
|
+
try:
|
|
301
|
+
with open(pyproject_path, "rb") as f:
|
|
302
|
+
pyproject_data = tomllib.load(f)
|
|
303
|
+
|
|
304
|
+
return (
|
|
305
|
+
"tool" in pyproject_data
|
|
306
|
+
and "uv" in pyproject_data["tool"]
|
|
307
|
+
and "workspace" in pyproject_data["tool"]["uv"]
|
|
308
|
+
)
|
|
309
|
+
except tomllib.TOMLDecodeError:
|
|
310
|
+
return False
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def find_workspace_root() -> Path:
    """Find the workspace root directory by looking for pyproject.toml with workspace config."""
    cwd = Path.cwd()

    # Walk upward from the current directory until a workspace root is found.
    for candidate in (cwd, *cwd.parents):
        if is_workspace_root(candidate):
            return candidate

    # No workspace root anywhere above: assume the current directory is it.
    return cwd
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
@click.command()
@click.argument(
    "root_dir",
    required=False,
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
)
@click.option(
    "--diff", is_flag=True, help="Show diff of changes without writing files."
)
def main(root_dir: Optional[Path], diff: bool):
    """Main function to generate all workflows."""

    # Get the workspace root directory
    if root_dir:
        workspace_dir = root_dir.resolve()
        if not is_workspace_root(workspace_dir):
            click.echo(
                f"Error: The provided directory '{workspace_dir}' is not a valid workspace root.",
                err=True,
            )
            sys.exit(1)
    else:
        workspace_dir = find_workspace_root()

    # Log which directory was discovered as the workspace root
    print(f"Workspace root discovered: {workspace_dir}")

    # Get workspace-level configuration
    workspace_config = get_workspace_config(workspace_dir)

    workflows_dir = workspace_dir / ".github" / "workflows"
    if not diff:
        workflows_dir.mkdir(parents=True, exist_ok=True)

    # Discover packages with uv-workspace-codegen configuration
    packages = discover_packages(workspace_dir, workspace_config)

    print(f"Found {len(packages)} items:")
    for pkg in packages:
        template_types_str = ", ".join(pkg.template_type)
        print(
            f"  - {pkg.name} (templates: {template_types_str}, package: {pkg.package_name}, tests: {pkg.generate_standard_pytest_step})"
        )

    # Cache templates by type so each template file is loaded only once.
    templates_cache: dict = {}
    generated_files: list[Path] = []

    for package in packages:
        try:
            for template_type in package.template_type:
                # Load template if not cached
                if template_type not in templates_cache:
                    templates_cache[template_type] = load_template(
                        template_type,
                        workspace_dir,
                        workspace_config,
                        diff_mode=diff,
                    )

                template = templates_cache[template_type]
                generated_file = generate_workflow(
                    package, template_type, template, workflows_dir, diff_mode=diff
                )
                # Only append if a file was actually generated/previewed.
                if generated_file:
                    generated_files.append(generated_file)
        except Exception as e:
            print(f"Error generating workflow for {package.name}: {e}")
            return 1

    # Remove previously generated workflows that no longer have a package.
    cleanup_stale_workflows(workflows_dir, generated_files, diff_mode=diff)

    if not diff:
        # Report the number of files written: a package with several template
        # types yields several files, so len(packages) would undercount.
        print(f"\nSuccessfully generated {len(generated_files)} workflow files!")
    return 0
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
def cleanup_stale_workflows(
    output_dir: Path, generated_files: list[Path], diff_mode: bool = False
) -> None:
    """
    Remove workflow files that were previously generated but are no longer needed.

    A file is considered stale if:
    1. It exists in the output directory
    2. It has a .yml or .yaml extension
    3. It contains the autogenerated header
    4. It is NOT in the list of currently generated files
    """
    # Compare on resolved absolute paths so relative/absolute mixes match.
    keep = {path.resolve() for path in generated_files}

    # Examine every YAML file in the output directory, both extensions.
    for pattern in ("*.yml", "*.yaml"):
        for candidate in output_dir.glob(pattern):
            _check_and_delete_stale_file(candidate, keep, diff_mode)
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
def _check_and_delete_stale_file(
|
|
429
|
+
file_path: Path, generated_paths: set[Path], diff_mode: bool = False
|
|
430
|
+
) -> None:
|
|
431
|
+
"""Helper to check if a single file is stale and delete it if so."""
|
|
432
|
+
# Skip if this file was just generated
|
|
433
|
+
if file_path.resolve() in generated_paths:
|
|
434
|
+
return
|
|
435
|
+
|
|
436
|
+
try:
|
|
437
|
+
# Check for autogenerated header
|
|
438
|
+
with open(file_path, "r") as f:
|
|
439
|
+
content = f.read(500) # Read first 500 chars should be enough for header
|
|
440
|
+
|
|
441
|
+
if "# This file was automatically generated by uv-workspace-codegen" in content:
|
|
442
|
+
if diff_mode:
|
|
443
|
+
print(f"Would remove stale workflow: {file_path}")
|
|
444
|
+
else:
|
|
445
|
+
print(f"Removing stale workflow: {file_path}")
|
|
446
|
+
file_path.unlink()
|
|
447
|
+
except Exception as e:
|
|
448
|
+
print(
|
|
449
|
+
f"Warning: Failed to check/remove potentially stale file {file_path}: {e}"
|
|
450
|
+
)
|
|
451
|
+
|
|
452
|
+
|
|
453
|
+
if __name__ == "__main__":
|
|
454
|
+
exit(main())
|
|
File without changes
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
name: Package {{ package.name }}
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
paths:
|
|
6
|
+
- "{{ package.path }}/**"
|
|
7
|
+
|
|
8
|
+
jobs:
|
|
9
|
+
test:
|
|
10
|
+
runs-on: ubuntu-latest
|
|
11
|
+
steps:
|
|
12
|
+
- name: Install the latest version of uv
|
|
13
|
+
uses: astral-sh/setup-uv@v6
|
|
14
|
+
|
|
15
|
+
- name: Install deps
|
|
16
|
+
run: |
|
|
17
|
+
cd {{ package.path }}
|
|
18
|
+
uv sync
|
|
19
|
+
|
|
20
|
+
- name: Check formatting
|
|
21
|
+
run: uv run ruff check {{ package.path }}
|
|
22
|
+
|
|
23
|
+
- name: Run tests
|
|
24
|
+
run: uv run pytest {{ package.path }}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: uv-workspace-codegen
|
|
3
|
+
Version: 0.6.0
|
|
4
|
+
Summary: Generate individual CI/CD workflows for packages in the workspace
|
|
5
|
+
Requires-Python: >=3.12
|
|
6
|
+
Requires-Dist: click>=8.1.7
|
|
7
|
+
Requires-Dist: jinja2-ansible-filters>=1.3.2
|
|
8
|
+
Requires-Dist: jinja2>=3.1.6
|
|
9
|
+
Requires-Dist: pyyaml>=6.0.1
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
uv_workspace_codegen/__init__.py,sha256=H6Cls48-Sx25zM8K92hMhSbzzHzgD6BES56cIahJZqY,262
|
|
2
|
+
uv_workspace_codegen/__main__.py,sha256=LE4jsy6WxXvZDbrLoxDOC4Bze8kIPip6HEKDCbZ0bFY,216
|
|
3
|
+
uv_workspace_codegen/main.py,sha256=z-F5_I_duPU_YHmMfyGEkWIRKgCC6ZzAtb4K6qJnHfs,15311
|
|
4
|
+
uv_workspace_codegen/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
+
uv_workspace_codegen/templates/package.template.yml,sha256=OApNVlrtAV7N8z6NmwvR8UAp0vnwZWrzp5ak3MlKk3g,471
|
|
6
|
+
uv_workspace_codegen-0.6.0.dist-info/METADATA,sha256=sHwKLz3ff7Sm8iw-tlReXcUmAAKYjCqmqPfR100p000,294
|
|
7
|
+
uv_workspace_codegen-0.6.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
8
|
+
uv_workspace_codegen-0.6.0.dist-info/entry_points.txt,sha256=MrdGiqNrcabGKOYoNVnK1RpnaTwLDJuTkP2psKSN_y4,72
|
|
9
|
+
uv_workspace_codegen-0.6.0.dist-info/RECORD,,
|