elspais-0.9.1-py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries; it is provided for informational purposes only and reflects the packages as they appear in their respective public registries.

elspais/commands/index.py
@@ -0,0 +1,166 @@
+ """
+ elspais.commands.index - INDEX.md management command.
+ """
+
+ import argparse
+ from pathlib import Path
+
+ from elspais.config.defaults import DEFAULT_CONFIG
+ from elspais.config.loader import find_config_file, get_spec_directories, load_config
+ from elspais.core.parser import RequirementParser
+ from elspais.core.patterns import PatternConfig
+
+
+ def run(args: argparse.Namespace) -> int:
+     """Run the index command."""
+     if not args.index_action:
+         print("Usage: elspais index {validate|regenerate}")
+         return 1
+
+     if args.index_action == "validate":
+         return run_validate(args)
+     elif args.index_action == "regenerate":
+         return run_regenerate(args)
+
+     return 1
+
+
+ def run_validate(args: argparse.Namespace) -> int:
+     """Validate INDEX.md accuracy."""
+     config_path = args.config or find_config_file(Path.cwd())
+     if config_path and config_path.exists():
+         config = load_config(config_path)
+     else:
+         config = DEFAULT_CONFIG
+
+     spec_dirs = get_spec_directories(args.spec_dir, config)
+     if not spec_dirs:
+         print("Error: No spec directories found")
+         return 1
+
+     spec_config = config.get("spec", {})
+     # Use first spec directory for INDEX.md location
+     index_file = spec_dirs[0] / spec_config.get("index_file", "INDEX.md")
+
+     if not index_file.exists():
+         print(f"INDEX.md not found: {index_file}")
+         return 1
+
+     # Parse all requirements
+     pattern_config = PatternConfig.from_dict(config.get("patterns", {}))
+     no_reference_values = spec_config.get("no_reference_values")
+     skip_files = spec_config.get("skip_files", [])
+     parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
+     requirements = parser.parse_directories(spec_dirs, skip_files=skip_files)
+
+     # Parse INDEX.md to find listed requirements
+     index_content = index_file.read_text(encoding="utf-8")
+     indexed_ids = set()
+
+     import re
+     for match in re.finditer(r"\|\s*([A-Z]+-(?:[A-Z]+-)?[a-zA-Z]?\d+)\s*\|", index_content):
+         indexed_ids.add(match.group(1))
+
+     # Compare
+     actual_ids = set(requirements.keys())
+     missing = actual_ids - indexed_ids
+     extra = indexed_ids - actual_ids
+
+     if missing:
+         print(f"Missing from INDEX.md ({len(missing)}):")
+         for req_id in sorted(missing):
+             print(f" - {req_id}")
+
+     if extra:
+         print(f"\nExtra in INDEX.md ({len(extra)}):")
+         for req_id in sorted(extra):
+             print(f" - {req_id}")
+
+     if not missing and not extra:
+         print(f"✓ INDEX.md is accurate ({len(actual_ids)} requirements)")
+         return 0
+
+     return 1
+
+
+ def run_regenerate(args: argparse.Namespace) -> int:
+     """Regenerate INDEX.md from requirements."""
+     config_path = args.config or find_config_file(Path.cwd())
+     if config_path and config_path.exists():
+         config = load_config(config_path)
+     else:
+         config = DEFAULT_CONFIG
+
+     spec_dirs = get_spec_directories(args.spec_dir, config)
+     if not spec_dirs:
+         print("Error: No spec directories found")
+         return 1
+
+     spec_config = config.get("spec", {})
+     # Use first spec directory for INDEX.md location
+     index_file = spec_dirs[0] / spec_config.get("index_file", "INDEX.md")
+
+     # Parse all requirements
+     pattern_config = PatternConfig.from_dict(config.get("patterns", {}))
+     no_reference_values = spec_config.get("no_reference_values")
+     skip_files = spec_config.get("skip_files", [])
+     parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
+     requirements = parser.parse_directories(spec_dirs, skip_files=skip_files)
+
+     if not requirements:
+         print("No requirements found")
+         return 1
+
+     # Generate INDEX.md
+     content = generate_index(requirements, config)
+     index_file.write_text(content, encoding="utf-8")
+
+     print(f"Regenerated: {index_file}")
+     print(f" {len(requirements)} requirements indexed")
+
+     return 0
+
+
+ def generate_index(requirements: dict, config: dict) -> str:
+     """Generate INDEX.md content."""
+     lines = [
+         "# Requirements Index",
+         "",
+         "This file provides a complete index of all requirements.",
+         "",
+     ]
+
+     # Group by type
+     prd_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["PRD", "PRODUCT"]}
+     ops_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["OPS", "OPERATIONS"]}
+     dev_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["DEV", "DEVELOPMENT"]}
+
+     for title, reqs in [
+         ("Product Requirements (PRD)", prd_reqs),
+         ("Operations Requirements (OPS)", ops_reqs),
+         ("Development Requirements (DEV)", dev_reqs),
+     ]:
+         if not reqs:
+             continue
+
+         lines.append(f"## {title}")
+         lines.append("")
+         lines.append("| ID | Title | File | Hash |")
+         lines.append("|---|---|---|---|")
+
+         for req_id, req in sorted(reqs.items()):
+             file_name = req.file_path.name if req.file_path else "-"
+             hash_val = req.hash or "-"
+             lines.append(f"| {req_id} | {req.title} | {file_name} | {hash_val} |")
+
+         lines.append("")
+
+     lines.extend([
+         "---",
+         "",
+         "*Generated by elspais*",
+     ])
+
+     return "\n".join(lines)
+
+
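
For orientation, here is a minimal sketch of what `generate_index()` produces. It assumes elspais 0.9.1 is installed; `FakeRequirement` and the two REQ IDs are made-up stand-ins, since the function only reads the `level`, `title`, `file_path`, and `hash` attributes of each requirement (the real objects come from `RequirementParser`).

```python
# Hypothetical usage sketch -- not part of the package.
from dataclasses import dataclass
from pathlib import Path
from typing import Optional

from elspais.commands.index import generate_index


@dataclass
class FakeRequirement:
    # Only the attributes generate_index() actually touches.
    level: str
    title: str
    file_path: Optional[Path]
    hash: Optional[str]


reqs = {
    "REQ-p00001": FakeRequirement("PRD", "Users can export data", Path("spec/prd-export.md"), "a1b2c3d4"),
    "REQ-d00001": FakeRequirement("DEV", "Export worker writes CSV", Path("spec/dev-export.md"), None),
}

# Prints a Markdown index with one table per requirement level;
# missing hashes fall back to "-".
print(generate_index(reqs, config={}))
```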

elspais/commands/init.py
@@ -0,0 +1,177 @@
+ """
+ elspais.commands.init - Initialize configuration command.
+
+ Creates .elspais.toml configuration file.
+ """
+
+ import argparse
+ from pathlib import Path
+ from typing import Optional
+
+
+ def run(args: argparse.Namespace) -> int:
+     """
+     Run the init command.
+
+     Args:
+         args: Parsed command line arguments
+
+     Returns:
+         Exit code (0 for success)
+     """
+     config_path = Path.cwd() / ".elspais.toml"
+
+     if config_path.exists() and not args.force:
+         print(f"Configuration file already exists: {config_path}")
+         print("Use --force to overwrite.")
+         return 1
+
+     # Determine project type
+     project_type = args.type or "core"
+     associated_prefix = args.associated_prefix
+
+     if project_type == "associated" and not associated_prefix:
+         print("Error: --associated-prefix required for associated repositories")
+         return 1
+
+     # Generate configuration
+     config_content = generate_config(project_type, associated_prefix)
+
+     # Write file
+     config_path.write_text(config_content)
+     print(f"Created configuration: {config_path}")
+
+     return 0
+
+
+ def generate_config(project_type: str, associated_prefix: Optional[str] = None) -> str:
+     """Generate configuration file content."""
+
+     if project_type == "associated":
+         if associated_prefix is None:
+             associated_prefix = "XXX"  # Placeholder if not provided
+         return f'''# elspais configuration - Associated Repository
+ # Generated by: elspais init --type associated
+
+ [project]
+ name = "{associated_prefix.lower()}-project"
+ type = "associated"
+
+ [associated]
+ prefix = "{associated_prefix}"
+ id_range = [1, 99999]
+
+ [core]
+ # Path to core repository (relative or absolute)
+ path = "../core"
+
+ [directories]
+ spec = "spec"
+ docs = "docs"
+ code = ["src", "lib"]
+
+ [patterns]
+ id_template = "{{prefix}}-{{associated}}{{type}}{{id}}"
+ prefix = "REQ"
+
+ [patterns.types]
+ prd = {{ id = "p", name = "Product Requirement", level = 1 }}
+ ops = {{ id = "o", name = "Operations Requirement", level = 2 }}
+ dev = {{ id = "d", name = "Development Requirement", level = 3 }}
+
+ [patterns.id_format]
+ style = "numeric"
+ digits = 5
+ leading_zeros = true
+
+ [patterns.associated]
+ enabled = true
+ length = 3
+ format = "uppercase"
+ separator = "-"
+
+ [rules.hierarchy]
+ allowed_implements = [
+     "dev -> ops, prd",
+     "ops -> prd",
+     "prd -> prd",
+ ]
+ cross_repo_implements = true
+ allow_orphans = true  # More permissive for associated development
+
+ [rules.format]
+ require_hash = true
+ require_assertions = true
+ '''
+
+     else:  # core
+         return '''# elspais configuration - Core Repository
+ # Generated by: elspais init
+
+ [project]
+ name = "my-project"
+ type = "core"
+
+ [directories]
+ spec = "spec"
+ docs = "docs"
+ database = "database"
+ code = ["src", "apps", "packages"]
+
+ [patterns]
+ id_template = "{prefix}-{type}{id}"
+ prefix = "REQ"
+
+ [patterns.types]
+ prd = { id = "p", name = "Product Requirement", level = 1 }
+ ops = { id = "o", name = "Operations Requirement", level = 2 }
+ dev = { id = "d", name = "Development Requirement", level = 3 }
+
+ [patterns.id_format]
+ style = "numeric"
+ digits = 5
+ leading_zeros = true
+
+ [patterns.associated]
+ enabled = true
+ length = 3
+ format = "uppercase"
+ separator = "-"
+
+ [spec]
+ index_file = "INDEX.md"
+ skip_files = ["README.md", "requirements-format.md", "INDEX.md"]
+
+ [spec.file_patterns]
+ "prd-*.md" = "prd"
+ "ops-*.md" = "ops"
+ "dev-*.md" = "dev"
+
+ [rules.hierarchy]
+ allowed_implements = [
+     "dev -> ops, prd",
+     "ops -> prd",
+     "prd -> prd",
+ ]
+ allow_circular = false
+ allow_orphans = false
+
+ [rules.format]
+ require_hash = true
+ require_rationale = false
+ require_assertions = true
+ require_status = true
+ allowed_statuses = ["Active", "Draft", "Deprecated", "Superseded"]
+
+ [validation]
+ hash_algorithm = "sha256"
+ hash_length = 8
+
+ [traceability]
+ output_formats = ["markdown", "html"]
+ scan_patterns = [
+     "database/**/*.sql",
+     "src/**/*.py",
+     "apps/**/*.dart",
+ ]
+ '''
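
One detail worth calling out in `generate_config()`: the associated-repository template is an f-string, so values such as the prefix are substituted at generation time, while the doubled braces in `id_template` survive as literal `{...}` placeholders for elspais's own ID templating. A small check of that behaviour, assuming elspais 0.9.1 is installed (the `ACME` prefix is just an example value):

```python
# Hypothetical usage sketch -- not part of the package.
from elspais.commands.init import generate_config

toml_text = generate_config("associated", "ACME")

# f-string substitutions are applied...
assert 'prefix = "ACME"' in toml_text
assert 'name = "acme-project"' in toml_text

# ...while doubled braces come through as literal placeholders.
assert 'id_template = "{prefix}-{associated}{type}{id}"' in toml_text
print("associated-repo template rendered as expected")
```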

elspais/commands/rules_cmd.py
@@ -0,0 +1,120 @@
+ """
+ elspais.commands.rules_cmd - Content rules management command.
+
+ View and manage content rule files.
+ """
+
+ import argparse
+ import sys
+ from pathlib import Path
+ from typing import Optional
+
+ from elspais.config.loader import find_config_file, load_config
+ from elspais.core.content_rules import load_content_rules, load_content_rule
+
+
+ def run(args: argparse.Namespace) -> int:
+     """
+     Run the rules command.
+
+     Args:
+         args: Parsed command line arguments
+
+     Returns:
+         Exit code (0 for success)
+     """
+     action = getattr(args, "rules_action", None)
+
+     if action == "list":
+         return cmd_list(args)
+     elif action == "show":
+         return cmd_show(args)
+     else:
+         # Default to list
+         return cmd_list(args)
+
+
+ def cmd_list(args: argparse.Namespace) -> int:
+     """List configured content rules."""
+     config_path = _get_config_path(args)
+
+     if not config_path:
+         print("No configuration file found. Run 'elspais init' to create one.")
+         return 1
+
+     config = load_config(config_path)
+     base_path = config_path.parent
+
+     rules = load_content_rules(config, base_path)
+
+     if not rules:
+         print("No content rules configured.")
+         print("\nTo add content rules, use:")
+         print(' elspais config add rules.content_rules "spec/AI-AGENT.md"')
+         return 0
+
+     print("Content Rules:")
+     print("-" * 60)
+     for rule in rules:
+         rel_path = rule.file_path.relative_to(base_path) if base_path in rule.file_path.parents else rule.file_path
+         print(f" {rel_path}")
+         print(f" Title: {rule.title}")
+         print(f" Type: {rule.type}")
+         if rule.applies_to:
+             print(f" Applies to: {', '.join(rule.applies_to)}")
+         print()
+
+     return 0
+
+
+ def cmd_show(args: argparse.Namespace) -> int:
+     """Show content of a specific content rule."""
+     config_path = _get_config_path(args)
+     file_name = args.file
+
+     if not config_path:
+         print("No configuration file found.", file=sys.stderr)
+         return 1
+
+     config = load_config(config_path)
+     base_path = config_path.parent
+
+     # Load all rules and find the matching one
+     rules = load_content_rules(config, base_path)
+
+     matching_rule = None
+     for rule in rules:
+         if rule.file_path.name == file_name or str(rule.file_path).endswith(file_name):
+             matching_rule = rule
+             break
+
+     # If not in config, try loading directly
+     if not matching_rule:
+         file_path = base_path / file_name
+         if file_path.exists():
+             try:
+                 matching_rule = load_content_rule(file_path)
+             except Exception as e:
+                 print(f"Error loading file: {e}", file=sys.stderr)
+                 return 1
+         else:
+             print(f"Content rule not found: {file_name}", file=sys.stderr)
+             return 1
+
+     # Display the rule
+     print(f"# {matching_rule.title}")
+     print(f"Type: {matching_rule.type}")
+     if matching_rule.applies_to:
+         print(f"Applies to: {', '.join(matching_rule.applies_to)}")
+     print(f"File: {matching_rule.file_path}")
+     print("-" * 60)
+     print(matching_rule.content)
+
+     return 0
+
+
+ def _get_config_path(args: argparse.Namespace) -> Optional[Path]:
+     """Get configuration file path from args or by discovery."""
+     if hasattr(args, "config") and args.config:
+         return args.config
+     return find_config_file(Path.cwd())
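
The lookup in `cmd_show()` accepts either a bare file name or any trailing fragment of a configured rule path. A standalone sketch of that matching rule, using only the standard library (the two paths are made up; `spec/AI-AGENT.md` mirrors the hint printed by `cmd_list()`):

```python
# Hypothetical illustration of the suffix matching used by cmd_show().
from pathlib import Path

configured = [Path("spec/AI-AGENT.md"), Path("docs/rules/style.md")]


def matches(rule_path: Path, file_name: str) -> bool:
    # Same predicate as cmd_show(): exact file name, or a trailing
    # fragment of the full path.
    return rule_path.name == file_name or str(rule_path).endswith(file_name)


print([p for p in configured if matches(p, "AI-AGENT.md")])      # the spec rule
print([p for p in configured if matches(p, "rules/style.md")])   # matched by path suffix
print([p for p in configured if matches(p, "style")])            # no match: not a full suffix
```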

elspais/commands/trace.py
@@ -0,0 +1,208 @@
+ """
+ elspais.commands.trace - Generate traceability matrix command.
+ """
+
+ import argparse
+ import sys
+ from pathlib import Path
+ from typing import Dict, List
+
+ from elspais.config.defaults import DEFAULT_CONFIG
+ from elspais.config.loader import find_config_file, get_spec_directories, load_config
+ from elspais.core.models import Requirement
+ from elspais.core.parser import RequirementParser
+ from elspais.core.patterns import PatternConfig
+
+
+ def run(args: argparse.Namespace) -> int:
+     """Run the trace command."""
+     # Load configuration
+     config_path = args.config or find_config_file(Path.cwd())
+     if config_path and config_path.exists():
+         config = load_config(config_path)
+     else:
+         config = DEFAULT_CONFIG
+
+     # Get spec directories
+     spec_dirs = get_spec_directories(args.spec_dir, config)
+     if not spec_dirs:
+         print("Error: No spec directories found", file=sys.stderr)
+         return 1
+
+     # Parse requirements
+     pattern_config = PatternConfig.from_dict(config.get("patterns", {}))
+     spec_config = config.get("spec", {})
+     no_reference_values = spec_config.get("no_reference_values")
+     skip_files = spec_config.get("skip_files", [])
+     parser = RequirementParser(pattern_config, no_reference_values=no_reference_values)
+     requirements = parser.parse_directories(spec_dirs, skip_files=skip_files)
+
+     if not requirements:
+         print("No requirements found.")
+         return 1
+
+     # Determine output format
+     output_format = args.format
+
+     # Generate output
+     if output_format in ["markdown", "both"]:
+         md_output = generate_markdown_matrix(requirements)
+         if args.output:
+             if output_format == "markdown":
+                 output_path = args.output
+             else:
+                 output_path = args.output.with_suffix(".md")
+         else:
+             output_path = Path("traceability.md")
+         output_path.write_text(md_output)
+         print(f"Generated: {output_path}")
+
+     if output_format in ["html", "both"]:
+         html_output = generate_html_matrix(requirements)
+         if args.output:
+             if output_format == "html":
+                 output_path = args.output
+             else:
+                 output_path = args.output.with_suffix(".html")
+         else:
+             output_path = Path("traceability.html")
+         output_path.write_text(html_output)
+         print(f"Generated: {output_path}")
+
+     if output_format == "csv":
+         csv_output = generate_csv_matrix(requirements)
+         output_path = args.output or Path("traceability.csv")
+         output_path.write_text(csv_output)
+         print(f"Generated: {output_path}")
+
+     return 0
+
+
+ def generate_markdown_matrix(requirements: Dict[str, Requirement]) -> str:
+     """Generate Markdown traceability matrix."""
+     lines = ["# Traceability Matrix", "", "## Requirements Hierarchy", ""]
+
+     # Group by type
+     prd_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["PRD", "PRODUCT"]}
+     ops_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["OPS", "OPERATIONS"]}
+     dev_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["DEV", "DEVELOPMENT"]}
+
+     # PRD table
+     if prd_reqs:
+         lines.extend(["### Product Requirements", ""])
+         lines.append("| ID | Title | Status | Implemented By |")
+         lines.append("|---|---|---|---|")
+         for req_id, req in sorted(prd_reqs.items()):
+             impl_by = find_implementers(req_id, requirements)
+             impl_str = ", ".join(impl_by) if impl_by else "-"
+             lines.append(f"| {req_id} | {req.title} | {req.status} | {impl_str} |")
+         lines.append("")
+
+     # OPS table
+     if ops_reqs:
+         lines.extend(["### Operations Requirements", ""])
+         lines.append("| ID | Title | Implements | Status |")
+         lines.append("|---|---|---|---|")
+         for req_id, req in sorted(ops_reqs.items()):
+             impl_str = ", ".join(req.implements) if req.implements else "-"
+             lines.append(f"| {req_id} | {req.title} | {impl_str} | {req.status} |")
+         lines.append("")
+
+     # DEV table
+     if dev_reqs:
+         lines.extend(["### Development Requirements", ""])
+         lines.append("| ID | Title | Implements | Status |")
+         lines.append("|---|---|---|---|")
+         for req_id, req in sorted(dev_reqs.items()):
+             impl_str = ", ".join(req.implements) if req.implements else "-"
+             lines.append(f"| {req_id} | {req.title} | {impl_str} | {req.status} |")
+         lines.append("")
+
+     lines.extend(["---", "*Generated by elspais*"])
+     return "\n".join(lines)
+
+
+ def generate_html_matrix(requirements: Dict[str, Requirement]) -> str:
+     """Generate HTML traceability matrix."""
+     html = """<!DOCTYPE html>
+ <html lang="en">
+ <head>
+ <meta charset="UTF-8">
+ <title>Traceability Matrix</title>
+ <style>
+ body { font-family: -apple-system, BlinkMacSystemFont, sans-serif; margin: 2rem; }
+ h1 { color: #333; }
+ table { border-collapse: collapse; width: 100%; margin: 1rem 0; }
+ th, td { border: 1px solid #ddd; padding: 0.5rem; text-align: left; }
+ th { background: #f5f5f5; }
+ tr:hover { background: #f9f9f9; }
+ .status-active { color: green; }
+ .status-draft { color: orange; }
+ .status-deprecated { color: red; }
+ </style>
+ </head>
+ <body>
+ <h1>Traceability Matrix</h1>
+ """
+
+     # Group by type
+     prd_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["PRD", "PRODUCT"]}
+     ops_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["OPS", "OPERATIONS"]}
+     dev_reqs = {k: v for k, v in requirements.items() if v.level.upper() in ["DEV", "DEVELOPMENT"]}
+
+     for title, reqs in [
+         ("Product Requirements", prd_reqs),
+         ("Operations Requirements", ops_reqs),
+         ("Development Requirements", dev_reqs),
+     ]:
+         if not reqs:
+             continue
+
+         html += f" <h2>{title}</h2>\n"
+         html += " <table>\n"
+         html += " <tr><th>ID</th><th>Title</th><th>Implements</th><th>Status</th></tr>\n"
+
+         for req_id, req in sorted(reqs.items()):
+             impl_str = ", ".join(req.implements) if req.implements else "-"
+             status_class = f"status-{req.status.lower()}"
+             subdir_attr = f'data-subdir="{req.subdir}"'
+             html += (
+                 f' <tr {subdir_attr}><td>{req_id}</td><td>{req.title}</td>'
+                 f'<td>{impl_str}</td><td class="{status_class}">{req.status}</td></tr>\n'
+             )
+
+         html += " </table>\n"
+
+     html += """ <hr>
+ <p><em>Generated by elspais</em></p>
+ </body>
+ </html>"""
+     return html
+
+
+ def generate_csv_matrix(requirements: Dict[str, Requirement]) -> str:
+     """Generate CSV traceability matrix."""
+     lines = ["ID,Title,Level,Status,Implements,Subdir"]
+
+     for req_id, req in sorted(requirements.items()):
+         impl_str = ";".join(req.implements) if req.implements else ""
+         title = req.title.replace('"', '""')
+         lines.append(f'"{req_id}","{title}","{req.level}","{req.status}","{impl_str}","{req.subdir}"')
+
+     return "\n".join(lines)
+
+
+ def find_implementers(req_id: str, requirements: Dict[str, Requirement]) -> List[str]:
+     """Find requirements that implement the given requirement."""
+     implementers = []
+     short_id = req_id.split("-")[-1] if "-" in req_id else req_id
+
+     for other_id, other_req in requirements.items():
+         for impl in other_req.implements:
+             if impl == req_id or impl == short_id or impl.endswith(short_id):
+                 implementers.append(other_id)
+                 break
+
+     return implementers
+
+
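
Finally, `find_implementers()` links a PRD row to its children even when an `Implements:` reference uses a shortened ID, because it falls back to suffix matching. A minimal sketch of that behaviour, assuming elspais 0.9.1 is installed; the IDs and `SimpleNamespace` stand-ins are illustrative only (real objects come from `RequirementParser`):

```python
# Hypothetical usage sketch -- not part of the package.
from types import SimpleNamespace

from elspais.commands.trace import find_implementers

reqs = {
    "REQ-p00001": SimpleNamespace(implements=[]),
    "REQ-o00001": SimpleNamespace(implements=["REQ-p00001"]),  # exact ID reference
    "REQ-d00001": SimpleNamespace(implements=["p00001"]),      # short-ID reference
}

# Both children are reported, the second via the endswith() fallback.
print(find_implementers("REQ-p00001", reqs))  # ['REQ-o00001', 'REQ-d00001']
```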