@qa-gentic/agents 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +203 -0
- package/bin/postinstall.js +75 -0
- package/bin/qa-stlc.js +76 -0
- package/package.json +48 -0
- package/skills/qa-stlc/AGENT-BEHAVIOR.md +373 -0
- package/skills/qa-stlc/deduplication-protocol.md +303 -0
- package/skills/qa-stlc/generate-gherkin.md +550 -0
- package/skills/qa-stlc/generate-playwright-code.md +439 -0
- package/skills/qa-stlc/generate-test-cases.md +176 -0
- package/skills/qa-stlc/write-helix-files.md +349 -0
- package/src/cmd-init.js +84 -0
- package/src/cmd-mcp-config.js +177 -0
- package/src/cmd-skills.js +124 -0
- package/src/cmd-verify.js +129 -0
- package/src/qa_stlc_agents/__init__.py +0 -0
- package/src/qa_stlc_agents/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/__pycache__/server.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/server.py +502 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/tools/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/tools/__pycache__/ado_gherkin.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_gherkin_generator/tools/ado_gherkin.py +854 -0
- package/src/qa_stlc_agents/agent_helix_writer/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/__pycache__/server.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/server.py +529 -0
- package/src/qa_stlc_agents/agent_helix_writer/tools/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/tools/__pycache__/helix_write.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_helix_writer/tools/helix_write.py +622 -0
- package/src/qa_stlc_agents/agent_playwright_generator/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/__pycache__/server.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/server.py +2771 -0
- package/src/qa_stlc_agents/agent_playwright_generator/tools/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/tools/__pycache__/ado_attach.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_playwright_generator/tools/ado_attach.py +62 -0
- package/src/qa_stlc_agents/agent_test_case_manager/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/__pycache__/server.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/server.py +483 -0
- package/src/qa_stlc_agents/agent_test_case_manager/tools/__init__.py +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/tools/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/tools/__pycache__/ado_workitem.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/agent_test_case_manager/tools/ado_workitem.py +302 -0
- package/src/qa_stlc_agents/shared/__init__.py +0 -0
- package/src/qa_stlc_agents/shared/__pycache__/__init__.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/shared/__pycache__/auth.cpython-310.pyc +0 -0
- package/src/qa_stlc_agents/shared/auth.py +119 -0
|
@@ -0,0 +1,622 @@
|
|
|
1
|
+
"""
|
|
2
|
+
helix_write.py — File-system write tool for the Helix QA framework.
|
|
3
|
+
|
|
4
|
+
Public API:
|
|
5
|
+
inspect_helix_project(helix_root) -> dict
|
|
6
|
+
write_files_to_helix(helix_root, files, mode) -> dict
|
|
7
|
+
read_helix_file(helix_root, relative_path) -> dict
|
|
8
|
+
list_helix_tree(helix_root) -> dict
|
|
9
|
+
|
|
10
|
+
Framework existence detection
|
|
11
|
+
inspect_helix_project() returns framework_state: "absent"|"partial"|"present"
|
|
12
|
+
and a recommendation of "scaffold_and_tests" or "tests_only".
|
|
13
|
+
write_files_to_helix() accepts that recommendation as its `mode` argument.
|
|
14
|
+
|
|
15
|
+
Infrastructure file protection
|
|
16
|
+
In mode="tests_only" all six utils/locators/*.ts files are always skipped.
|
|
17
|
+
In mode="scaffold_and_tests" they are written only if absent on disk,
|
|
18
|
+
unless force_scaffold=True is passed.
|
|
19
|
+
|
|
20
|
+
Within-file deduplication
|
|
21
|
+
For locators.ts : new const-object entries are merged; duplicate keys skipped.
|
|
22
|
+
For *.steps.ts : new step blocks are appended; duplicate regex patterns skipped.
|
|
23
|
+
For *.page.ts : new async methods are appended; duplicate method names skipped.
|
|
24
|
+
|
|
25
|
+
Interface adapter
|
|
26
|
+
The generator emits repo.updateHealed / repo.incrementSuccess / repo.getBBox etc.
|
|
27
|
+
The existing Helix LocatorRepository only has setHealed / getBestSelector / getHealed.
|
|
28
|
+
_adapt_to_helix_interface() rewrites generated content before writing so it
|
|
29
|
+
compiles cleanly against the Helix interface without manual edits.
|
|
30
|
+
"""
|
|
31
|
+
from __future__ import annotations
|
|
32
|
+
|
|
33
|
+
import re
|
|
34
|
+
from pathlib import Path
|
|
35
|
+
from typing import Any
|
|
36
|
+
|
|
37
|
+
# ── Infrastructure file names ──────────────────────────────────────────────
|
|
38
|
+
|
|
39
|
+
# The six self-healing infrastructure classes that live under
# src/utils/locators/.  These are framework code, not generated test code,
# and are protected from accidental overwrites by write_files_to_helix().
_INFRA_FILES = {
    "LocatorHealer.ts",
    "LocatorRepository.ts",
    "TimingHealer.ts",
    "VisualIntentChecker.ts",
    "DevToolsHealer.ts",
    "HealingDashboard.ts",
}
# Without these two the framework cannot run; their absence drives the
# "absent"/"partial" verdicts in inspect_helix_project().
_REQUIRED_INFRA = {"LocatorHealer.ts", "LocatorRepository.ts"}
_OPTIONAL_INFRA = _INFRA_FILES - _REQUIRED_INFRA

# File-kind classifiers: used both to route a generated file key to its
# destination directory (_resolve_destination) and to pick the merge
# strategy (write_files_to_helix / update_helix_file).
_INFRA_RE = re.compile(r"(LocatorHealer|LocatorRepository|TimingHealer|VisualIntentChecker|DevToolsHealer|HealingDashboard)\.ts$")
_LOCATOR_RE = re.compile(r"locators\.ts$", re.IGNORECASE)   # *.locators.ts selector maps
_PAGE_RE = re.compile(r"page\.ts$", re.IGNORECASE)          # *.page.ts page objects
_STEPS_RE = re.compile(r"steps\.ts$", re.IGNORECASE)        # *.steps.ts step definitions
_FEATURE_RE = re.compile(r"\.feature$", re.IGNORECASE)      # Gherkin feature files
_CUCUMBER_RE = re.compile(r"cucumber", re.IGNORECASE)       # cucumber.config.ts
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# ── Interface adapter ─────────────────────────────────────────────────────
|
|
59
|
+
|
|
60
|
+
def _adapt_to_helix_interface(content: str) -> str:
    """
    Rewrite generated TypeScript so it compiles against the Helix-QA
    LocatorRepository / LocatorHealer interface.

    Generator emits → Helix expects
    ─────────────────────────────────────────────────────────────────
    repo.updateHealed(k,s,…) → repo.setHealed(k, s)
    repo.getBBox(key) → null
    repo.incrementSuccess(k) → (removed)
    repo.incrementFailure(k) → (removed)
    repo.queueSuggestion(…) → (removed)
    repo.updateBoundingBox(…) → (removed)
    this.devtools.captureBoundingBox(…) → (removed)
    fixture().logger → this.logger
    fixture().locatorRepository → this.repo
    fixture().page → this.page
    import { Logger } from "winston" → import { HealerLogger }
    import EnvironmentManager → import { environment } from @config/environment
    new EnvironmentManager() → environment
    this.env.getBaseUrl() → environment.getConfig().baseUrl
    this.env.getPath('x') → "x"

    NOTE: these are plain regex rewrites applied in a fixed order; they
    assume single-line call sites (multi-line argument lists will not be
    matched by the `[^)]*` argument patterns).
    """
    # repo.updateHealed(key, sel, ...) → repo.setHealed(key, sel)
    # Captures the first two arguments and drops any extras.
    content = re.sub(
        r"this\.repo\.updateHealed\(\s*([^,)]+),\s*([^,)]+)(?:,[^)]+)?\)",
        r"this.repo.setHealed(\1, \2)",
        content,
    )
    # repo.getBBox(key) → null — Helix has no bounding-box store.
    content = re.sub(r"this\.repo\.getBBox\([^)]*\)", "null", content)

    # Remove single-statement method calls that have no Helix equivalent.
    # The whole line is dropped, including an optional trailing .catch(...).
    for method in ("incrementSuccess", "incrementFailure", "queueSuggestion",
                   "updateBoundingBox", "captureBoundingBox"):
        content = re.sub(
            rf"^\s*(?:this\.repo\.|(?:this\.\w+\.)?){method}\([^)]*\)(?:\.catch\([^)]*\))?;?\s*\n",
            "",
            content,
            flags=re.MULTILINE,
        )

    # fixture() references → class members
    content = re.sub(r"fixture\(\)\.logger\b", "this.logger", content)
    content = re.sub(r"fixture\(\)\.locatorRepository\b", "this.repo", content)
    content = re.sub(r"fixture\(\)\.page\b", "this.page", content)

    # Winston Logger → Helix HealerLogger
    content = content.replace(
        'import { Logger } from "winston";',
        'import { HealerLogger } from "./LocatorHealer";',
    )
    # (?!\s*=) leaves assignments such as `Logger = ...` untouched.
    content = re.sub(r"\bLogger\b(?!\s*=)", "HealerLogger", content)

    # EnvironmentManager → Helix environment singleton
    content = content.replace(
        'import { EnvironmentManager } from "@helper/environment/environmentManager.util";',
        'import { environment } from "@config/environment";',
    )
    # Drop the constructor assignment line entirely, then rewrite remaining uses.
    content = re.sub(r"\s*this\.env\s*=\s*new EnvironmentManager\(\);?\s*\n", "\n", content)
    content = content.replace("new EnvironmentManager()", "environment")
    content = content.replace("this.env.getBaseUrl()", "environment.getConfig().baseUrl")
    # this.env.getPath('x') → "x" — inline the literal path argument.
    content = re.sub(r"this\.env\.getPath\(['\"]([^'\"]+)['\"]\)", r'"\1"', content)

    return content
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
# ── Path resolution ───────────────────────────────────────────────────────
|
|
128
|
+
|
|
129
|
+
def _resolve_destination(helix_root: Path, file_key: str) -> Path:
    """Map a generator file key onto its canonical path under *helix_root*.

    Routing is by file-name convention, checked in priority order:
    cucumber config, infrastructure classes, .feature files, step files,
    locator files, page objects, then a src/-anchored fallback for
    anything unrecognised.
    """
    cleaned = file_key.strip()
    leaf = Path(cleaned).name

    if _CUCUMBER_RE.search(cleaned):
        return helix_root / "src" / "config" / "cucumber.config.ts"
    if _INFRA_RE.search(cleaned):
        return helix_root / "src" / "utils" / "locators" / leaf
    if _FEATURE_RE.search(cleaned):
        return helix_root / "src" / "test" / "features" / leaf
    if _STEPS_RE.search(cleaned):
        return helix_root / "src" / "test" / "steps" / leaf
    if _LOCATOR_RE.search(cleaned):
        # Derive the page stem from the first path segment that is neither a
        # structural directory name nor the .ts file itself; default "page".
        stem = "page"
        for segment in Path(cleaned).parts:
            if segment in ("src", "pages", "locators", "utils"):
                continue
            if segment.endswith(".ts"):
                continue
            stem = segment
            break
        return helix_root / "src" / "locators" / f"{stem}.locators.ts"
    if _PAGE_RE.search(cleaned):
        return helix_root / "src" / "pages" / leaf

    # Fallback: anchor unknown keys under src/.
    rel = cleaned.lstrip("/")
    if not rel.startswith("src/"):
        rel = f"src/{rel}"
    return helix_root / rel
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
# ── Within-file merge helpers ─────────────────────────────────────────────
|
|
155
|
+
|
|
156
|
+
def _merge_locators(existing: str, generated: str) -> tuple[str, list[str], list[str]]:
|
|
157
|
+
"""Append locator entries whose keys are not already in existing."""
|
|
158
|
+
existing_keys = set(re.findall(r"^\s{2}(\w+)\s*:", existing, re.MULTILINE))
|
|
159
|
+
|
|
160
|
+
new_entries: list[tuple[str, str]] = []
|
|
161
|
+
current_key: str | None = None
|
|
162
|
+
current_lines: list[str] = []
|
|
163
|
+
|
|
164
|
+
for line in generated.splitlines():
|
|
165
|
+
key_match = re.match(r"^ (\w+)\s*:\s*\{", line)
|
|
166
|
+
if key_match:
|
|
167
|
+
if current_key:
|
|
168
|
+
new_entries.append((current_key, "\n".join(current_lines)))
|
|
169
|
+
current_key = key_match.group(1)
|
|
170
|
+
current_lines = [line]
|
|
171
|
+
elif current_key:
|
|
172
|
+
current_lines.append(line)
|
|
173
|
+
if re.match(r"^\s*\},?\s*$", line):
|
|
174
|
+
new_entries.append((current_key, "\n".join(current_lines)))
|
|
175
|
+
current_key = None
|
|
176
|
+
current_lines = []
|
|
177
|
+
|
|
178
|
+
added: list[str] = []
|
|
179
|
+
skipped: list[str] = []
|
|
180
|
+
append_lines: list[str] = []
|
|
181
|
+
|
|
182
|
+
for key, block in new_entries:
|
|
183
|
+
if key in existing_keys:
|
|
184
|
+
skipped.append(key)
|
|
185
|
+
else:
|
|
186
|
+
append_lines.append(block)
|
|
187
|
+
added.append(key)
|
|
188
|
+
|
|
189
|
+
if not append_lines:
|
|
190
|
+
return existing, added, skipped
|
|
191
|
+
|
|
192
|
+
insertion = "\n" + "\n".join(append_lines)
|
|
193
|
+
merged = re.sub(r"(\n\}\s*as\s+const\s*;)", insertion + r"\1", existing, count=1)
|
|
194
|
+
if merged == existing:
|
|
195
|
+
merged = existing.rstrip() + "\n" + insertion + "\n"
|
|
196
|
+
return merged, added, skipped
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def _merge_steps(existing: str, generated: str) -> tuple[str, list[str], list[str]]:
|
|
200
|
+
"""Append step blocks whose regex pattern is not already in existing."""
|
|
201
|
+
existing_patterns = set(re.findall(r"/\^([^/]+)\$/", existing))
|
|
202
|
+
|
|
203
|
+
step_block_re = re.compile(r"^(Given|When|Then)\(", re.MULTILINE)
|
|
204
|
+
parts = step_block_re.split(generated)
|
|
205
|
+
|
|
206
|
+
blocks: list[tuple[str, str]] = []
|
|
207
|
+
i = 1
|
|
208
|
+
while i + 1 < len(parts):
|
|
209
|
+
keyword = parts[i]
|
|
210
|
+
body = parts[i + 1]
|
|
211
|
+
blocks.append((keyword, keyword + "(" + body))
|
|
212
|
+
i += 2
|
|
213
|
+
|
|
214
|
+
added: list[str] = []
|
|
215
|
+
skipped: list[str] = []
|
|
216
|
+
new_blocks: list[str] = []
|
|
217
|
+
|
|
218
|
+
for _kw, block in blocks:
|
|
219
|
+
pat_match = re.search(r"/\^([^/]+)\$/", block)
|
|
220
|
+
pattern = pat_match.group(1) if pat_match else block[:40]
|
|
221
|
+
if pattern in existing_patterns:
|
|
222
|
+
skipped.append(pattern)
|
|
223
|
+
else:
|
|
224
|
+
new_blocks.append(block)
|
|
225
|
+
added.append(pattern)
|
|
226
|
+
|
|
227
|
+
merged = existing.rstrip() + ("\n\n" + "\n".join(new_blocks) if new_blocks else "") + "\n"
|
|
228
|
+
return merged, added, skipped
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def _merge_page_methods(existing: str, generated: str) -> tuple[str, list[str], list[str]]:
    """Append async methods whose names are not already in existing.

    Returns (merged_text, added_method_names, skipped_method_names).
    Duplicate detection is by method name only, so a regenerated method
    with a changed body is skipped, not updated.
    """
    existing_methods = set(re.findall(r"async\s+(\w+)\s*\(", existing))

    # Split on indented `async name(...) ... {` signatures, keeping each
    # signature as its own element via the capture group.
    # NOTE(review): the leading single space assumes one-space method
    # indentation in the generated class body — confirm against generator output.
    method_re = re.compile(r"( async\s+\w+\s*\([^)]*\)[^{]*\{)", re.MULTILINE)
    raw_parts = method_re.split(generated)

    method_blocks: list[tuple[str, str]] = []
    i = 1
    while i + 1 < len(raw_parts):
        sig = raw_parts[i]
        body_and_rest = raw_parts[i + 1]
        # Brace-count to find the end of this method body; depth starts at 1
        # for the opening brace already consumed in `sig`.
        depth, end = 1, 0
        for ch in body_and_rest:
            if ch == "{":
                depth += 1
            elif ch == "}":
                depth -= 1
                if depth == 0:
                    break
            end += 1
        # body_and_rest[: end + 1] includes the matching closing brace.
        method_blocks.append((sig, sig + body_and_rest[: end + 1]))
        i += 2

    added: list[str] = []
    skipped: list[str] = []
    new_methods: list[str] = []

    for sig, block in method_blocks:
        name_match = re.search(r"async\s+(\w+)\s*\(", sig)
        # Fall back to a signature prefix when the name cannot be extracted.
        name = name_match.group(1) if name_match else sig[:30]
        if name in existing_methods:
            skipped.append(name)
        else:
            new_methods.append(block)
            added.append(name)

    if not new_methods:
        return existing, added, skipped

    # Splice the new methods in just before the class's closing brace.
    insertion = "\n" + "\n\n".join(new_methods) + "\n"
    merged = re.sub(r"\n\}\s*\n?$", insertion + "\n}\n", existing, count=1)
    if merged == existing:
        # Fallback: no trailing "}" matched — append and re-close the class.
        merged = existing.rstrip() + insertion + "}\n"
    return merged, added, skipped
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
# ── Framework state inspection ────────────────────────────────────────────
|
|
279
|
+
|
|
280
|
+
def inspect_helix_project(helix_root: str) -> dict[str, Any]:
    """
    Examine the Helix-QA project root and report whether the self-healing
    framework is installed.

    The returned dict carries:
      framework_state  "absent" | "partial" | "present"
      recommendation   "scaffold_and_tests" | "tests_only"
    plus the lists of infrastructure files found and missing, a has_src
    flag, and a human-readable message.

    "absent"  — root missing, or src/ missing.
    "partial" — src/ exists but a required infra file is missing.
    "present" — both LocatorHealer.ts and LocatorRepository.ts exist.
    """
    root = Path(helix_root).expanduser().resolve()

    if not root.exists():
        return {
            "framework_state": "absent",
            "missing_infra": sorted(_INFRA_FILES),
            "existing_infra": [],
            "has_src": False,
            "recommendation": "scaffold_and_tests",
            "message": (
                f"helix_root '{root}' does not exist. "
                "Create the directory and use mode='scaffold_and_tests'."
            ),
        }

    infra_dir = root / "src" / "utils" / "locators"
    has_src = (root / "src").exists()

    # Which infra classes are actually on disk?
    found = {name for name in _INFRA_FILES if (infra_dir / name).exists()}
    existing_infra = sorted(found)
    missing_infra = sorted(_INFRA_FILES - found)
    missing_required = sorted(_REQUIRED_INFRA - found)

    if not has_src or missing_required:
        verdict = "partial" if has_src else "absent"
        return {
            "framework_state": verdict,
            "missing_infra": missing_infra,
            "existing_infra": existing_infra,
            "has_src": has_src,
            "recommendation": "scaffold_and_tests",
            "message": (
                f"Helix-QA framework is {verdict}. "
                f"Missing required files: {missing_required}. "
                "Run scaffold_locator_repository, then write_helix_files with mode='scaffold_and_tests'."
            ),
        }

    return {
        "framework_state": "present",
        "missing_infra": missing_infra,
        "existing_infra": existing_infra,
        "has_src": True,
        "recommendation": "tests_only",
        "message": (
            "Helix-QA framework is present. "
            "Infrastructure files will not be touched. "
            "Only test files will be written or merged."
        ),
    }
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
# ── Main write function ────────────────────────────────────────────────────
|
|
343
|
+
|
|
344
|
+
def write_files_to_helix(
    helix_root: str,
    files: dict[str, str],
    mode: str = "scaffold_and_tests",
    force_scaffold: bool = False,
) -> dict[str, Any]:
    """
    Write generated files into the Helix-QA project with full deduplication.

    mode="tests_only"          write locators, pages, steps, features — merge
                               into existing files, skip infra files always.
    mode="scaffold_and_tests"  also write infra files that do not yet exist.
    force_scaffold=True        overwrite existing infra files (use deliberately).

    Returns a report dict: success flag, helix_root, "written"/"skipped"
    entry lists, per-file "deduplication" details, and a "summary" of counts.
    """
    root = Path(helix_root).expanduser().resolve()
    if not root.exists():
        return {
            "success": False,
            "error": f"helix_root does not exist: {root}",
            "written": [], "skipped": [],
        }

    written: list[dict] = []
    skipped: list[dict] = []
    deduplication: dict[str, Any] = {}

    for file_key, raw_content in files.items():
        # Nothing to write — record and move on.
        if not raw_content or not raw_content.strip():
            skipped.append({"file_key": file_key, "reason": "empty content"})
            continue

        # Apply interface adapter to all TypeScript content
        content = _adapt_to_helix_interface(raw_content)

        dest = _resolve_destination(root, file_key)
        dest.parent.mkdir(parents=True, exist_ok=True)
        dest_rel = str(dest.relative_to(root))

        # ── Infrastructure files ───────────────────────────────────────────
        if _INFRA_RE.search(file_key):
            if mode == "tests_only":
                skipped.append({
                    "file_key": file_key, "dest": dest_rel,
                    "reason": "infrastructure file — skipped in tests_only mode",
                })
                continue
            if dest.exists() and not force_scaffold:
                skipped.append({
                    "file_key": file_key, "dest": dest_rel,
                    "reason": "infrastructure file already exists (pass force_scaffold=True to overwrite)",
                })
                continue
            try:
                action = "overwritten" if dest.exists() else "created"
                dest.write_text(content, encoding="utf-8")
                written.append({"file_key": file_key, "dest": dest_rel,
                                "bytes": len(content.encode()), "action": action})
            except OSError as exc:
                skipped.append({"file_key": file_key, "dest": dest_rel, "reason": str(exc)})
            continue

        # ── Cucumber config: append profile, skip duplicate ────────────────
        if _CUCUMBER_RE.search(file_key):
            try:
                if dest.exists():
                    existing_text = dest.read_text(encoding="utf-8")
                    # First identifier before ":" is treated as the profile name.
                    profile_match = re.match(r"\s*(\w+)\s*:", content.strip())
                    profile_name = profile_match.group(1) if profile_match else None
                    # NOTE(review): substring containment — a profile name occurring
                    # anywhere in the existing config counts as a duplicate.
                    if profile_name and profile_name in existing_text:
                        skipped.append({
                            "file_key": file_key, "dest": dest_rel,
                            "reason": f"profile '{profile_name}' already exists in cucumber.config.ts",
                        })
                        continue
                    dest.write_text(
                        existing_text.rstrip() + "\n\n// --- generated profile ---\n" + content,
                        encoding="utf-8",
                    )
                    written.append({"file_key": file_key, "dest": dest_rel,
                                    "bytes": len(content.encode()), "action": "appended"})
                else:
                    dest.write_text(content, encoding="utf-8")
                    written.append({"file_key": file_key, "dest": dest_rel,
                                    "bytes": len(content.encode()), "action": "created"})
            except OSError as exc:
                skipped.append({"file_key": file_key, "dest": dest_rel, "reason": str(exc)})
            continue

        # ── Feature files: overwrite (they are the Gherkin source of truth) ─
        if _FEATURE_RE.search(file_key):
            try:
                action = "overwritten" if dest.exists() else "created"
                dest.write_text(content, encoding="utf-8")
                written.append({"file_key": file_key, "dest": dest_rel,
                                "bytes": len(content.encode()), "action": action})
            except OSError as exc:
                skipped.append({"file_key": file_key, "dest": dest_rel, "reason": str(exc)})
            continue

        # ── Merge-aware write for locators / page / steps ─────────────────
        try:
            if not dest.exists():
                # No existing file — plain create, nothing to deduplicate.
                dest.write_text(content, encoding="utf-8")
                written.append({"file_key": file_key, "dest": dest_rel,
                                "bytes": len(content.encode()), "action": "created"})
                continue

            existing_text = dest.read_text(encoding="utf-8")

            if _LOCATOR_RE.search(file_key):
                merged, added, dup = _merge_locators(existing_text, content)
                deduplication[dest_rel] = {
                    "type": "locators", "added_keys": added, "skipped_keys": dup,
                }
            elif _STEPS_RE.search(file_key):
                merged, added, dup = _merge_steps(existing_text, content)
                deduplication[dest_rel] = {
                    "type": "steps", "added_patterns": added, "skipped_patterns": dup,
                }
            elif _PAGE_RE.search(file_key):
                merged, added, dup = _merge_page_methods(existing_text, content)
                deduplication[dest_rel] = {
                    "type": "page", "added_methods": added, "skipped_methods": dup,
                }
            else:
                # Unrecognised file kind — last write wins.
                merged = content
                deduplication[dest_rel] = {"type": "unknown", "action": "overwritten"}

            dest.write_text(merged, encoding="utf-8")
            written.append({"file_key": file_key, "dest": dest_rel,
                            "bytes": len(merged.encode()), "action": "merged"})

        except OSError as exc:
            skipped.append({"file_key": file_key, "dest": dest_rel, "reason": str(exc)})

    return {
        "success": len(written) > 0 or len(skipped) == 0,
        "helix_root": str(root),
        "written": written,
        "skipped": skipped,
        "deduplication": deduplication,
        "summary": {
            "requested": len(files),
            "written": len(written),
            "skipped": len(skipped),
        },
    }
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
# ── Read / list helpers ───────────────────────────────────────────────────
|
|
494
|
+
|
|
495
|
+
def read_helix_file(helix_root: str, relative_path: str) -> dict[str, Any]:
    """Read one file from inside the Helix project, guarding against
    path traversal.

    Returns a dict with success/exists flags, the UTF-8 file content and
    its byte length; on failure, an error description instead.
    """
    root = Path(helix_root).expanduser().resolve()
    target = (root / relative_path.lstrip("/")).resolve()

    # Reject any path that resolves outside helix_root.
    try:
        target.relative_to(root)
    except ValueError:
        return {"success": False, "error": "Path escapes helix_root"}

    if not target.exists():
        return {"success": False, "exists": False, "path": relative_path}

    try:
        text = target.read_text(encoding="utf-8")
    except OSError as exc:
        return {"success": False, "error": str(exc)}
    return {
        "success": True,
        "exists": True,
        "path": relative_path,
        "content": text,
        "bytes": len(text.encode()),
    }
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
def update_helix_file(
    helix_root: str,
    relative_path: str,
    content: str,
    force_overwrite: bool = False,
) -> dict[str, Any]:
    """
    Write or merge a single file in the Helix-QA project.

    Applies the same interface adapter and merge logic as write_files_to_helix
    but targets one file directly, avoiding a full-dict write call.

    force_overwrite=True — replace the entire file without merging.

    Returns a dict with success flag, the project-relative path, the action
    taken ("created" / "overwritten" / "merged"), byte count, and the
    deduplication details (None when no merge happened).
    """
    root = Path(helix_root).expanduser().resolve()
    target = (root / relative_path.lstrip("/")).resolve()
    # Prevent path traversal
    try:
        target.relative_to(root)
    except ValueError:
        return {"success": False, "error": "relative_path escapes helix_root — path traversal rejected"}

    if not content or not content.strip():
        return {"success": False, "error": "content is empty — nothing to write"}

    # Rewrite generator idioms to the Helix interface before any write.
    adapted = _adapt_to_helix_interface(content)
    target.parent.mkdir(parents=True, exist_ok=True)
    dest_rel = str(target.relative_to(root))

    try:
        # New file, or caller explicitly asked to replace — no merge needed.
        if not target.exists() or force_overwrite:
            action = "overwritten" if target.exists() else "created"
            target.write_text(adapted, encoding="utf-8")
            return {
                "success": True,
                "path": dest_rel,
                "action": action,
                "bytes": len(adapted.encode()),
                "deduplication": None,
            }

        existing_text = target.read_text(encoding="utf-8")
        # The relative path doubles as the file key for kind classification.
        file_key = relative_path

        if _LOCATOR_RE.search(file_key):
            merged, added, dup = _merge_locators(existing_text, adapted)
            dedup = {"type": "locators", "added_keys": added, "skipped_keys": dup}
        elif _STEPS_RE.search(file_key):
            merged, added, dup = _merge_steps(existing_text, adapted)
            dedup = {"type": "steps", "added_patterns": added, "skipped_patterns": dup}
        elif _PAGE_RE.search(file_key):
            merged, added, dup = _merge_page_methods(existing_text, adapted)
            dedup = {"type": "page", "added_methods": added, "skipped_methods": dup}
        elif _FEATURE_RE.search(file_key):
            # Feature files are the Gherkin source of truth — always overwrite
            target.write_text(adapted, encoding="utf-8")
            return {
                "success": True,
                "path": dest_rel,
                "action": "overwritten",
                "bytes": len(adapted.encode()),
                "deduplication": None,
            }
        else:
            # Unrecognised file kind — last write wins.
            merged = adapted
            dedup = {"type": "unknown", "action": "overwritten"}

        target.write_text(merged, encoding="utf-8")
        return {
            "success": True,
            "path": dest_rel,
            "action": "merged",
            "bytes": len(merged.encode()),
            "deduplication": dedup,
        }

    except OSError as exc:
        return {"success": False, "error": str(exc), "path": dest_rel}
|
|
590
|
+
|
|
591
|
+
|
|
592
|
+
def list_helix_tree(helix_root: str) -> dict[str, Any]:
    """Walk the Helix project and bucket its test-relevant files.

    Only .ts / .feature / .js files are listed; build output, dependency
    and VCS directories are excluded.  Paths are returned relative to the
    project root, grouped by role.
    """
    root = Path(helix_root).expanduser().resolve()
    if not root.exists():
        return {"success": False, "error": f"helix_root does not exist: {root}"}

    buckets: dict[str, list[str]] = {
        "features": [], "steps": [], "pages": [],
        "locators": [], "utils_locators": [], "other": [],
    }
    excluded_dirs = ("node_modules", "dist", "test-results", ".git")

    for entry in sorted(root.rglob("*")):
        if not entry.is_file():
            continue
        if entry.suffix not in (".ts", ".feature", ".js"):
            continue
        if any(part in excluded_dirs for part in entry.parts):
            continue

        rel = str(entry.relative_to(root))
        is_ts = rel.endswith(".ts")
        # First matching rule wins; ordering mirrors directory priority.
        if "test/features" in rel or rel.endswith(".feature"):
            bucket = "features"
        elif "test/steps" in rel and is_ts:
            bucket = "steps"
        elif "src/pages" in rel and is_ts:
            bucket = "pages"
        elif "src/locators" in rel and is_ts:
            bucket = "locators"
        elif "utils/locators" in rel and is_ts:
            bucket = "utils_locators"
        else:
            bucket = "other"
        buckets[bucket].append(rel)

    return {"success": True, "helix_root": str(root), "tree": buckets}
|
|
File without changes
|
|
Binary file
|