ara-cli 0.1.9.74__py3-none-any.whl → 0.1.9.76__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ara_cli/ara_config.py +181 -73
- ara_cli/artefact_autofix.py +103 -72
- ara_cli/artefact_models/businessgoal_artefact_model.py +23 -25
- ara_cli/artefact_models/epic_artefact_model.py +23 -24
- ara_cli/artefact_models/feature_artefact_model.py +74 -46
- ara_cli/artefact_models/keyfeature_artefact_model.py +21 -24
- ara_cli/artefact_models/task_artefact_model.py +73 -13
- ara_cli/artefact_models/userstory_artefact_model.py +22 -24
- ara_cli/artefact_models/vision_artefact_model.py +23 -42
- ara_cli/artefact_scan.py +55 -16
- ara_cli/prompt_handler.py +4 -4
- ara_cli/tag_extractor.py +43 -28
- ara_cli/version.py +1 -1
- {ara_cli-0.1.9.74.dist-info → ara_cli-0.1.9.76.dist-info}/METADATA +1 -1
- {ara_cli-0.1.9.74.dist-info → ara_cli-0.1.9.76.dist-info}/RECORD +21 -21
- tests/test_ara_config.py +420 -36
- tests/test_artefact_scan.py +296 -35
- tests/test_chat.py +2 -2
- {ara_cli-0.1.9.74.dist-info → ara_cli-0.1.9.76.dist-info}/WHEEL +0 -0
- {ara_cli-0.1.9.74.dist-info → ara_cli-0.1.9.76.dist-info}/entry_points.txt +0 -0
- {ara_cli-0.1.9.74.dist-info → ara_cli-0.1.9.76.dist-info}/top_level.txt +0 -0
ara_cli/artefact_models/feature_artefact_model.py
CHANGED

@@ -48,39 +48,36 @@ class FeatureIntent(Intent):

     @classmethod
     def deserialize_from_lines(cls, lines: List[str], start_index: int = 0) -> 'FeatureIntent':
-
-
-
-
-
-
-
-
+        prefixes = [
+            ("As a ", "as_a"),
+            ("As an ", "as_a"),
+            ("I want to ", "i_want_to"),
+            ("So that ", "so_that"),
+        ]
+        found = {"as_a": None, "i_want_to": None, "so_that": None}
+
+        def match_and_store(line):
+            for prefix, field in prefixes:
+                if line.startswith(prefix) and found[field] is None:
+                    found[field] = line[len(prefix):].strip()
+                    return

         index = start_index
-        while index < len(lines) and (
-
-            if line.startswith(as_a_prefix) and not as_a:
-                as_a = line[len(as_a_prefix):].strip()
-            if line.startswith(as_a_prefix_alt) and not as_a:
-                as_a = line[len(as_a_prefix_alt):].strip()
-            if line.startswith(i_want_to_prefix) and not i_want_to:
-                i_want_to = line[len(i_want_to_prefix):].strip()
-            if line.startswith(so_that_prefix) and not so_that:
-                so_that = line[len(so_that_prefix):].strip()
+        while index < len(lines) and any(v is None for v in found.values()):
+            match_and_store(lines[index].strip())
             index += 1

-        if not as_a:
+        if not found["as_a"]:
             raise ValueError("Could not find 'As a' line")
-        if not i_want_to:
+        if not found["i_want_to"]:
             raise ValueError("Could not find 'I want to' line")
-        if not so_that:
+        if not found["so_that"]:
             raise ValueError("Could not find 'So that' line")

         return cls(
-            as_a=as_a,
-            i_want_to=i_want_to,
-            so_that=so_that
+            as_a=found["as_a"],
+            i_want_to=found["i_want_to"],
+            so_that=found["so_that"]
         )

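The rewritten FeatureIntent.deserialize_from_lines drives parsing from a prefix table and a found dict instead of one local variable per prefix. A minimal standalone sketch of that pattern, returning a plain dict rather than the package's FeatureIntent model so it runs on its own:

```python
# Minimal sketch of the prefix-table parsing introduced above; it mirrors the
# diff's logic but returns a plain dict, so it runs without the ara_cli package.
from typing import Dict, List, Optional

PREFIXES = [
    ("As a ", "as_a"),
    ("As an ", "as_a"),
    ("I want to ", "i_want_to"),
    ("So that ", "so_that"),
]

def parse_feature_intent(lines: List[str]) -> Dict[str, Optional[str]]:
    found: Dict[str, Optional[str]] = {"as_a": None, "i_want_to": None, "so_that": None}
    index = 0
    while index < len(lines) and any(v is None for v in found.values()):
        line = lines[index].strip()
        for prefix, field in PREFIXES:
            if line.startswith(prefix) and found[field] is None:
                found[field] = line[len(prefix):].strip()
                break
        index += 1
    for field, label in [("as_a", "As a"), ("i_want_to", "I want to"), ("so_that", "So that")]:
        if not found[field]:
            raise ValueError(f"Could not find '{label}' line")
    return found

print(parse_feature_intent([
    "As a developer",
    "I want to parse intent lines with a prefix table",
    "So that new prefixes only need one new tuple",
]))
```

Adding another accepted prefix now means adding one tuple to the table rather than another if/elif branch.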
@@ -215,28 +212,54 @@ class ScenarioOutline(BaseModel):
     def from_lines(cls, lines: List[str], start_idx: int) -> Tuple['ScenarioOutline', int]:
         """Parse a ScenarioOutline from a list of lines starting at start_idx."""

-
-
-
-
-
-
-
-
-
-
-
+        def extract_title(line: str) -> str:
+            if not line.startswith('Scenario Outline:'):
+                raise ValueError("Expected 'Scenario Outline:' at start index")
+            return line[len('Scenario Outline:'):].strip()
+
+        def extract_steps(lines: List[str], idx: int) -> Tuple[List[str], int]:
+            steps = []
+            while idx < len(lines) and not lines[idx].strip().startswith('Examples:'):
+                if lines[idx].strip():
+                    steps.append(lines[idx].strip())
+                idx += 1
+            return steps, idx
+
+        def extract_headers(line: str) -> List[str]:
+            return [h.strip() for h in line.split('|') if h.strip()]
+
+        def extract_row(line: str) -> List[str]:
+            return [cell.strip() for cell in line.split('|') if cell.strip()]
+
+        def is_scenario_line(line: str) -> bool:
+            return line.startswith("Scenario:") or line.startswith("Scenario Outline:")
+
+        def extract_examples(lines: List[str], idx: int) -> Tuple[List['Example'], int]:
+            examples = []
+
+            if idx >= len(lines) or lines[idx].strip() != 'Examples:':
+                return examples, idx
+
             idx += 1
-            headers =
+            headers = extract_headers(lines[idx])
             idx += 1
-
-
+
+            while idx < len(lines):
+                current_line = lines[idx].strip()
+                if not current_line or is_scenario_line(current_line):
                     break
-
-
+
+                row = extract_row(lines[idx])
                 example = Example.from_row(headers, row)
                 examples.append(example)
                 idx += 1
+
+            return examples, idx
+
+        title = extract_title(lines[start_idx])
+        steps, idx = extract_steps(lines, start_idx + 1)
+        examples, idx = extract_examples(lines, idx)
+
         return cls(title=title, steps=steps, examples=examples), idx

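The new from_lines splits a Gherkin "Scenario Outline" block into title, steps, and example rows via small helper functions. Below is a self-contained sketch of the same split; Example.from_row is replaced by a dict(zip(...)) stand-in so the snippet runs without ara_cli:

```python
# Standalone sketch of the Scenario Outline split performed by the helpers above.
from typing import List, Tuple

def split_scenario_outline(lines: List[str], start_idx: int) -> Tuple[str, List[str], List[dict], int]:
    header = lines[start_idx].strip()
    if not header.startswith("Scenario Outline:"):
        raise ValueError("Expected 'Scenario Outline:' at start index")
    title = header[len("Scenario Outline:"):].strip()

    # Steps run until the 'Examples:' marker.
    idx = start_idx + 1
    steps = []
    while idx < len(lines) and not lines[idx].strip().startswith("Examples:"):
        if lines[idx].strip():
            steps.append(lines[idx].strip())
        idx += 1

    examples = []
    if idx < len(lines) and lines[idx].strip() == "Examples:":
        idx += 1
        headers = [h.strip() for h in lines[idx].split("|") if h.strip()]
        idx += 1
        while idx < len(lines):
            row_line = lines[idx].strip()
            if not row_line or row_line.startswith(("Scenario:", "Scenario Outline:")):
                break
            row = [c.strip() for c in row_line.split("|") if c.strip()]
            examples.append(dict(zip(headers, row)))  # stand-in for Example.from_row
            idx += 1
    return title, steps, examples, idx

lines = [
    "Scenario Outline: Add numbers",
    "Given the numbers <a> and <b>",
    "Then the sum is <total>",
    "Examples:",
    "| a | b | total |",
    "| 1 | 2 | 3 |",
]
print(split_scenario_outline(lines, 0))
```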
@@ -291,12 +314,10 @@ class FeatureArtefact(Artefact):

     def _serialize_scenario_outline(self, scenario: ScenarioOutline) -> str:
         """Serialize a ScenarioOutline with aligned examples."""
-
-
-
-
-
-        if scenario.examples:
+        def serialize_scenario_examples():
+            nonlocal lines, scenario
+            if not scenario:
+                return
             headers = self._extract_placeholders(scenario.steps)

             rows = [headers]

@@ -322,6 +343,13 @@ class FeatureArtefact(Artefact):
             for formatted_row in formatted_rows:
                 lines.append(f" {formatted_row}")

+        lines = []
+        lines.append(f" Scenario Outline: {scenario.title}")
+        for step in scenario.steps:
+            lines.append(f" {step}")
+
+        serialize_scenario_examples()
+
         return "\n".join(lines)

     def _extract_placeholders(self, steps):
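The serializer above now emits the outline title, its steps, and then the aligned Examples table through the nested serialize_scenario_examples helper. The padding code that produces formatted_rows sits outside the visible hunk; the sketch below shows one common way to pad Gherkin table cells to the widest value per column, purely as an illustration rather than the package's exact implementation:

```python
# Illustrative column alignment for a Gherkin Examples table (assumption: the
# real code aligns on the widest cell per column in a similar way).
from typing import List

def format_table(rows: List[List[str]]) -> List[str]:
    widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
    return [
        "| " + " | ".join(cell.ljust(width) for cell, width in zip(row, widths)) + " |"
        for row in rows
    ]

rows = [["name", "amount"], ["apples", "3"], ["pears", "12"]]
for line in format_table(rows):
    print(line)
# | name   | amount |
# | apples | 3      |
# | pears  | 12     |
```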
ara_cli/artefact_models/keyfeature_artefact_model.py
CHANGED

@@ -47,39 +47,36 @@ class KeyfeatureIntent(Intent):

     @classmethod
     def deserialize_from_lines(cls, lines: List[str], start_index: int = 0) -> 'KeyfeatureIntent':
-
-
-
-
-
-
-
-
+        prefixes = [
+            ("In order to ", "in_order_to"),
+            ("As a ", "as_a"),
+            ("As an ", "as_a"),
+            ("I want ", "i_want"),
+        ]
+        found = {"in_order_to": None, "as_a": None, "i_want": None}
+
+        def match_and_store(line):
+            for prefix, field in prefixes:
+                if line.startswith(prefix) and found[field] is None:
+                    found[field] = line[len(prefix):].strip()
+                    return

         index = start_index
-        while index < len(lines) and (
-
-            if line.startswith(in_order_to_prefix) and not in_order_to:
-                in_order_to = line[len(in_order_to_prefix):].strip()
-            elif line.startswith(as_a_prefix) and not as_a:
-                as_a = line[len(as_a_prefix):].strip()
-            elif line.startswith(as_a_prefix_alt) and not as_a:
-                as_a = line[len(as_a_prefix_alt):].strip()
-            elif line.startswith(i_want_prefix) and not i_want:
-                i_want = line[len(i_want_prefix):].strip()
+        while index < len(lines) and any(v is None for v in found.values()):
+            match_and_store(lines[index])
             index += 1

-        if not in_order_to:
+        if not found["in_order_to"]:
             raise ValueError("Could not find 'In order to' line")
-        if not as_a:
+        if not found["as_a"]:
             raise ValueError("Could not find 'As a' line")
-        if not i_want:
+        if not found["i_want"]:
             raise ValueError("Could not find 'I want' line")

         return cls(
-            in_order_to=in_order_to,
-            as_a=as_a,
-            i_want=i_want
+            in_order_to=found["in_order_to"],
+            as_a=found["as_a"],
+            i_want=found["i_want"]
         )

ara_cli/artefact_models/task_artefact_model.py
CHANGED

@@ -33,17 +33,41 @@ class ActionItem(BaseModel):
         return v

     @classmethod
-    def deserialize(cls,
-        if not
+    def deserialize(cls, text: str) -> Optional['ActionItem']:
+        if not text:
             return None
-
+
+        lines = text.strip().split('\n')
+        first_line = lines[0]
+
+        match = re.match(r'\[@(.*?)\]\s+(.*)', first_line)
         if not match:
             return None
-
-
+
+        status, first_line_text = match.groups()
+
+        # Validate the status before creating the ActionItem
+        if status not in ["to-do", "in-progress", "done"]:
+            raise ValueError(f"invalid status '{status}' in action item. Allowed values are 'to-do', 'in-progress', 'done'")
+
+        # If there are multiple lines, join them
+        if len(lines) > 1:
+            all_text = '\n'.join([first_line_text] + lines[1:])
+        else:
+            all_text = first_line_text
+
+        return cls(status=status, text=all_text)

     def serialize(self) -> str:
-
+        lines = self.text.split('\n')
+        # First line includes the status marker
+        first_line = f"[@{self.status}] {lines[0]}"
+        if len(lines) == 1:
+            return first_line
+
+        # Additional lines follow without status marker
+        result_lines = [first_line] + lines[1:]
+        return '\n'.join(result_lines)


 class TaskArtefact(Artefact):
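ActionItem now round-trips a "[@status] text" block that may span several lines. A standalone sketch of the same parse/serialize pair, using plain tuples instead of the pydantic model:

```python
# Round-trip sketch of the "[@status] text" action-item format handled above;
# same regex and status whitelist as the diff, no ara_cli dependency.
import re
from typing import Optional, Tuple

def parse_action_item(text: str) -> Optional[Tuple[str, str]]:
    lines = text.strip().split('\n')
    match = re.match(r'\[@(.*?)\]\s+(.*)', lines[0])
    if not match:
        return None
    status, first = match.groups()
    if status not in ["to-do", "in-progress", "done"]:
        raise ValueError(f"invalid status '{status}' in action item")
    return status, '\n'.join([first] + lines[1:])

def serialize_action_item(status: str, text: str) -> str:
    lines = text.split('\n')
    return '\n'.join([f"[@{status}] {lines[0]}"] + lines[1:])

status, text = parse_action_item("[@in-progress] write docs\ncover the new CLI flags")
assert serialize_action_item(status, text) == "[@in-progress] write docs\ncover the new CLI flags"
```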
@@ -54,13 +78,47 @@ class TaskArtefact(Artefact):
     def _deserialize_action_items(cls, text) -> Tuple[List[ActionItem], List[str]]:
         lines = [line.strip() for line in text.strip().splitlines() if line.strip()]

-        remaining_lines = []
         action_items = []
-
+        remaining_lines = []
+        i = 0
+
+        contribution_marker = cls._contribution_starts_with()
+        description_marker = cls._description_starts_with()
+
+        while i < len(lines):
+            line = lines[i]
+
             if line.startswith('[@'):
-
-
-
+                # Collect all lines for this action item
+                action_item_lines = [line]
+                j = i + 1
+                # Collect lines until we hit another action item or a known section
+                while j < len(lines):
+                    next_line = lines[j]
+                    # Check if next line is a new action item or a known section
+                    if (next_line.startswith('[@') or
+                            next_line.startswith(description_marker) or
+                            next_line.startswith(contribution_marker)):
+                        break
+                    action_item_lines.append(next_line)
+                    j += 1
+
+                # Join all lines and pass as a single string to deserialize
+                action_item_text = '\n'.join(action_item_lines)
+                try:
+                    action_item = ActionItem.deserialize(action_item_text)
+                    if action_item:
+                        action_items.append(action_item)
+                except ValueError as e:
+                    # Re-raise with more context about where the error occurred
+                    raise ValueError(f"Error parsing action item: {e}")
+
+                # Move index to the next unprocessed line
+                i = j
+            else:
+                remaining_lines.append(line)
+                i += 1
+
         return action_items, remaining_lines

     @classmethod
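_deserialize_action_items now folds continuation lines into the preceding "[@...]" item until the next action item or a known section marker appears. The sketch below mirrors that grouping loop; the marker strings are illustrative stand-ins for cls._description_starts_with() and cls._contribution_starts_with():

```python
# Grouping loop sketch: continuation lines belong to the previous "[@...]" item.
from typing import List, Tuple

SECTION_MARKERS = ("Description:", "Contributes to")  # illustrative stand-ins only

def group_action_item_blocks(lines: List[str]) -> Tuple[List[str], List[str]]:
    blocks, remaining = [], []
    i = 0
    while i < len(lines):
        line = lines[i]
        if line.startswith('[@'):
            j = i + 1
            block = [line]
            while j < len(lines) and not lines[j].startswith(('[@',) + SECTION_MARKERS):
                block.append(lines[j])
                j += 1
            blocks.append('\n'.join(block))
            i = j
        else:
            remaining.append(line)
            i += 1
    return blocks, remaining

blocks, rest = group_action_item_blocks([
    "[@to-do] refactor parser",
    "split helpers into a module",
    "[@done] bump version",
    "Description: release notes",
])
print(blocks)  # ['[@to-do] refactor parser\nsplit helpers into a module', '[@done] bump version']
print(rest)    # ['Description: release notes']
```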
@@ -82,7 +140,9 @@ class TaskArtefact(Artefact):
         return ArtefactType.task

     def _serialize_action_items(self) -> str:
-        action_item_lines = [
+        action_item_lines = []
+        for action_item in self.action_items:
+            action_item_lines.append(action_item.serialize())
         return "\n".join(action_item_lines)

     def serialize(self) -> str:

@@ -106,4 +166,4 @@ class TaskArtefact(Artefact):
         lines.append(description)
         lines.append("")

-        return "\n".join(lines)
+        return "\n".join(lines)
ara_cli/artefact_models/userstory_artefact_model.py
CHANGED

@@ -47,39 +47,37 @@ class UserstoryIntent(Intent):

     @classmethod
     def deserialize_from_lines(cls, lines: List[str], start_index: int = 0) -> 'UserstoryIntent':
-
-
-
-
-
-
-
-
+        prefixes = [
+            ("In order to ", "in_order_to"),
+            ("As a ", "as_a"),
+            ("As an ", "as_a"),
+            ("I want ", "i_want"),
+        ]
+        found = {"in_order_to": None, "as_a": None, "i_want": None}
+
+        def match_and_store(line):
+            for prefix, field in prefixes:
+                if line.startswith(prefix) and found[field] is None:
+                    found[field] = line[len(prefix):].strip()
+                    return True
+            return False

         index = start_index
-        while index < len(lines) and (
-
-            if line.startswith(in_order_to_prefix) and not in_order_to:
-                in_order_to = line[len(in_order_to_prefix):].strip()
-            elif line.startswith(as_a_prefix) and not as_a:
-                as_a = line[len(as_a_prefix):].strip()
-            elif line.startswith(as_a_prefix_alt) and not as_a:
-                as_a = line[len(as_a_prefix_alt):].strip()
-            elif line.startswith(i_want_prefix) and not i_want:
-                i_want = line[len(i_want_prefix):].strip()
+        while index < len(lines) and any(v is None for v in found.values()):
+            match_and_store(lines[index].strip())
             index += 1

-        if not in_order_to:
+        if not found["in_order_to"]:
             raise ValueError("Could not find 'In order to' line")
-        if not as_a:
+        if not found["as_a"]:
             raise ValueError("Could not find 'As a' line")
-        if not i_want:
+        if not found["i_want"]:
             raise ValueError("Could not find 'I want' line")

         return cls(
-            in_order_to=in_order_to,
-            as_a=as_a,
-            i_want=i_want
+            in_order_to=found["in_order_to"],
+            as_a=found["as_a"],
+            i_want=found["i_want"]
         )

ara_cli/artefact_models/vision_artefact_model.py
CHANGED

@@ -76,54 +76,35 @@ class VisionIntent(Intent):

     @classmethod
     def deserialize_from_lines(cls, lines: List[str], start_index: int = 0) -> 'VisionIntent':
+        prefixes = [
+            ("For ", "for_"),
+            ("Who ", "who"),
+            ("The ", "the"),
+            ("That ", "that"),
+            ("Unlike ", "unlike"),
+            ("Our product ", "our_product"),
+        ]
+        found = {field: "" for _, field in prefixes}
+
+        # Find the first "For " line, if it exists
         intent_start_index = start_index
-
-        for_ = ""
-        who = ""
-        the = ""
-        that = ""
-        unlike = ""
-        our_product = ""
-
-        for_prefix = "For "
-        who_prefix = "Who "
-        the_prefix = "The "
-        that_prefix = "That "
-        unlike_prefix = "Unlike "
-        our_product_prefix = "Our product "
-
         for i in range(start_index, len(lines)):
-            if lines[i].startswith(
+            if lines[i].startswith("For "):
                 intent_start_index = i
                 break

+        def match_and_store(line):
+            for prefix, field in prefixes:
+                if line.startswith(prefix) and not found[field]:
+                    found[field] = line[len(prefix):].strip()
+                    return
+
         index = intent_start_index
-
-
-        index
-
-
-            index = index + 1
-        if index < len(lines) and lines[index].startswith(the_prefix):
-            the = lines[index][len(the_prefix):]
-            index = index + 1
-        if index < len(lines) and lines[index].startswith(that_prefix):
-            that = lines[index][len(that_prefix):]
-            index = index + 1
-        if index < len(lines) and lines[index].startswith(unlike_prefix):
-            unlike = lines[index][len(unlike_prefix):]
-            index = index + 1
-        if index < len(lines) and lines[index].startswith(our_product_prefix):
-            our_product = lines[index][len(our_product_prefix):]
-
-        return cls(
-            for_=for_,
-            who=who,
-            the=the,
-            that=that,
-            unlike=unlike,
-            our_product=our_product,
-        )
+        while index < len(lines) and any(not v for v in found.values()):
+            match_and_store(lines[index])
+            index += 1
+
+        return cls(**found)


 class VisionArtefact(Artefact):
ara_cli/artefact_scan.py
CHANGED

@@ -2,9 +2,57 @@ from textwrap import indent
 import os


+def is_contribution_valid(contribution, classified_artefact_info) -> bool:
+    from ara_cli.artefact_fuzzy_search import extract_artefact_names_of_classifier
+    if not contribution or not contribution.artefact_name or not contribution.classifier:
+        return True
+
+    all_artefact_names = extract_artefact_names_of_classifier(
+        classified_files=classified_artefact_info,
+        classifier=contribution.classifier
+    )
+    if contribution.artefact_name not in all_artefact_names:
+        return False
+    return True
+
+
+def is_rule_valid(contribution, classified_artefact_info) -> bool:
+    from ara_cli.artefact_reader import ArtefactReader
+
+    if not contribution or not contribution.artefact_name or not contribution.classifier:
+        return True
+    rule = contribution.rule
+    if not rule:
+        return True
+    parent = ArtefactReader.read_artefact(contribution.artefact_name, contribution.classifier)
+    if not parent or not parent.rules:
+        return True
+    rules = parent.rules
+    if rule not in rules:
+        return False
+    return True
+
+
+def check_contribution(contribution, classified_artefact_info, file_path) -> tuple[bool, str]:
+    if not contribution:
+        return True, None
+
+    if not is_contribution_valid(contribution, classified_artefact_info):
+        reason = (f"Invalid Contribution Reference: The contribution references "
+                  f"'{contribution.classifier}' artefact '{contribution.artefact_name}' "
+                  f"which does not exist.")
+        return False, reason
+
+    if not is_rule_valid(contribution, classified_artefact_info):
+        reason = (f"Rule Mismatch: The contribution references "
+                  f"rule '{contribution.rule}' which the parent "
+                  f"{contribution.classifier} '{contribution.artefact_name}' does not have.")
+        return False, reason
+    return True, None
+
+
 def check_file(file_path, artefact_class, classified_artefact_info=None):
     from pydantic import ValidationError
-    from ara_cli.artefact_fuzzy_search import extract_artefact_names_of_classifier
     from ara_cli.file_classifier import FileClassifier

     try:
@@ -23,26 +71,17 @@ def check_file(file_path, artefact_class, classified_artefact_info=None):
         base_name = os.path.basename(file_path)
         file_name_without_ext, _ = os.path.splitext(base_name)

+        # Check title and file name matching
         if artefact_instance.title != file_name_without_ext:
             reason = (f"Filename-Title Mismatch: The file name '{file_name_without_ext}' "
                       f"does not match the artefact title '{artefact_instance.title}'.")
             return False, reason
-
-        # Check contribution reference validity
+
         contribution = artefact_instance.contribution
-
-
-
-
-            classified_files=classified_artefact_info,
-            classifier=contribution.classifier
-        )
-
-        if contribution.artefact_name not in all_artefact_names:
-            reason = (f"Invalid Contribution Reference: The contribution references "
-                      f"'{contribution.classifier}' artefact '{contribution.artefact_name}' "
-                      f"which does not exist.")
-            return False, reason
+
+        contribution_valid, reason = check_contribution(contribution, classified_artefact_info, file_path)
+        if not contribution_valid:
+            return False, reason

         return True, None
     except (ValidationError, ValueError, AssertionError) as e:
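check_file now delegates to check_contribution, which chains the two validators and returns an (ok, reason) pair. A usage-shaped sketch with hypothetical stand-ins for the contribution object and the artefact/rule lookups that ara_cli performs:

```python
# Sketch of the check_contribution flow; the Contribution namedtuple and the
# lookup dicts below are hypothetical stand-ins for the ara_cli objects.
from collections import namedtuple

Contribution = namedtuple("Contribution", "classifier artefact_name rule")

known_artefacts = {"feature": ["login_flow"], "epic": ["accounts"]}
known_rules = {("feature", "login_flow"): ["must use SSO"]}

def check_contribution_sketch(contribution):
    if not contribution:
        return True, None
    if contribution.artefact_name not in known_artefacts.get(contribution.classifier, []):
        return False, (f"Invalid Contribution Reference: The contribution references "
                       f"'{contribution.classifier}' artefact '{contribution.artefact_name}' "
                       f"which does not exist.")
    if contribution.rule and contribution.rule not in known_rules.get(
            (contribution.classifier, contribution.artefact_name), []):
        return False, (f"Rule Mismatch: The contribution references "
                       f"rule '{contribution.rule}' which the parent "
                       f"{contribution.classifier} '{contribution.artefact_name}' does not have.")
    return True, None

print(check_contribution_sketch(Contribution("feature", "login_flow", "must use SSO")))  # (True, None)
print(check_contribution_sketch(Contribution("feature", "checkout", None)))              # (False, 'Invalid Contribution Reference: ...')
```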
ara_cli/prompt_handler.py
CHANGED

@@ -239,7 +239,7 @@ def move_and_copy_files(source_path, prompt_data_path, prompt_archive_path):
     file_name = os.path.basename(source_path)

     # Check the name ending and extension of source path
-    endings = [".commands.md", ".rules.md", ".intention.md"]
+    endings = [".blueprint.md", ".commands.md", ".rules.md", ".intention.md"]
     if any(file_name.endswith(ext) for ext in endings):
         for ext in endings:
             if file_name.endswith(ext):
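With ".blueprint.md" added to the endings list, blueprint prompt modules now pass the same endswith() check as commands, rules, and intention files. A quick illustration with made-up file names:

```python
# File names below are invented for the example.
endings = [".blueprint.md", ".commands.md", ".rules.md", ".intention.md"]

for file_name in ["feature.blueprint.md", "feature.commands.md", "feature.prompt.md"]:
    matches = any(file_name.endswith(ext) for ext in endings)
    print(file_name, "->", matches)
# feature.blueprint.md -> True
# feature.commands.md -> True
# feature.prompt.md -> False
```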
@@ -394,7 +394,7 @@ def create_and_send_custom_prompt(classifier, parameter):
     prompt_data_path = f"ara/{sub_directory}/{parameter}.data/prompt.data"
     prompt_file_path_markdown = join(prompt_data_path, f"{classifier}.prompt.md")

-    extensions = [".rules.md", ".prompt_givens.md", ".intention.md", ".commands.md"]
+    extensions = [".blueprint.md", ".rules.md", ".prompt_givens.md", ".intention.md", ".commands.md"]
     combined_content_markdown, image_data_list = collect_file_content_by_extension(prompt_data_path, extensions)

     with open(prompt_file_path_markdown, 'w', encoding='utf-8') as file:

@@ -430,7 +430,7 @@ def generate_config_prompt_template_file(prompt_data_path, config_prompt_templat
     config = ConfigManager.get_config()
     global_prompt_template_path = TemplatePathManager.get_template_base_path()
     dir_list = ["ara/.araconfig/custom-prompt-modules"] + [f"{os.path.join(global_prompt_template_path,'prompt-modules')}"]
-    file_list = ['*.rules.md','*.intention.md', '*.commands.md']
+    file_list = ['*.blueprint.md','*.rules.md','*.intention.md', '*.commands.md']

     print(f"used {dir_list} for prompt templates file listing")
     generate_markdown_listing(dir_list, file_list, config_prompt_templates_path)

@@ -439,7 +439,7 @@ def generate_config_prompt_template_file(prompt_data_path, config_prompt_templat
 def generate_config_prompt_givens_file(prompt_data_path, config_prompt_givens_name, artefact_to_mark=None):
     config_prompt_givens_path = os.path.join(prompt_data_path, config_prompt_givens_name)
     config = ConfigManager.get_config()
-    dir_list = ["ara"] + [
+    dir_list = ["ara"] + [ext.source_dir for ext in config.ext_code_dirs] + [config.doc_dir] + [config.glossary_dir]

     print(f"used {dir_list} for prompt givens file listing")
     generate_markdown_listing(dir_list, config.ara_prompt_given_list_includes, config_prompt_givens_path)