ara-cli 0.1.10.4__py3-none-any.whl → 0.1.10.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ara_cli/artefact_models/artefact_templates.py CHANGED
@@ -204,7 +204,8 @@ def _default_feature(title: str, use_default_contribution: bool) -> FeatureArtefact
 
 def _default_task(title: str, use_default_contribution: bool) -> TaskArtefact:
     return TaskArtefact(
-        status="to-do",
+        tags=[],
+        status=None,
         title=title,
         description="<further optional description to understand the task, no format defined>",
         contribution=default_contribution() if use_default_contribution else None
ara_cli/global_file_lister.py CHANGED
@@ -2,22 +2,19 @@ import os
 import fnmatch
 from typing import List, Dict, Any
 
-# Ağaç yapımız için bir tip tanımı yapalım
 DirTree = Dict[str, Any]
 
 def _build_tree(root_path: str, patterns: List[str]) -> DirTree:
-    """Belirtilen yoldaki dizin yapısını temsil eden içe bir sözlük oluşturur."""
+    """Creates a nested dictionary representing the directory structure in the specified path."""
     tree: DirTree = {'files': [], 'dirs': {}}
     try:
         for item in os.listdir(root_path):
             item_path = os.path.join(root_path, item)
             if os.path.isdir(item_path):
                 subtree = _build_tree(item_path, patterns)
-                # Sadece içinde dosya olan veya dosyası olan alt klasörleri ekle
                 if subtree['files'] or subtree['dirs']:
                     tree['dirs'][item] = subtree
             elif os.path.isfile(item_path):
-                # Dosyanın verilen desenlerden herhangi biriyle eşleşip eşleşmediğini kontrol et
                 if any(fnmatch.fnmatch(item, pattern) for pattern in patterns):
                     tree['files'].append(item)
     except OSError as e:
@@ -25,23 +22,17 @@ def _build_tree(root_path: str, patterns: List[str]) -> DirTree:
     return tree
 
 def _write_tree_to_markdown(md_file, tree: DirTree, level: int):
-    """Ağaç veri yapısını markdown formatında dosyaya yazar."""
-    # Dosyaları girintili olarak yaz
+    """Writes the tree data structure to the file in markdown format."""
     indent = ' ' * level
     for filename in sorted(tree['files']):
         md_file.write(f"{indent}- [] {filename}\n")
 
-    # Alt dizinler için başlık oluştur ve recursive olarak devam et
     for dirname, subtree in sorted(tree['dirs'].items()):
-        # Alt başlıklar için girinti yok, sadece başlık seviyesi artıyor
         md_file.write(f"{' ' * (level -1)}{'#' * (level + 1)} {dirname}\n")
         _write_tree_to_markdown(md_file, subtree, level + 1)
 
 def generate_global_markdown_listing(directories: List[str], file_patterns: List[str], output_file: str):
-    """
-    Global dizinler için hiyerarşik bir markdown dosya listesi oluşturur.
-    En üst başlık olarak mutlak yolu kullanır, alt öğeler için göreceli isimler kullanır.
-    """
+    """Creates a hierarchical list of markdown files for global directories. Uses the absolute path as the top heading and relative names for children."""
     with open(output_file, 'w', encoding='utf-8') as md_file:
         for directory in directories:
             abs_dir = os.path.abspath(directory)
@@ -49,12 +40,11 @@ def generate_global_markdown_listing(directories: List[str], file_patterns: List
             if not os.path.isdir(abs_dir):
                 print(f"Warning: Global directory not found: {abs_dir}")
                 md_file.write(f"# {directory}\n")
-                md_file.write(f" - !! UYARI: Dizin bulunamadı: {abs_dir}\n\n")
+                md_file.write(f" - !! Warning: Global directory not found: {abs_dir}\n\n")
                 continue
 
             tree = _build_tree(abs_dir, file_patterns)
-
-            # Sadece ağaç boş değilse yaz
+
             if tree['files'] or tree['dirs']:
                 md_file.write(f"# {abs_dir}\n")
                 _write_tree_to_markdown(md_file, tree, 1)
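For orientation, a minimal usage sketch of the listing function diffed above. The temporary directory layout and the "*.md" pattern are illustrative assumptions, not part of the package; only the function name and the output shape come from the code shown.

# Illustrative only: exercises generate_global_markdown_listing from the diff above.
import os
import pathlib
import tempfile

from ara_cli.global_file_lister import generate_global_markdown_listing

with tempfile.TemporaryDirectory() as root:
    # Assumed layout: one matching file at the top level, one in a subdirectory.
    pathlib.Path(root, "readme.md").touch()
    pathlib.Path(root, "docs").mkdir()
    pathlib.Path(root, "docs", "guide.md").touch()

    output = os.path.join(root, "listing.md")
    generate_global_markdown_listing([root], ["*.md"], output)
    print(pathlib.Path(output).read_text(encoding="utf-8"))

# Expected shape per the code above: the absolute path as a "#" heading,
# matching files as "- []" items, each subdirectory as a deeper heading, e.g.
#   # /tmp/<root>
#    - [] readme.md
#   ## docs
#     - [] guide.md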
ara_cli/prompt_extractor.py CHANGED
@@ -8,6 +8,7 @@ from ara_cli.classifier import Classifier
 from ara_cli.directory_navigator import DirectoryNavigator
 from ara_cli.artefact_models.artefact_mapping import title_prefix_to_artefact_class
 
+
 def _find_extract_token(tokens):
     """Find the first token that needs to be processed."""
     for token in tokens:
@@ -15,6 +16,7 @@ def _find_extract_token(tokens):
             return token
     return None
 
+
 def _extract_file_path(content_lines):
     """Extract file path from content lines."""
     if not content_lines:
@@ -22,6 +24,7 @@ def _extract_file_path(content_lines):
     file_path_search = re.search(r"# filename: (.+)", content_lines[0])
     return file_path_search.group(1).strip() if file_path_search else None
 
+
 def _find_artefact_class(content_lines):
     """Find the appropriate artefact class from content lines."""
     for line in content_lines[:2]:
@@ -33,11 +36,13 @@ def _find_artefact_class(content_lines):
             return title_prefix_to_artefact_class[first_word]
     return None
 
+
 def _process_file_extraction(file_path, code_content, force, write):
     """Process file extraction logic."""
     print(f"Filename extracted: {file_path}")
     handle_existing_file(file_path, code_content, force, write)
 
+
 def _process_artefact_extraction(artefact_class, content_lines, force, write):
     """Process artefact extraction logic."""
     artefact = artefact_class.deserialize('\n'.join(content_lines))
@@ -54,14 +59,13 @@ def _process_artefact_extraction(artefact_class, content_lines, force, write):
 
     os.chdir(original_directory)
 
-def _process_extraction_block(token_to_process, updated_content, force, write):
-    """Process a single extraction block."""
-    # Get the original block text for later replacement
-    source_lines = updated_content.split('\n')
-    start_line, end_line = token_to_process.map
-    original_block_text = '\n'.join(source_lines[start_line:end_line])
 
-    block_content = token_to_process.content
+def _perform_extraction_for_block(source_lines, block_start, block_end, force, write):
+    """Helper function to process a single, identified block."""
+    original_block_text = '\n'.join(source_lines[block_start:block_end + 1])
+    block_content_lines = source_lines[block_start + 1:block_end]
+    block_content = '\n'.join(block_content_lines)
+
     block_lines = block_content.split('\n')
     content_lines_after_extract = block_lines[1:]
 
@@ -76,49 +80,129 @@ def _process_extraction_block(token_to_process, updated_content, force, write):
         _process_artefact_extraction(artefact_class, content_lines_after_extract, force, write)
     else:
         print("No filename or valid artefact found, skipping processing for this block.")
+        return None, None
 
-    # Update the main content by replacing the processed block text with a modified version
     modified_block_text = original_block_text.replace("# [x] extract", "# [v] extract", 1)
-    return updated_content.replace(original_block_text, modified_block_text, 1)
+    return original_block_text, modified_block_text
+
+
+class FenceDetector:
+    """Helper class to detect and match fence blocks."""
+
+    def __init__(self, source_lines):
+        self.source_lines = source_lines
+
+    def is_extract_fence(self, line_num):
+        """Check if line is a fence with extract marker."""
+        line = self.source_lines[line_num]
+        stripped_line = line.strip()
+
+        is_fence = stripped_line.startswith('```') or stripped_line.startswith('~~~')
+        if not is_fence:
+            return False
+
+        if not (line_num + 1 < len(self.source_lines)):
+            return False
+
+        return self.source_lines[line_num + 1].strip().startswith("# [x] extract")
+
+    def find_matching_fence_end(self, start_line):
+        """Find the matching end fence for a given start fence."""
+        fence_line = self.source_lines[start_line]
+        indentation = len(fence_line) - len(fence_line.lstrip())
+        stripped_fence_line = fence_line.strip()
+        fence_char = stripped_fence_line[0]
+        fence_length = len(stripped_fence_line) - len(stripped_fence_line.lstrip(fence_char))
+
+        for i in range(start_line + 1, len(self.source_lines)):
+            scan_line = self.source_lines[i]
+            stripped_scan_line = scan_line.strip()
+
+            if not stripped_scan_line or stripped_scan_line[0] != fence_char:
+                continue
+
+            if not all(c == fence_char for c in stripped_scan_line):
+                continue
+
+            candidate_indentation = len(scan_line) - len(scan_line.lstrip())
+            candidate_length = len(stripped_scan_line)
+
+            if candidate_length == fence_length and candidate_indentation == indentation:
+                return i
+
+        return -1
+
+
+def _process_document_blocks(source_lines, force, write):
+    """Process all extract blocks in the document."""
+    fence_detector = FenceDetector(source_lines)
+    replacements = []
+    line_num = 0
+
+    while line_num < len(source_lines):
+        if not fence_detector.is_extract_fence(line_num):
+            line_num += 1
+            continue
 
-def extract_responses(document_path, relative_to_ara_root=False, force=False, write=False):
-    print(f"Starting extraction from '{document_path}'")
-    block_extraction_counter = 0
+        block_start_line = line_num
+        block_end_line = fence_detector.find_matching_fence_end(block_start_line)
+
+        if block_end_line != -1:
+            print(f"Block found and processed starting on line {block_start_line + 1}.")
+            original, modified = _perform_extraction_for_block(
+                source_lines, block_start_line, block_end_line, force, write
+            )
+            if original and modified:
+                replacements.append((original, modified))
+            line_num = block_end_line + 1
+        else:
+            line_num += 1
+
+    return replacements
+
+
+def _apply_replacements(content, replacements):
+    """Apply all replacements to the content."""
+    updated_content = content
+    for original, modified in replacements:
+        updated_content = updated_content.replace(original, modified, 1)
+    return updated_content
 
-    try:
-        with open(document_path, 'r', encoding='utf-8', errors='replace') as file:
-            content = file.read()
-    except FileNotFoundError:
-        print(f"Error: File not found at '{document_path}'. Skipping extraction.")
-        return
 
+def _setup_working_directory(relative_to_ara_root):
+    """Setup working directory and return original cwd."""
     cwd = os.getcwd()
     if relative_to_ara_root:
         navigator = DirectoryNavigator()
         navigator.navigate_to_target()
         os.chdir('..')
+    return cwd
 
-    updated_content = content
-
-    while True:
-        md = MarkdownIt()
-        tokens = md.parse(updated_content)
-
-        token_to_process = _find_extract_token(tokens)
-        if not token_to_process:
-            break # No more blocks to process
 
-        block_extraction_counter += 1
-        print("Block found and processed.")
+def extract_responses(document_path, relative_to_ara_root=False, force=False, write=False):
+    print(f"Starting extraction from '{document_path}'")
+
+    try:
+        with open(document_path, 'r', encoding='utf-8', errors='replace') as file:
+            content = file.read()
+    except FileNotFoundError:
+        print(f"Error: File not found at '{document_path}'. Skipping extraction.")
+        return
 
-        updated_content = _process_extraction_block(token_to_process, updated_content, force, write)
+    cwd = _setup_working_directory(relative_to_ara_root)
+
+    source_lines = content.split('\n')
+    replacements = _process_document_blocks(source_lines, force, write)
+
+    updated_content = _apply_replacements(content, replacements)
 
     os.chdir(cwd)
     with open(document_path, 'w', encoding='utf-8') as file:
        file.write(updated_content)
 
-    if block_extraction_counter > 0:
-        print(f"End of extraction. Found and processed {block_extraction_counter} blocks in '{os.path.basename(document_path)}'.")
+    print(f"End of extraction. Found and processed {len(replacements)} blocks in '{os.path.basename(document_path)}'.")
+
+
 
 def modify_and_save_file(response, file_path):
     print(f"Debug: Modifying and saving file {file_path}")
@@ -144,9 +228,11 @@ def modify_and_save_file(response, file_path):
     except json.JSONDecodeError as ex:
         print(f"ERROR: Failed to decode JSON response: {ex}")
 
+
 def prompt_user_decision(prompt):
     return input(prompt)
 
+
 def determine_should_create(skip_query=False):
     if skip_query:
         return True
@@ -155,6 +241,7 @@ def determine_should_create(skip_query=False):
         return True
     return False
 
+
 def create_file_if_not_exist(filename, content, skip_query=False):
     try:
         if not os.path.exists(filename):
@@ -174,6 +261,7 @@ def create_file_if_not_exist(filename, content, skip_query=False):
         print(f"Error: {e}")
         print(f"Failed to create file {filename} due to an OS error")
 
+
 def create_prompt_for_file_modification(content_str, filename):
     if not os.path.exists(filename):
         print(f"WARNING: {filename} for merge prompt creation does not exist.")
@@ -204,6 +292,7 @@ def create_prompt_for_file_modification(content_str, filename):
 
     return prompt_text
 
+
 def handle_existing_file(filename, block_content, skip_query=False, write=False):
     if not os.path.isfile(filename):
         print(f"File {filename} does not exist, attempting to create")
@@ -240,6 +329,7 @@ def handle_existing_file(filename, block_content, skip_query=False, write=False)
     response += content
     modify_and_save_file(response, filename)
 
+
 def extract_and_save_prompt_results(classifier, param, write=False):
     sub_directory = Classifier.get_sub_directory(classifier)
     prompt_log_file = f"ara/{sub_directory}/{param}.data/{classifier}.prompt_log.md"
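To make the new fence-scanning path concrete, a small sketch of the block shape it recognizes, based only on the checks visible in the hunks above (a ``` or ~~~ fence line, a "# [x] extract" marker on the next line, an optional "# filename:" line). The document text and the import are illustrative assumptions.

# Illustrative only: the block shape recognized by the new FenceDetector-based scan.
from ara_cli.prompt_extractor import FenceDetector

document = "\n".join([
    "Some surrounding prompt-log text...",
    "```python",
    "# [x] extract",              # marker checked by is_extract_fence()
    "# filename: src/hello.py",   # read by _extract_file_path()
    "print('hello')",
    "```",
])
source_lines = document.split('\n')

detector = FenceDetector(source_lines)
start = next(i for i in range(len(source_lines)) if detector.is_extract_fence(i))
end = detector.find_matching_fence_end(start)
print(start, end)  # 1 5: the opening and closing fence lines

# After a block is processed, extract_responses() rewrites its marker from
# "# [x] extract" to "# [v] extract" in the document, per the diff above.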
ara_cli/templates/prompt-modules/blueprints/pytest_unittest_prompt.blueprint.md ADDED
@@ -0,0 +1,32 @@
+MODE: STRICT_GENERATION
+TASK: Produce one complete pytest file for a given Python module.
+
+REQUIREMENTS:
+- Output only valid Python code (no text, no markdown).
+- Target: full behavioral + branch coverage (100%).
+- Tests follow AAA pattern (# Arrange, # Act, # Assert).
+- Exactly ONE assert or ONE pytest.raises() per test.
+- Use tmp_path fixture for filesystem isolation.
+- Include an autouse=True fixture for global patching if needed.
+- No external I/O or network calls.
+- All tests independent, self-contained.
+
+STYLE:
+- File starts with module docstring describing AAA, single assert rule, autouse fixture.
+- Group tests with comment headers:
+  # --- Success paths --- / # --- Error paths --- / # --- Edge cases ---
+- Function names: test_<function>_<expected_behavior>_<condition>
+- Variables descriptive (rel, path, new_content, etc.).
+- Use direct asserts (assert result is True, assert path.read_text() == "x").
+- For errors:
+  with pytest.raises(ExceptionType, match=r"text"): function_call()
+
+COVERAGE:
+- Include success, failure, and rare edge branches (e.g., conditional exceptions).
+- Ensure 100% of conditional branches executed.
+
+OUTPUT:
+- Single .py file, runnable via: pytest -v --disable-warnings --maxfail=1 --cov=<target_module>
+- No explanations or prose, only the test code.
+
+END
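For reference, a short sketch of a test written in the style this blueprint mandates (AAA comments, one assert or one pytest.raises per test, tmp_path isolation, grouped comment headers). The function under test, write_text_file, is hypothetical and stands in for an arbitrary target module.

"""Tests follow AAA, one assert per test, tmp_path for isolation (illustrative sketch)."""
import pytest


def write_text_file(path, text):
    # Hypothetical target function; a real run would import the module under test.
    if not text:
        raise ValueError("text must not be empty")
    path.write_text(text)
    return True


# --- Success paths ---
def test_write_text_file_returns_true_for_valid_text(tmp_path):
    # Arrange
    path = tmp_path / "out.txt"
    # Act
    result = write_text_file(path, "x")
    # Assert
    assert result is True


# --- Error paths ---
def test_write_text_file_raises_value_error_for_empty_text(tmp_path):
    # Arrange
    path = tmp_path / "out.txt"
    # Act / Assert
    with pytest.raises(ValueError, match=r"must not be empty"):
        write_text_file(path, "")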
ara_cli/version.py CHANGED
@@ -1,2 +1,2 @@
 # version.py
-__version__ = "0.1.10.4" # fith parameter like .0 for local install test purposes only. official numbers should be 4 digit numbers
+__version__ = "0.1.10.5" # fith parameter like .0 for local install test purposes only. official numbers should be 4 digit numbers
ara_cli-0.1.10.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ara_cli
-Version: 0.1.10.4
+Version: 0.1.10.5
 Summary: Powerful, open source command-line tool for managing, structuring and automating software development artifacts in line with Business-Driven Development (BDD) and AI-assisted processes
 Description-Content-Type: text/markdown
 Requires-Dist: langfuse
ara_cli-0.1.10.5.dist-info/RECORD CHANGED
@@ -22,11 +22,11 @@ ara_cli/error_handler.py,sha256=nNaJSq82f3xiz_QFRKPg5kX_-oI-UoFdRJ2OTj1AR18,4019
 ara_cli/file_classifier.py,sha256=nUcNrhflUydCyCRbXHjEEXYwwwfUm65lYnNEvc86fpM,4026
 ara_cli/file_lister.py,sha256=0C-j8IzajXo5qlvnuy5WFfe43ALwJ-0JFh2K6Xx2ccw,2332
 ara_cli/filename_validator.py,sha256=Aw9PL8d5-Ymhp3EY6lDrUBk3cudaNqo1Uw5RzPpI1jA,118
-ara_cli/global_file_lister.py,sha256=IIrtFoN5KYyJ3jVPanXZJ4UbYZfSdONRwxkZzvmq6-k,2806
+ara_cli/global_file_lister.py,sha256=y2UCDoNXODnTAjmlzlV5cf7QxiZ55KEBbbOGQ_JpiHw,2303
 ara_cli/list_filter.py,sha256=qKGwwQsrWe7L5FbdxEbBYD1bbbi8c-RMypjXqXvLbgs,5291
 ara_cli/output_suppressor.py,sha256=nwiHaQLwabOjMoJOeUESBnZszGMxrQZfJ3N2OvahX7Y,389
 ara_cli/prompt_chat.py,sha256=kd_OINDQFit6jN04bb7mzgY259JBbRaTaNp9F-webkc,1346
-ara_cli/prompt_extractor.py,sha256=KuJzx_heH3GbRr8PmbIk16ORGfttrV74aHY_iSe2OB4,9581
+ara_cli/prompt_extractor.py,sha256=sMrpxmpPMU76_PBvOU15kuflsG6mgdpSIlGPmnDRpc0,12349
 ara_cli/prompt_handler.py,sha256=3iSdEJe6RL496Gc1KncOREK_Nb9HYr7Ti9KTTwA_sTU,30393
 ara_cli/prompt_rag.py,sha256=ydlhe4CUqz0jdzlY7jBbpKaf_5fjMrAZKnriKea3ZAg,7485
 ara_cli/run_file_lister.py,sha256=XbrrDTJXp1LFGx9Lv91SNsEHZPP-PyEMBF_P4btjbDA,2360
@@ -34,7 +34,7 @@ ara_cli/tag_extractor.py,sha256=vjuPGGlT3mQpez6eY9LLgIBFQqPsTEgfAWODbXlgODc,4057
 ara_cli/template_loader.py,sha256=nYQ4zadcXX6oXSNgFNlXx8u5961BZCj3dw0tSKHdNfE,10756
 ara_cli/template_manager.py,sha256=l2c785YHB7m0e2TjE0CX-nwXrS4v3EiT9qrS5KuatAc,7105
 ara_cli/update_config_prompt.py,sha256=moqj2Kha7S7fEGzTReU0v2y8UjXC8QfnoiieOQr35C4,5157
-ara_cli/version.py,sha256=VnqPJ_1-kTVTVdR9fygWODBR47iqfCLPlvi7bgxJ_9g,146
+ara_cli/version.py,sha256=Hwkja5NG6SdOg-ccF1I4uuSp_0zOE28sIV9RMXv82EI,146
 ara_cli/ara_subcommands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ara_cli/ara_subcommands/autofix.py,sha256=h7-6hV97Q6PisUJ_U1Qs4sHYwkHsDpeYH63y_LQsfSc,1095
 ara_cli/ara_subcommands/chat.py,sha256=9zorWKbM0ulu9xFhW2tzV5vl8hCLOCjcp2E9hYgZJ90,1239
@@ -62,7 +62,7 @@ ara_cli/artefact_models/artefact_data_retrieval.py,sha256=CooXOJBYWSyiViN2xkC8ba
 ara_cli/artefact_models/artefact_load.py,sha256=IXzWxP-Q_j_oDGMno0m-OuXCQ7Vd5c_NctshGr4ROBw,621
 ara_cli/artefact_models/artefact_mapping.py,sha256=8aD0spBjkJ8toMAmFawc6UTUxB6-tEEViZXv2I-r88Q,1874
 ara_cli/artefact_models/artefact_model.py,sha256=Ek7CPzYyj5GajoSdb0G8_I9HbBXFAzRq7DB5CEyNZH0,18817
-ara_cli/artefact_models/artefact_templates.py,sha256=8N1gJlS1KLd79y2nasEgU8xeK-WaP6IenBK5Ojcmn9Y,10028
+ara_cli/artefact_models/artefact_templates.py,sha256=u2VxVXIzrC2PKWiHlhwP4pbCQGjacVqcZ8mZTdV2bE0,10042
 ara_cli/artefact_models/businessgoal_artefact_model.py,sha256=GYT5S2xEnQHwv-k-lEeX5NMSqA-UEfV3PhNjgPDUJpw,4698
 ara_cli/artefact_models/capability_artefact_model.py,sha256=SZqHx4O2mj4urn77Stnj4_Jxtlq3-LgBBU9SMkByppI,3079
 ara_cli/artefact_models/epic_artefact_model.py,sha256=_HlFmOJqkDKj3SRpLI4H5rU-va-nFamxhMH4BCODR5c,6053
@@ -104,6 +104,7 @@ ara_cli/templates/template.userstory.prompt_log.md,sha256=Yp62iF7zDy2XNIwwJN35jK
 ara_cli/templates/template.vision.prompt_log.md,sha256=CAzBzj3O23CzrPIUq3xzpXGKn3_nAvyBLRUi-5Bnq_0,196
 ara_cli/templates/prompt-modules/blueprints/complete_pytest_unittest.blueprint.md,sha256=DtZsdfVDNy9_cGE_Nn_TE2T3oRwr27kecZchOp5uIG0,672
 ara_cli/templates/prompt-modules/blueprints/empty.blueprint.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ara_cli/templates/prompt-modules/blueprints/pytest_unittest_prompt.blueprint.md,sha256=4R6Vtr55ZaPkOYM6K8_4vkHX91n5pZ1_CAVez-k9Cws,1305
 ara_cli/templates/prompt-modules/blueprints/task_todo_list_C4_architecture_analysis.blueprint.md,sha256=jEZrZaVK_pkRhLp1SpTX3xR6BGXkox6NafXEjX_GDvM,3099
 ara_cli/templates/prompt-modules/blueprints/task_todo_list_implement_feature_BDD_way.blueprint.md,sha256=EimfgzjhFIsZnJ0qMNbVlUQEU0vC9Sv1VupesLz7A1E,2088
 ara_cli/templates/prompt-modules/commands/architecture_C4_analysis.commands.md,sha256=xzfmOBsETlh90P1haJOiC8y8eLPboZ3yFodbuSUPLBU,1557
@@ -179,15 +180,15 @@ tests/test_directory_navigator.py,sha256=7G0MVrBbtBvbrFUpL0zb_9EkEWi1dulWuHsrQxM
 tests/test_file_classifier.py,sha256=4O1C_iDpGGm35b7aI-HIJd5kkWxFUOrI2n4lEpiDNTM,11855
 tests/test_file_creator.py,sha256=tgBCq6KPv-qMSDhj9AZvQIJABiAqgpFRnEg1fqbVrTI,2013
 tests/test_file_lister.py,sha256=Q9HwhKKx540EPzTmfzOCnvtAgON0aMmpJE2eOe1J3EA,4324
-tests/test_global_file_lister.py,sha256=ycvf2YL8q5QSEMwcnQfUdoWnQQ8xTSyEtccAeXwl6QU,5487
+tests/test_global_file_lister.py,sha256=E-RxGIwxbUsUxadlcFRTAOtgQcnFd-LnKsLzbH9H7ME,5498
 tests/test_list_filter.py,sha256=fJA3d_SdaOAUkE7jn68MOVS0THXGghy1fye_64Zvo1U,7964
 tests/test_prompt_handler.py,sha256=4__q8Ap2hVGXVF1DvvWqq-K630buC7yjccoWkk_FG18,34255
 tests/test_tag_extractor.py,sha256=7eVD10Y1uLkoSrEgqkXzRvPFs8lJ1RiaJzDu7ml_FZE,3118
 tests/test_template_loader.py,sha256=R7s8HJZbKqja-1TRBMBkVKPTgajofUjjRKUJq7a3_Oc,7427
 tests/test_template_manager.py,sha256=qliEeYgAEakn8JIqIHa8u0Ht6DY4L3T6DcHBXkjzR4I,4167
 tests/test_update_config_prompt.py,sha256=xsqj1WTn4BsG5Q2t-sNPfu7EoMURFcS-hfb5VSXUnJc,6765
-ara_cli-0.1.10.4.dist-info/METADATA,sha256=azNT8SFuAIl_AydpLM9v3Gd8utN4g8vTbgO8m7_3fG8,6834
-ara_cli-0.1.10.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ara_cli-0.1.10.4.dist-info/entry_points.txt,sha256=v4h7MzysTgSIDYfEo3oj4Kz_8lzsRa3hq-KJHEcLVX8,45
-ara_cli-0.1.10.4.dist-info/top_level.txt,sha256=WM4cLHT5DYUaWzLtRj-gu3yVNFpGQ6lLRI3FMmC-38I,14
-ara_cli-0.1.10.4.dist-info/RECORD,,
+ara_cli-0.1.10.5.dist-info/METADATA,sha256=bif1qapBgIms0mokosJMv63sWZe-eVp5pQepUA5h5HM,6834
+ara_cli-0.1.10.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ara_cli-0.1.10.5.dist-info/entry_points.txt,sha256=v4h7MzysTgSIDYfEo3oj4Kz_8lzsRa3hq-KJHEcLVX8,45
+ara_cli-0.1.10.5.dist-info/top_level.txt,sha256=WM4cLHT5DYUaWzLtRj-gu3yVNFpGQ6lLRI3FMmC-38I,14
+ara_cli-0.1.10.5.dist-info/RECORD,,
tests/test_global_file_lister.py CHANGED
@@ -120,7 +120,7 @@ class TestGenerateGlobalMarkdownListing:
         abs_path = os.path.abspath(non_existent_dir)
 
         assert f"# {non_existent_dir}\n" in content
-        assert f" - !! UYARI: Dizin bulunamadı: {abs_path}" in content
+        assert f" - !! Warning: Global directory not found: {abs_path}" in content
 
     def test_generate_listing_with_no_matching_files(self, temp_dir_structure, tmp_path):
         """Tests that the output file is empty if no files match the patterns."""