ara-cli 0.1.9.87-py3-none-any.whl → 0.1.9.89-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ara_cli/ara_config.py +16 -9
- ara_cli/version.py +1 -1
- {ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/METADATA +1 -1
- {ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/RECORD +9 -9
- tests/test_ara_config.py +3 -3
- tests/test_prompt_handler.py +7 -4
- {ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/WHEEL +0 -0
- {ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/entry_points.txt +0 -0
- {ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/top_level.txt +0 -0
ara_cli/ara_config.py
CHANGED
@@ -14,6 +14,7 @@ class LLMConfigItem(BaseModel):
     model: str
     temperature: float = Field(ge=0.0, le=1.0)
     max_tokens: Optional[int] = None
+    max_completion_tokens: Optional[int] = None
 
 class ARAconfig(BaseModel):
     ext_code_dirs: List[Dict[str, str]] = Field(default_factory=lambda: [
@@ -42,50 +43,56 @@ class ARAconfig(BaseModel):
         "*.jpeg",
     ])
     llm_config: Dict[str, LLMConfigItem] = Field(default_factory=lambda: {
+        "gpt-5": LLMConfigItem(
+            provider="openai",
+            model="openai/gpt-5",
+            temperature=1,
+            max_completion_tokens=16000
+        ),
         "gpt-4o": LLMConfigItem(
             provider="openai",
             model="openai/gpt-4o",
             temperature=0.8,
-            max_tokens=
+            max_tokens=16000
         ),
         "gpt-4.1": LLMConfigItem(
             provider="openai",
             model="openai/gpt-4.1",
             temperature=0.8,
-            max_tokens=
+            max_tokens=16000
         ),
         "o3-mini": LLMConfigItem(
             provider="openai",
             model="openai/o3-mini",
             temperature=1.0,
-            max_tokens=
+            max_tokens=8000
         ),
         "opus-4": LLMConfigItem(
             provider="anthropic",
             model="anthropic/claude-opus-4-20250514",
-            temperature=0.
+            temperature=0.5,
             max_tokens=32000
         ),
         "sonnet-4": LLMConfigItem(
             provider="anthropic",
             model="anthropic/claude-sonnet-4-20250514",
-            temperature=0.
-            max_tokens=
+            temperature=0.5,
+            max_tokens=32000
         ),
         "together-ai-llama-2": LLMConfigItem(
             provider="together_ai",
             model="together_ai/togethercomputer/llama-2-70b",
             temperature=0.8,
-            max_tokens=
+            max_tokens=4000
         ),
         "groq-llama-3": LLMConfigItem(
             provider="groq",
             model="groq/llama3-70b-8192",
             temperature=0.8,
-            max_tokens=
+            max_tokens=4000
         )
     })
-    default_llm: Optional[str] = "gpt-
+    default_llm: Optional[str] = "gpt-5"
 
     @model_validator(mode='after')
     def check_critical_fields(self) -> 'ARAconfig':
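The substantive change in this file: `LLMConfigItem` gains an optional `max_completion_tokens` field, a new `gpt-5` entry uses it in place of `max_tokens` (newer OpenAI reasoning-style models expect the newer parameter name), and `default_llm` moves to `"gpt-5"`. A minimal sketch of how the extended model validates, assuming pydantic v2; the `provider` field is inferred from the constructor calls in the diff, and the printed dict is illustrative:

```python
from typing import Optional
from pydantic import BaseModel, Field

class LLMConfigItem(BaseModel):
    provider: str
    model: str
    temperature: float = Field(ge=0.0, le=1.0)
    max_tokens: Optional[int] = None
    max_completion_tokens: Optional[int] = None  # added in 0.1.9.89

# The new gpt-5 entry sets max_completion_tokens and leaves max_tokens unset
item = LLMConfigItem(
    provider="openai",
    model="openai/gpt-5",
    temperature=1,
    max_completion_tokens=16000,
)

# Excluding None-valued fields keeps the unset max_tokens out of the dump
print(item.model_dump(exclude_none=True))
# {'provider': 'openai', 'model': 'openai/gpt-5', 'temperature': 1.0, 'max_completion_tokens': 16000}
```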
ara_cli/version.py
CHANGED
@@ -1,2 +1,2 @@
 # version.py
-__version__ = "0.1.9.
+__version__ = "0.1.9.89" # fith parameter like .0 for local install test purposes only. official numbers should be 4 digit numbers
{ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ara_cli
-Version: 0.1.9.87
+Version: 0.1.9.89
 Summary: Powerful, open source command-line tool for managing, structuring and automating software development artifacts in line with Business-Driven Development (BDD) and AI-assisted processes
 Description-Content-Type: text/markdown
 Requires-Dist: litellm
{ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/RECORD
CHANGED
@@ -2,7 +2,7 @@ ara_cli/__init__.py,sha256=0zl7IegxTid26EBGLav_fXZ4CCIV3H5TfAoFQiOHjvg,148
 ara_cli/__main__.py,sha256=J5DCDLRZ6UcpYwM1-NkjaLo4PTetcSj2dB4HrrftkUw,2064
 ara_cli/ara_command_action.py,sha256=_LHE2V5hbJxN7ccYiptuPktRfbTnXmQEt_D_FxDBlBY,22456
 ara_cli/ara_command_parser.py,sha256=I-e9W-QwTIMKMzlHycSlCWCyBFQfiFYvGre1XsDbrFI,20573
-ara_cli/ara_config.py,sha256=
+ara_cli/ara_config.py,sha256=w7GkDNy2Tx75LFZBu8J2KWAUMpk4F5jzuMhSjmvn948,7206
 ara_cli/artefact_autofix.py,sha256=WVTiIR-jo4YKmmz4eS3qTFvl45W1YKwAk1XSuz9QX10,20015
 ara_cli/artefact_creator.py,sha256=0Ory6cB-Ahkw-BDNb8QHnTbp_OHGABdkb9bhwcEdcIc,6063
 ara_cli/artefact_deleter.py,sha256=Co4wwCH3yW8H9NrOq7_2p5571EeHr0TsfE-H8KqoOfY,1900
@@ -31,7 +31,7 @@ ara_cli/run_file_lister.py,sha256=XbrrDTJXp1LFGx9Lv91SNsEHZPP-PyEMBF_P4btjbDA,23
 ara_cli/tag_extractor.py,sha256=TGdaQOVnjy25R0zDsAifB67C5oom0Fwo24s0_fr5A_I,3151
 ara_cli/template_manager.py,sha256=YwrN6AYPpl6ZrW8BVQpVXx8yTRf-oNpJUIKeg4NAggs,6606
 ara_cli/update_config_prompt.py,sha256=Oy9vNTw6UhDohyTEfSKkqE5ifEMPlmWNYkKHgUrK_pY,4607
-ara_cli/version.py,sha256=
+ara_cli/version.py,sha256=NaGz6YoHedIfREfmsLD-XITUWBN2mNZImEJSC6EQf9g,146
 ara_cli/artefact_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ara_cli/artefact_models/artefact_load.py,sha256=IXzWxP-Q_j_oDGMno0m-OuXCQ7Vd5c_NctshGr4ROBw,621
 ara_cli/artefact_models/artefact_mapping.py,sha256=8aD0spBjkJ8toMAmFawc6UTUxB6-tEEViZXv2I-r88Q,1874
@@ -134,7 +134,7 @@ ara_cli/templates/specification_breakdown_files/template.technology.exploration.
 ara_cli/templates/specification_breakdown_files/template.technology.md,sha256=bySiksz-8xtq0Nnj4svqe2MgUftWrVkbK9AcrDUE3KY,952
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/test_ara_command_action.py,sha256=JTLqXM9BSMlU33OQgrk_sZnoowFJZKZAx8q-st-wa34,25821
-tests/test_ara_config.py,sha256=
+tests/test_ara_config.py,sha256=H5GwDbab0GMSa6IbHdruzmbsHy5Ia0xX0uteJdfZ9Rg,14277
 tests/test_artefact_autofix.py,sha256=pApZ-N0dW8Ujt-cNLbgvd4bhiIIK8oXb-saLf6QlA-8,25022
 tests/test_artefact_fuzzy_search.py,sha256=5Sh3_l9QK8-WHn6JpGPU1b6h4QEnl2JoMq1Tdp2cj1U,1261
 tests/test_artefact_link_updater.py,sha256=biqbEp2jCOz8giv72hu2P2hDfeJfJ9OrVGdAv5d9cK4,2191
@@ -149,12 +149,12 @@ tests/test_file_classifier.py,sha256=kLWPiePu3F5mkVuI_lK_2QlLh2kXD_Mt2K8KZZ1fAnA
 tests/test_file_creator.py,sha256=D3G7MbgE0m8JmZihxnTryxLco6iZdbV--2CGc0L20FM,2109
 tests/test_file_lister.py,sha256=Q9HwhKKx540EPzTmfzOCnvtAgON0aMmpJE2eOe1J3EA,4324
 tests/test_list_filter.py,sha256=fJA3d_SdaOAUkE7jn68MOVS0THXGghy1fye_64Zvo1U,7964
-tests/test_prompt_handler.py,sha256=
+tests/test_prompt_handler.py,sha256=GJbKeipXAwKs-IpHlzaFBxB9_G3FlHwTpCGqfBNfSy8,13338
 tests/test_tag_extractor.py,sha256=nSiAYlTKZ7TLAOtcJpwK5zTWHhFYU0tI5xKnivLc1dU,2712
 tests/test_template_manager.py,sha256=q-LMHRG4rHkD6ON6YW4cpZxUx9hul6Or8wVVRC2kb-8,4099
 tests/test_update_config_prompt.py,sha256=xsqj1WTn4BsG5Q2t-sNPfu7EoMURFcS-hfb5VSXUnJc,6765
-ara_cli-0.1.9.
-ara_cli-0.1.9.
-ara_cli-0.1.9.
-ara_cli-0.1.9.
-ara_cli-0.1.9.
+ara_cli-0.1.9.89.dist-info/METADATA,sha256=f0NXWsKYiDAZ1GCsswX9Y2bPbvztjI4LSOFx7zmatio,6739
+ara_cli-0.1.9.89.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ara_cli-0.1.9.89.dist-info/entry_points.txt,sha256=v4h7MzysTgSIDYfEo3oj4Kz_8lzsRa3hq-KJHEcLVX8,45
+ara_cli-0.1.9.89.dist-info/top_level.txt,sha256=WM4cLHT5DYUaWzLtRj-gu3yVNFpGQ6lLRI3FMmC-38I,14
+ara_cli-0.1.9.89.dist-info/RECORD,,
tests/test_ara_config.py
CHANGED
@@ -97,8 +97,8 @@ class TestARAconfig:
         config = ARAconfig()
         assert config.ext_code_dirs == [{"source_dir": "./src"}, {"source_dir": "./tests"}]
         assert config.glossary_dir == "./glossary"
-        assert config.default_llm == "gpt-
-        assert "gpt-
+        assert config.default_llm == "gpt-5"
+        assert "gpt-5" in config.llm_config
 
     @patch('sys.stdout', new_callable=StringIO)
     def test_check_critical_fields_with_empty_list_reverts_to_default(self, mock_stdout):
@@ -228,7 +228,7 @@ class TestReadData:
         result = read_data("config.json")
 
         assert isinstance(result, ARAconfig)
-        assert result.default_llm == "gpt-
+        assert result.default_llm == "gpt-5" # Should be the default config
 
         output = mock_stdout.getvalue()
         assert "Error: Invalid JSON in configuration file" in output
tests/test_prompt_handler.py
CHANGED
@@ -19,8 +19,8 @@ def mock_config():
         custom_prompt_templates_subdir="custom-prompt-modules",
         ara_prompt_given_list_includes=["*.py"],
         llm_config={
-            "gpt-4o": LLMConfigItem(provider="openai", model="openai/gpt-4o", temperature=0.8, max_tokens=1024),
-            "o3-mini": LLMConfigItem(provider="openai", model="openai/o3-mini", temperature=0.9, max_tokens=2048),
+            "gpt-4o": LLMConfigItem(provider="openai", model="openai/gpt-4o", temperature=0.8, max_tokens=1024, max_completion_tokens= None),
+            "o3-mini": LLMConfigItem(provider="openai", model="openai/o3-mini", temperature=0.9, max_tokens=2048, max_completion_tokens= None),
         },
         default_llm="gpt-4o"
     )
@@ -116,8 +116,11 @@ class TestCoreLogic:
 
         result = list(prompt_handler.send_prompt(prompt))
 
-
-
+        # Create expected parameters to match the actual implementation
+        # The actual send_prompt function copies config_parameters and only removes 'provider'
+        expected_params = mock_config.llm_config['gpt-4o'].model_dump()
+        if 'provider' in expected_params:
+            del expected_params['provider']
 
         mock_completion.assert_called_once_with(
             messages=prompt,
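The two placeholder lines removed here become assertions that encode the contract spelled out in the new comments: `send_prompt` copies the selected model's config parameters and strips only `provider` before calling the completion API. A standalone sketch of that parameter-stripping pattern, using plain dicts instead of the package's pydantic models; `build_completion_params` is a hypothetical helper named for illustration, not ara-cli's actual function:

```python
from typing import Any, Dict

def build_completion_params(config_item: Dict[str, Any]) -> Dict[str, Any]:
    """Copy the config entry and drop 'provider'; every other field is forwarded."""
    params = dict(config_item)    # copy so the original config is not mutated
    params.pop("provider", None)  # litellm-style routing comes from the "openai/..." model prefix
    return params

cfg = {
    "provider": "openai",
    "model": "openai/gpt-4o",
    "temperature": 0.8,
    "max_tokens": 1024,
    "max_completion_tokens": None,
}
print(build_completion_params(cfg))
# {'model': 'openai/gpt-4o', 'temperature': 0.8, 'max_tokens': 1024, 'max_completion_tokens': None}
```

Because the test derives the expected arguments with `model_dump()` (no `exclude_none`), the new `max_completion_tokens` field is forwarded even when it is `None`, which is why the fixture above now pins it explicitly.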
{ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/WHEEL
File without changes

{ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/entry_points.txt
File without changes

{ara_cli-0.1.9.87.dist-info → ara_cli-0.1.9.89.dist-info}/top_level.txt
File without changes