olca 0.2.46__py3-none-any.whl → 0.2.47__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- olca/olcacli.py +15 -23
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/METADATA +1 -1
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/RECORD +7 -7
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/LICENSE +0 -0
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/WHEEL +0 -0
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/entry_points.txt +0 -0
- {olca-0.2.46.dist-info → olca-0.2.47.dist-info}/top_level.txt +0 -0
olca/olcacli.py CHANGED
@@ -234,7 +234,7 @@ def main():
         api_key = os.getenv(api_keyname)
         if api_key:
             os.environ[api_key_variable] = api_key
-    except
+    except:
         #load .env file in current dir or HOME and find OPENAI_API_KEY
         try:
             dotenv.load_dotenv()
@@ -246,17 +246,11 @@ def main():
         print("Error: Could not load .env file")
         exit(1)
 
-
-
-
-
-
-
     system_instructions = config.get('system_instructions', '')
     user_input = config.get('user_input', '')
     default_model_id = "gpt-4o-mini"
-    model_name=config.get('model_name', default_model_id)
-    recursion_limit=config.get('recursion_limit', 15)
+    model_name = config.get('model_name', default_model_id)
+    recursion_limit = config.get('recursion_limit', 15)
     disable_system_append = _parse_args().disable_system_append
 
     # Use the system_instructions and user_input in your CLI logic
@@ -266,11 +260,8 @@ def main():
     print("Recursion Limit:", recursion_limit)
     print("Trace:", tracing_enabled)
 
-
-
-
     model = ChatOpenAI(model=model_name, temperature=0)
-    selected_tools = [
+    selected_tools = ["terminal"]
 
     human_switch = args.human
     #look in olca_config.yaml for human: true
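Context for the hunk above and the one below (not part of the diff itself): langchain-community gates the "terminal" (shell) tool behind an explicit opt-in, which is why the rewritten load_tools calls in the next hunk pass allow_dangerous_tools=True. A minimal sketch under that assumption; the print line is illustrative only:

# Sketch only, not olca's code: load the "terminal" shell tool, which
# langchain-community marks as dangerous and requires explicit opt-in for.
from langchain_community.agent_toolkits.load_tools import load_tools

selected_tools = ["terminal"]
tools = load_tools(selected_tools, allow_dangerous_tools=True)
print([tool.name for tool in tools])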
@@ -281,43 +272,44 @@ def main():
         selected_tools.append("human")
 
     if args.math:
-        math_llm=OpenAI()
+        math_llm = OpenAI()
         selected_tools.append("llm-math")
         if human_switch:
-            tools = load_tools(
+            tools = load_tools(selected_tools, llm=math_llm, allow_dangerous_tools=True, input_func=get_input)
         else:
-            tools = load_tools(
+            tools = load_tools(selected_tools, llm=math_llm, allow_dangerous_tools=True)
     else:
         if human_switch:
-            tools = load_tools(
+            tools = load_tools(selected_tools, allow_dangerous_tools=True, input_func=get_input)
         else:
-            tools = load_tools(
+            tools = load_tools(selected_tools, allow_dangerous_tools=True)
 
     if human_switch:
         user_input = user_input + " Dont forget to USE THE HUMAN-IN-THE-LOOP TOOL"
-        system_instructions= system_instructions + ". Use the human-in-the-loop tool"
+        system_instructions = system_instructions + ". Use the human-in-the-loop tool"
 
     # Define the graph
     graph = create_react_agent(model, tools=tools)
 
     if graph.config is None:
-
+        graph.config = {}
     graph.config["recursion_limit"] = recursion_limit
 
-    inputs,system_instructions,user_input = prepare_input(user_input, system_instructions, not disable_system_append, human_switch)
+    inputs, system_instructions, user_input = prepare_input(user_input, system_instructions, not disable_system_append, human_switch)
 
     setup_required_directories(system_instructions, user_input)
 
-
     try:
         graph_config = {"callbacks": callbacks} if callbacks else {}
         if recursion_limit:
             graph_config["recursion_limit"] = recursion_limit
         print_stream(graph.stream(inputs, config=graph_config))
     except GraphRecursionError as e:
-        #print(f"Error: {e}")
         print("Recursion limit reached. Please increase the 'recursion_limit' in the olca_config.yaml file.")
         print("For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT")
+    except KeyboardInterrupt:
+        print("\nExiting gracefully.")
+        exit(0)
 
 def setup_required_directories(system_instructions, user_input):
     try:
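Taken together, the olcacli.py changes settle on one flow: build a LangGraph ReAct agent, stream it with a recursion_limit in the run config, and handle both GraphRecursionError and Ctrl-C. A minimal sketch under those assumptions; the get_time tool and the sample user message are placeholders, not olca's own values:

# Sketch only: stream a LangGraph ReAct agent with a recursion limit and
# exit cleanly on Ctrl-C, mirroring the exception handling added above.
import datetime
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langgraph.errors import GraphRecursionError
from langgraph.prebuilt import create_react_agent

@tool
def get_time() -> str:
    """Return the current time."""
    return datetime.datetime.now().isoformat()

model = ChatOpenAI(model="gpt-4o-mini", temperature=0)
graph = create_react_agent(model, tools=[get_time])  # olca passes its loaded tools here

inputs = {"messages": [("user", "What time is it right now?")]}
try:
    # recursion_limit caps how many super-steps the graph may take per run
    for chunk in graph.stream(inputs, config={"recursion_limit": 15}):
        print(chunk)
except GraphRecursionError:
    print("Recursion limit reached. Increase 'recursion_limit' in olca_config.yaml.")
except KeyboardInterrupt:
    print("\nExiting gracefully.")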
{olca-0.2.46.dist-info → olca-0.2.47.dist-info}/RECORD CHANGED
@@ -1,12 +1,12 @@
 olca/__init__.py,sha256=3QyLLAys_KiiDIe-cfO_7QyY7di_qCaCS-sVziW2BOw,23
 olca/fusewill_cli.py,sha256=Gf8CaYs7Uo4NH8QfgRNYalpmSUo047p9rzdkvIABHi8,7872
 olca/fusewill_utils.py,sha256=IOIElqWCIsNzePlS1FZa5_35vySYLwbMUGW6UhNefIc,6065
-olca/olcacli.py,sha256=
+olca/olcacli.py,sha256=JdIF3Si07uonzbmaA1H9WEkNPjCs_NszC0u1uU_oWB4,15875
 olca/tracing.py,sha256=4FprOOpq0tzucV1ekBlPgGXmTNLwJIfs21-oyheKXzA,1412
 olca/utils.py,sha256=zM94HDMDYF95Yd9ubeOK6vuepbQN4kDFh0rTvaVFagI,912
-olca-0.2.
-olca-0.2.
-olca-0.2.
-olca-0.2.
-olca-0.2.
-olca-0.2.
+olca-0.2.47.dist-info/LICENSE,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
+olca-0.2.47.dist-info/METADATA,sha256=cbj8odmvf3sIzGLyeTCi7CVzHrJBq6TgrAZ13tXauQM,25311
+olca-0.2.47.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+olca-0.2.47.dist-info/entry_points.txt,sha256=AhP5FMv6vnOq9C76V_vxRVIO50smnZXG4RIY47oD2_U,103
+olca-0.2.47.dist-info/top_level.txt,sha256=bGDtAReS-xlS0F6MM-DyD0IQUqjNdWmgemnM3vNtrpI,5
+olca-0.2.47.dist-info/RECORD,,
{olca-0.2.46.dist-info → olca-0.2.47.dist-info}/LICENSE: file without changes
{olca-0.2.46.dist-info → olca-0.2.47.dist-info}/WHEEL: file without changes
{olca-0.2.46.dist-info → olca-0.2.47.dist-info}/entry_points.txt: file without changes
{olca-0.2.46.dist-info → olca-0.2.47.dist-info}/top_level.txt: file without changes