olca 0.2.66.tar.gz → 0.2.72.tar.gz

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: olca
-Version: 0.2.66
+Version: 0.2.72
 Summary: A Python package for experimental usage of Langchain and Human-in-the-Loop
 Home-page: https://github.com/jgwill/olca
 Author: Jean GUillaume ISabelle
@@ -444,11 +444,16 @@ To initialize `olca`, you need to create a configuration file named `olca.yml`.
 ```yaml
 api_keyname: OPENAI_API_KEY__o450olca241128
 human: true
-model_name: gpt-4o-mini
+model_name: gpt-4o-mini #or bellow:
+model_name: ollama://llama3.1:latest #or with host
+model_name: ollama://llama3.1:latest@mymachine.mydomain.com:11434
 recursion_limit: 300
 system_instructions: You focus on interacting with human and do what they ask. Make sure you dont quit the program.
 temperature: 0.0
 tracing: true
+tracing_providers:
+- langsmith
+- langfuse
 user_input: Look in the file 3act.md and in ./story, we have created a story point by point and we need you to generate the next iteration of the book in the folder ./book. You use what you find in ./story to start the work. Give me your plan to correct or accept.
 ```
 
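
The three `model_name` lines above are alternatives rather than a literal config: YAML keeps only the last occurrence of a duplicated key, so a working `olca.yml` should contain exactly one of them. A minimal example using the remote-Ollama form, with the placeholder host and key name taken from the snippet above, might look like:

```yaml
# Hypothetical olca.yml keeping a single model_name (remote Ollama form)
api_keyname: OPENAI_API_KEY__o450olca241128
human: true
model_name: ollama://llama3.1:latest@mymachine.mydomain.com:11434
recursion_limit: 300
temperature: 0.0
tracing: true
tracing_providers:
- langfuse
```
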
@@ -73,11 +73,16 @@ To initialize `olca`, you need to create a configuration file named `olca.yml`.
 ```yaml
 api_keyname: OPENAI_API_KEY__o450olca241128
 human: true
-model_name: gpt-4o-mini
+model_name: gpt-4o-mini #or bellow:
+model_name: ollama://llama3.1:latest #or with host
+model_name: ollama://llama3.1:latest@mymachine.mydomain.com:11434
 recursion_limit: 300
 system_instructions: You focus on interacting with human and do what they ask. Make sure you dont quit the program.
 temperature: 0.0
 tracing: true
+tracing_providers:
+- langsmith
+- langfuse
 user_input: Look in the file 3act.md and in ./story, we have created a story point by point and we need you to generate the next iteration of the book in the folder ./book. You use what you find in ./story to start the work. Give me your plan to correct or accept.
 ```
 
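
The `ollama://model@host:port` values above are split by `parse_model_uri` in the CLI (see the `provider == "ollama"` hunk further down). That parser is not part of this diff; a minimal sketch of how such a URI could be split, with the OpenAI fallback as an assumption, is:

```python
from typing import Optional, Tuple

def parse_model_uri_sketch(model_name: str) -> Tuple[str, str, Optional[str]]:
    """Split 'provider://model[@host[:port]]' into (provider, model, host).

    Hypothetical re-implementation for illustration only; olca's real
    parse_model_uri may differ in defaults and scheme handling.
    """
    if "://" not in model_name:
        return "openai", model_name, None  # assumption: bare names are OpenAI models
    provider, rest = model_name.split("://", 1)
    if "@" in rest:
        model, host = rest.split("@", 1)
        return provider, model, f"http://{host}"  # e.g. http://mymachine.mydomain.com:11434
    return provider, rest, None

# parse_model_uri_sketch("ollama://llama3.1:latest@mymachine.mydomain.com:11434")
# -> ("ollama", "llama3.1:latest", "http://mymachine.mydomain.com:11434")
```
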
@@ -161,9 +161,6 @@ def main():
     # Load environment variables first
     load_environment()
 
-    # Initialize Langfuse if needed
-    langfuse = initialize_langfuse(debug=True if args.debug else False)
-
     if args.init:
         if os.path.exists(olca_config_file):
             print("Error: Configuration file already exists. Cannot run 'olca init'.")
@@ -179,6 +176,12 @@ def main():
         return
 
     config = load_config(olca_config_file)
+
+    tracing_providers = config.get('tracing_providers', [])
+    if 'langfuse' in tracing_providers:
+        langfuse = initialize_langfuse(debug=True if args.debug else False)
+    else:
+        langfuse = None
 
     # Initialize tracing
     tracing_manager = TracingManager(config)
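
With this change, the Langfuse client is only created when `langfuse` is listed under `tracing_providers` in `olca.yml`. A standalone sketch of the same opt-in pattern, reusing the environment-variable check that `initialize_langfuse` performs (see the utils hunk further down), could look like this; it is illustrative, not olca's actual code path:

```python
import os

def maybe_init_langfuse(config: dict, debug: bool = False):
    """Return a Langfuse client only if the user opted in and credentials are set."""
    if "langfuse" not in config.get("tracing_providers", []):
        return None
    required = ("LANGFUSE_PUBLIC_KEY", "LANGFUSE_SECRET_KEY", "LANGFUSE_HOST")
    if not all(os.getenv(name) for name in required):
        return None  # missing credentials: run without Langfuse instead of failing
    from langfuse import Langfuse  # deferred import, mirroring the refactor below
    return Langfuse(debug=debug)
```
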
@@ -225,8 +228,18 @@ def main():
     provider, base_model, host = parse_model_uri(model_name)
 
     if provider == "ollama":
-        from langchain_ollama import OllamaLLM
-        model = OllamaLLM(model=base_model, base_url=host if host else None)
+        import ollama
+        from langchain_ollama import ChatOllama
+        try:
+            ollama.Client(host=host if host else None).show(base_model)
+        except:
+            print(f"Model {base_model} not found, pulling it...")
+            pull_stream = ollama.Client(host=host if host else None).pull(base_model, stream=True)
+            for chunk in pull_stream:
+                pass
+            print(f"\nPulled {base_model}")
+
+        model = ChatOllama(model=base_model, host=host if host else None)
 
     elif provider == "openai":
         from langchain_openai import ChatOpenAI
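
The new branch checks whether the Ollama server already has the model, pulls it if not, and only then builds the chat model. A self-contained sketch of that pattern is below; note it passes `base_url` (the usual `ChatOllama` keyword for a remote server) where the diff passes `host=`, and it is an illustration rather than the packaged code:

```python
from typing import Optional

import ollama
from langchain_ollama import ChatOllama

def ensure_chat_ollama(base_model: str, host: Optional[str] = None) -> ChatOllama:
    """Pull base_model if the Ollama server lacks it, then return a ChatOllama."""
    client = ollama.Client(host=host)
    try:
        client.show(base_model)              # raises if the model is not present
    except Exception:
        print(f"Model {base_model} not found, pulling it...")
        for _ in client.pull(base_model, stream=True):
            pass                             # drain the progress stream
        print(f"Pulled {base_model}")
    return ChatOllama(model=base_model, base_url=host)

# model = ensure_chat_ollama("llama3.1:latest", "http://mymachine.mydomain.com:11434")
```
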
@@ -43,9 +43,15 @@ def initialize_config_file():
         "temperature": float(input("temperature [0]: ") or default_temperature),
         "human": input("human [true]: ").lower() in ["true", "yes", "y", "1", ""] or use_default_human_input,
         "tracing": input("tracing [true]: ").lower() in ["true", "yes", "y", "1", ""] or use_default_tracing,
-        "tracing_providers": ["langsmith", "langfuse"]
     }
-
+
+    tracing_providers = []
+    if input("Use langsmith for tracing? [Y/n]: ").lower() in ["y", "yes", ""]:
+        tracing_providers.append("langsmith")
+    if input("Use langfuse for tracing? [Y/n]: ").lower() in ["y", "yes", ""]:
+        tracing_providers.append("langfuse")
+    config["tracing_providers"] = tracing_providers
+
     user_system_instructions = input(f"system_instructions [{default_system_instructions}]: ")
     user_system_instructions = user_system_instructions or default_system_instructions
     user_system_instructions = user_system_instructions.replace("\n", " ").replace("\r", " ").replace("\t", " ")
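
The `in ["y", "yes", ""]` checks make a bare Enter count as "yes", so both tracers stay enabled by default. The same idiom as a tiny reusable helper (illustrative only, not part of olca):

```python
def ask_yes(question: str) -> bool:
    """Default-yes prompt: an empty answer (just pressing Enter) counts as yes."""
    return input(f"{question} [Y/n]: ").strip().lower() in ("y", "yes", "")

# tracing_providers = [p for p in ("langsmith", "langfuse")
#                      if ask_yes(f"Use {p} for tracing?")]
```
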
@@ -1,6 +1,4 @@
 import os
-from langfuse.callback import CallbackHandler as LangfuseCallbackHandler
-from langfuse import Langfuse
 from olca.utils import initialize_langfuse
 import warnings
 
@@ -35,6 +33,8 @@ class TracingManager:
         os.environ["LANGCHAIN_TRACING_V2"] = "true"
 
     def _setup_langfuse(self):
+        from langfuse.callback import CallbackHandler as LangfuseCallbackHandler
+        from langfuse import Langfuse
         self.langfuse = initialize_langfuse()
         if not self.langfuse:
             print("Warning: Missing Langfuse environment variables")
@@ -2,7 +2,6 @@ import os
 import sys
 import dotenv
 import webbrowser
-from langfuse import Langfuse
 
 def load_environment():
     dotenv.load_dotenv(dotenv_path=os.path.join(os.getcwd(), ".env"))
@@ -13,6 +12,7 @@ def load_environment():
     dotenv.load_dotenv(dotenv_path=os.path.expanduser("~/.env"))
 
 def initialize_langfuse( debug=False):
+    from langfuse import Langfuse
     required_vars = ["LANGFUSE_PUBLIC_KEY", "LANGFUSE_SECRET_KEY", "LANGFUSE_HOST"]
     if not all(os.getenv(var) for var in required_vars):
         return None
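
`initialize_langfuse` still returns `None` unless all three Langfuse variables are present; `load_environment()` above loads them from `./.env` or `~/.env`. An example `.env` stub, with placeholder keys and Langfuse's cloud host, might be:

```
LANGFUSE_PUBLIC_KEY=pk-lf-xxxxxxxx
LANGFUSE_SECRET_KEY=sk-lf-xxxxxxxx
LANGFUSE_HOST=https://cloud.langfuse.com
```
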
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: olca
-Version: 0.2.66
+Version: 0.2.72
 Summary: A Python package for experimental usage of Langchain and Human-in-the-Loop
 Home-page: https://github.com/jgwill/olca
 Author: Jean GUillaume ISabelle
@@ -444,11 +444,16 @@ To initialize `olca`, you need to create a configuration file named `olca.yml`.
 ```yaml
 api_keyname: OPENAI_API_KEY__o450olca241128
 human: true
-model_name: gpt-4o-mini
+model_name: gpt-4o-mini #or bellow:
+model_name: ollama://llama3.1:latest #or with host
+model_name: ollama://llama3.1:latest@mymachine.mydomain.com:11434
 recursion_limit: 300
 system_instructions: You focus on interacting with human and do what they ask. Make sure you dont quit the program.
 temperature: 0.0
 tracing: true
+tracing_providers:
+- langsmith
+- langfuse
 user_input: Look in the file 3act.md and in ./story, we have created a story point by point and we need you to generate the next iteration of the book in the folder ./book. You use what you find in ./story to start the work. Give me your plan to correct or accept.
 ```
 
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "olca"
-version = "0.2.66"
+version = "0.2.72"
 
 description = "A Python package for experimental usage of Langchain and Human-in-the-Loop"
 readme = "README.md"
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='olca',
-    version = "0.2.66",
+    version = "0.2.72",
     author='Jean GUillaume ISabelle',
     author_email='jgi@jgwill.com',
     description='A Python package for experimenting with Langchain agent and interactivity in Terminal modalities.',
The remaining 10 files in the archive are unchanged between 0.2.66 and 0.2.72.