vectara-agentic 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

vectara_agentic/__init__.py CHANGED
@@ -3,7 +3,7 @@ vectara_agentic package.
  """

  # Define the package version
- __version__ = "0.1.10"
+ __version__ = "0.1.12"

  # Import classes and functions from modules
  # from .module1 import Class1, function1
vectara_agentic/_observability.py ADDED
@@ -0,0 +1,82 @@
+ import os
+ import json
+ import pandas as pd
+
+ from phoenix.otel import register
+ import phoenix as px
+ from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
+ from phoenix.trace.dsl import SpanQuery
+ from phoenix.trace import SpanEvaluations
+
+ from .types import ObserverType
+
+ def setup_observer():
+     observer = ObserverType(os.getenv("VECTARA_AGENTIC_OBSERVER_TYPE", "NO_OBSERVER"))
+     if observer == ObserverType.ARIZE_PHOENIX:
+         phoenix_endpoint = os.getenv("PHOENIX_ENDPOINT", None)
+         if not phoenix_endpoint:
+             px.launch_app()
+             tracer_provider = register(endpoint='http://localhost:6006/v1/traces', project_name="vectara-agentic")
+         elif 'app.phoenix.arize.com' in phoenix_endpoint:  # hosted on Arize
+             phoenix_api_key = os.getenv("PHOENIX_API_KEY", None)
+             if not phoenix_api_key:
+                 raise Exception("Arize Phoenix API key not set. Please set PHOENIX_API_KEY environment variable.")
+             os.environ["PHOENIX_CLIENT_HEADERS"] = f"api_key={phoenix_api_key}"
+             os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "https://app.phoenix.arize.com"
+             tracer_provider = register(endpoint=phoenix_endpoint, project_name="vectara-agentic")
+         else:  # Self-hosted Phoenix
+             tracer_provider = register(endpoint=phoenix_endpoint, project_name="vectara-agentic")
+         LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)
+     else:
+         print("No observer set.")
+
+
+ def _extract_fcs_value(output):
+     try:
+         output_json = json.loads(output)
+         if 'metadata' in output_json and 'fcs' in output_json['metadata']:
+             return output_json['metadata']['fcs']
+     except json.JSONDecodeError:
+         print(f"Failed to parse JSON: {output}")
+     except KeyError:
+         print(f"'fcs' not found in: {output_json}")
+     return None
+
+ def _find_top_level_parent_id(row, all_spans):
+     current_id = row['parent_id']
+     while current_id is not None:
+         parent_row = all_spans[all_spans.index == current_id]
+         if parent_row.empty:
+             break
+         new_parent_id = parent_row['parent_id'].iloc[0]
+         if new_parent_id == current_id:
+             break
+         if new_parent_id is None:
+             return current_id
+         current_id = new_parent_id
+     return current_id
+
+ def eval_fcs():
+     query = SpanQuery().select(
+         "output.value",
+         "parent_id",
+         "name"
+     )
+     client = px.Client()
+     all_spans = client.query_spans(query, project_name="vectara-agentic")
+     vectara_spans = all_spans[all_spans['name'] == 'VectaraQueryEngine._query'].copy()
+     vectara_spans['top_level_parent_id'] = vectara_spans.apply(lambda row: _find_top_level_parent_id(row, all_spans), axis=1)
+     vectara_spans['score'] = vectara_spans['output.value'].apply(lambda x: _extract_fcs_value(x))
+
+     vectara_spans.reset_index(inplace=True)
+     top_level_spans = vectara_spans.copy()
+     top_level_spans['context.span_id'] = top_level_spans['top_level_parent_id']
+     vectara_spans = pd.concat([vectara_spans, top_level_spans], ignore_index=True)
+     vectara_spans.set_index('context.span_id', inplace=True)
+
+     px.Client().log_evaluations(
+         SpanEvaluations(
+             dataframe=vectara_spans,
+             eval_name="Vectara FCS",
+         ),
+     )
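For orientation, a minimal sketch of how this new module might be exercised directly (normally `setup_observer()` is invoked by the `Agent` constructor, as the agent.py change below shows). The endpoint value is a placeholder for a self-hosted Phoenix server; only `VECTARA_AGENTIC_OBSERVER_TYPE`, `PHOENIX_ENDPOINT`, `setup_observer` and `eval_fcs` come from this diff.

import os

# Placeholder endpoint for a self-hosted Phoenix server; leave PHOENIX_ENDPOINT
# unset to have vectara-agentic launch a local Phoenix instance instead.
os.environ["VECTARA_AGENTIC_OBSERVER_TYPE"] = "ARIZE_PHOENIX"
os.environ["PHOENIX_ENDPOINT"] = "http://localhost:6006/v1/traces"

from vectara_agentic._observability import setup_observer, eval_fcs

setup_observer()   # registers the Phoenix tracer provider and instruments LlamaIndex
# ... run agent queries here ...
eval_fcs()         # pulls Vectara query spans from Phoenix and logs FCS scores as "Vectara FCS" evaluations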
vectara_agentic/agent.py CHANGED
@@ -23,17 +23,14 @@ from llama_index.core.callbacks import CallbackManager, TokenCountingHandler
  from llama_index.core.callbacks.base_handler import BaseCallbackHandler
  from llama_index.agent.openai import OpenAIAgent
  from llama_index.core.memory import ChatMemoryBuffer
- from llama_index.core import set_global_handler
- from llama_index.core.tools.types import ToolMetadata
-
- import phoenix as px

  from dotenv import load_dotenv

- from .types import AgentType, AgentStatusType, LLMRole, ObserverType, ToolType
+ from .types import AgentType, AgentStatusType, LLMRole, ToolType
  from .utils import get_llm, get_tokenizer_for_model
  from ._prompts import REACT_PROMPT_TEMPLATE, GENERAL_PROMPT_TEMPLATE
  from ._callback import AgentCallbackHandler
+ from ._observability import setup_observer, eval_fcs
  from .tools import VectaraToolFactory, VectaraTool


@@ -78,7 +75,7 @@ class Agent:
          custom_instructions: str = "",
          verbose: bool = True,
          update_func: Optional[Callable[[AgentStatusType, str], None]] = None,
-         agent_type: AgentType = AgentType(os.getenv("VECTARA_AGENTIC_AGENT_TYPE", "OPENAI")),
+         agent_type: AgentType = None,
      ) -> None:
          """
          Initialize the agent with the specified type, tools, topic, and system message.
@@ -91,7 +88,7 @@ class Agent:
              verbose (bool, optional): Whether the agent should print its steps. Defaults to True.
              update_func (Callable): A callback function the code calls on any agent updates.
          """
-         self.agent_type = agent_type
+         self.agent_type = agent_type or AgentType(os.getenv("VECTARA_AGENTIC_AGENT_TYPE", "OPENAI"))
          self.tools = tools
          self.llm = get_llm(LLMRole.MAIN)
          self._custom_instructions = custom_instructions
@@ -144,17 +141,10 @@ class Agent:
          else:
              raise ValueError(f"Unknown agent type: {self.agent_type}")

-         observer = ObserverType(os.getenv("VECTARA_AGENTIC_OBSERVER_TYPE", "NO_OBSERVER"))
-         if observer == ObserverType.ARIZE_PHOENIX:
-             if os.environ.get("OTEL_EXPORTER_OTLP_HEADERS", None):
-                 set_global_handler("arize_phoenix", endpoint="https://llamatrace.com/v1/traces")
-                 print("Arize Phoenix observer set. https://llamatrace.com")
-             else:
-                 px.launch_app()
-                 set_global_handler("arize_phoenix", endpoint="http://localhost:6006/v1/traces")
-                 print("Arize Phoenix observer set. http://localhost:6006/.")
-         else:
-             print("No observer set.")
+         try:
+             setup_observer()
+         except Exception as e:
+             print(f"Failed to set up observer ({e}), ignoring")

      def __eq__(self, other):
          if not isinstance(other, Agent):
@@ -203,6 +193,7 @@ class Agent:
          custom_instructions: str = "",
          verbose: bool = True,
          update_func: Optional[Callable[[AgentStatusType, str], None]] = None,
+         agent_type: AgentType = None,
      ) -> "Agent":
          """
          Create an agent from tools, agent type, and language model.
@@ -219,7 +210,7 @@ class Agent:
          Returns:
              Agent: An instance of the Agent class.
          """
-         return cls(tools, topic, custom_instructions, verbose, update_func)
+         return cls(tools, topic, custom_instructions, verbose, update_func, agent_type)

      @classmethod
      def from_corpus(
@@ -355,6 +346,7 @@ class Agent:
              agent_response = self.agent.chat(prompt)
              if self.verbose:
                  print(f"Time taken: {time.time() - st}")
+             eval_fcs()
              return agent_response.response
          except Exception as e:
              import traceback
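The net effect of these changes is that the agent type can now be passed explicitly to `Agent.from_tools` instead of relying solely on the `VECTARA_AGENTIC_AGENT_TYPE` environment variable, and observer setup no longer aborts agent construction on failure. A hedged sketch of the new call follows; the tools list, topic and instructions are placeholders, not taken from this diff.

from vectara_agentic.agent import Agent
from vectara_agentic.types import AgentType

my_tools = [...]  # placeholder: tools built elsewhere, e.g. via VectaraToolFactory

agent = Agent.from_tools(
    tools=my_tools,
    topic="finance",                          # illustrative topic
    custom_instructions="Answer concisely.",  # illustrative instructions
    agent_type=AgentType.REACT,               # optional; falls back to VECTARA_AGENTIC_AGENT_TYPE (default OPENAI)
)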
vectara_agentic/types.py CHANGED
@@ -28,6 +28,7 @@ class ModelProvider(Enum):
      GROQ = "GROQ"
      FIREWORKS = "FIREWORKS"
      COHERE = "COHERE"
+     GEMINI = "GEMINI"


  class AgentStatusType(Enum):
vectara_agentic/utils.py CHANGED
@@ -11,6 +11,7 @@ from llama_index.llms.together import TogetherLLM
  from llama_index.llms.groq import Groq
  from llama_index.llms.fireworks import Fireworks
  from llama_index.llms.cohere import Cohere
+ from llama_index.llms.gemini import Gemini

  import tiktoken
  from typing import Tuple, Callable, Optional
@@ -24,6 +25,7 @@ provider_to_default_model_name = {
      ModelProvider.GROQ: "llama-3.1-70b-versatile",
      ModelProvider.FIREWORKS: "accounts/fireworks/models/firefunction-v2",
      ModelProvider.COHERE: "command-r-plus",
+     ModelProvider.GEMINI: "models/gemini-pro",
  }

  DEFAULT_MODEL_PROVIDER = ModelProvider.OPENAI
@@ -77,17 +79,19 @@ def get_llm(role: LLMRole) -> LLM:
      model_provider, model_name = _get_llm_params_for_role(role)

      if model_provider == ModelProvider.OPENAI:
-         llm = OpenAI(model=model_name, temperature=0)
+         llm = OpenAI(model=model_name, temperature=0, is_function_calling_model=True)
      elif model_provider == ModelProvider.ANTHROPIC:
-         llm = Anthropic(model=model_name, temperature=0)
+         llm = Anthropic(model=model_name, temperature=0, is_function_calling_model=True)
+     elif model_provider == ModelProvider.GEMINI:
+         llm = Gemini(model=model_name, temperature=0, is_function_calling_model=True)
      elif model_provider == ModelProvider.TOGETHER:
-         llm = TogetherLLM(model=model_name, temperature=0)
+         llm = TogetherLLM(model=model_name, temperature=0, is_function_calling_model=True)
      elif model_provider == ModelProvider.GROQ:
-         llm = Groq(model=model_name, temperature=0)
+         llm = Groq(model=model_name, temperature=0, is_function_calling_model=True)
      elif model_provider == ModelProvider.FIREWORKS:
-         llm = Fireworks(model=model_name, temperature=0)
+         llm = Fireworks(model=model_name, temperature=0, is_function_calling_model=True)
      elif model_provider == ModelProvider.COHERE:
-         llm = Cohere(model=model_name, temperature=0)
+         llm = Cohere(model=model_name, temperature=0, is_function_calling_model=True)
      else:
          raise ValueError(f"Unknown LLM provider: {model_provider}")

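To illustrate the new provider, a short sketch of selecting Gemini through the existing environment-variable mechanism and `get_llm`; the `GOOGLE_API_KEY` name is an assumption about the underlying `llama-index-llms-gemini` backend, not part of this diff.

import os

os.environ["VECTARA_AGENTIC_MAIN_LLM_PROVIDER"] = "GEMINI"
# VECTARA_AGENTIC_MAIN_MODEL_NAME is optional; "models/gemini-pro" is the default added above.
os.environ["GOOGLE_API_KEY"] = "<your-google-api-key>"  # assumed key name for the Gemini backend

from vectara_agentic.utils import get_llm
from vectara_agentic.types import LLMRole

llm = get_llm(LLMRole.MAIN)  # returns a Gemini LLM, now flagged as a function-calling model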
{vectara_agentic-0.1.10.dist-info → vectara_agentic-0.1.12.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vectara_agentic
- Version: 0.1.10
+ Version: 0.1.12
  Summary: A Python package for creating AI Assistants and AI Agents with Vectara
  Home-page: https://github.com/vectara/py-vectara-agentic
  Author: Ofer Mendelevitch
@@ -16,23 +16,27 @@ Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: llama-index ==0.11.10
+ Requires-Dist: llama-index ==0.11.13
  Requires-Dist: llama-index-indices-managed-vectara ==0.2.2
  Requires-Dist: llama-index-agent-llm-compiler ==0.2.0
- Requires-Dist: llama-index-agent-openai ==0.3.1
- Requires-Dist: llama-index-llms-openai ==0.2.7
+ Requires-Dist: llama-index-agent-openai ==0.3.4
+ Requires-Dist: llama-index-llms-openai ==0.2.9
  Requires-Dist: llama-index-llms-anthropic ==0.3.1
  Requires-Dist: llama-index-llms-together ==0.2.0
  Requires-Dist: llama-index-llms-groq ==0.2.0
  Requires-Dist: llama-index-llms-fireworks ==0.2.0
  Requires-Dist: llama-index-llms-cohere ==0.3.0
+ Requires-Dist: llama-index-llms-gemini ==0.3.5
  Requires-Dist: llama-index-tools-yahoo-finance ==0.2.0
  Requires-Dist: llama-index-tools-arxiv ==0.2.0
  Requires-Dist: llama-index-tools-database ==0.2.0
  Requires-Dist: llama-index-tools-google ==0.2.0
  Requires-Dist: llama-index-tools-tavily-research ==0.2.0
- Requires-Dist: llama-index-callbacks-arize-phoenix ==0.2.1
- Requires-Dist: pydantic ==2.8.2
+ Requires-Dist: openinference-instrumentation-llama-index ==3.0.2
+ Requires-Dist: opentelemetry-proto ==1.16.0
+ Requires-Dist: arize-phoenix-otel ==0.5.1
+ Requires-Dist: tokenizers >=0.20
+ Requires-Dist: pydantic ==2.9.2
  Requires-Dist: retrying ==1.3.4
  Requires-Dist: pymongo ==4.6.3
  Requires-Dist: python-dotenv ==1.0.1
@@ -65,7 +69,7 @@ Requires-Dist: dill ==0.3.8
  - [Vectara account](https://console.vectara.com/signup/?utm_source=github&utm_medium=code&utm_term=DevRel&utm_content=vectara-agentic&utm_campaign=github-code-DevRel-vectara-agentic)
  - A Vectara corpus with an [API key](https://docs.vectara.com/docs/api-keys)
  - [Python 3.10 or higher](https://www.python.org/downloads/)
- - OpenAI API key (or API keys for Anthropic, TOGETHER.AI, Fireworks AI, Cohere or GROQ)
+ - OpenAI API key (or API keys for Anthropic, TOGETHER.AI, Fireworks AI, Cohere, GEMINI or GROQ)

  ## Installation

@@ -142,7 +146,7 @@ financial_bot_instructions = """
  Configure `vectara-agentic` using environment variables:

  - `VECTARA_AGENTIC_AGENT_TYPE`: valid values are `REACT`, `LLMCOMPILER` or `OPENAI` (default: `OPENAI`)
- - `VECTARA_AGENTIC_MAIN_LLM_PROVIDER`: valid values are `OPENAI`, `ANTHROPIC`, `TOGETHER`, `GROQ`, `COHERE` or `FIREWORKS` (default: `OPENAI`)
+ - `VECTARA_AGENTIC_MAIN_LLM_PROVIDER`: valid values are `OPENAI`, `ANTHROPIC`, `TOGETHER`, `GROQ`, `COHERE`, `GEMINI` or `FIREWORKS` (default: `OPENAI`)
  - `VECTARA_AGENTIC_MAIN_MODEL_NAME`: agent model name (default depends on provider)
  - `VECTARA_AGENTIC_TOOL_LLM_PROVIDER`: tool LLM provider (default: `OPENAI`)
  - `VECTARA_AGENTIC_TOOL_MODEL_NAME`: tool model name (default depends on provider)
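For illustration only, the variables listed in this hunk could be set in code before creating an agent; the values below are examples, not defaults from this diff.

import os

os.environ["VECTARA_AGENTIC_AGENT_TYPE"] = "REACT"             # or "LLMCOMPILER" / "OPENAI"
os.environ["VECTARA_AGENTIC_TOOL_LLM_PROVIDER"] = "OPENAI"
os.environ["VECTARA_AGENTIC_TOOL_MODEL_NAME"] = "gpt-4o-mini"  # example model name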
@@ -192,13 +196,20 @@ The `Agent` class supports serialization. Use the `dumps()` to serialize and `lo
  ## Observability

  vectara-agentic supports observability via the existing integration of LlamaIndex and Arize Phoenix.
- To enable tracing of your vectara-agentic assistant, follow these steps (adapted from [here](https://docs.llamaindex.ai/en/stable/module_guides/observability/)):
- 1. Go to `https://llamatrace.com/login` an create an account, then create an API key and put it in the `PHOENIX_API_KEY` variable
- 2. `os["VECTARA_AGENTIC_OBSERVER_TYPE"] = "ARIZE_PHOENIX"`: to enable Arize Phoenix observability
- 3. `os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"api_key={PHOENIX_API_KEY}"`
+ First, set `os.environ["VECTARA_AGENTIC_OBSERVER_TYPE"] = "ARIZE_PHOENIX"`.
+ Then you can use Arize Phoenix in two ways:
+ 1. **Locally**.
+    1. If you have a local Phoenix server running (e.g. started with `python -m phoenix.server.main serve`), vectara-agentic will send all traces to it.
+    2. If not, vectara-agentic will run a local instance during the agent's lifecycle and close it when finished.
+    3. In both cases, traces are sent to the local instance, and you can view the dashboard at `http://localhost:6006`.
+ 2. **Hosted instance**. In this case traces are sent to the Phoenix instance hosted on Arize.
+    1. Go to `https://app.phoenix.arize.com` and set up an account if you don't have one.
+    2. Create an API key and put it in the `PHOENIX_API_KEY` environment variable; this indicates you want to use the hosted version.
+    3. To view the traces, go to `https://app.phoenix.arize.com`.
+
+ Now when you run your agent, all call traces are sent to Phoenix and recorded.
+ In addition, vectara-agentic records `FCS` values in Arize for every Vectara RAG call. You can see those results in the `Feedback` column of the Arize UI.

- Now when you run your agent, all metrics are sent to LlamaTrace and recorded. You can view them at `https://llamatrace.com`.
- If you do not include the `OTEL_EXPORTER_OTLP_HEADERS` a local instance of Arize Phoenix will be setup instead and you can view it at `http://localhost:6006`


  ## About Custom Instructions

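A minimal sketch of the hosted-Phoenix setup described in the hunk above, with placeholder values; note that per the new `_observability.py`, the hosted path is selected when `PHOENIX_ENDPOINT` points at `app.phoenix.arize.com`.

import os

os.environ["VECTARA_AGENTIC_OBSERVER_TYPE"] = "ARIZE_PHOENIX"
os.environ["PHOENIX_ENDPOINT"] = "https://app.phoenix.arize.com"  # selects the hosted Arize Phoenix path
os.environ["PHOENIX_API_KEY"] = "<your-phoenix-api-key>"          # placeholder

# Any Agent created after this point registers the Phoenix tracer; Vectara FCS
# scores show up under the "Vectara FCS" evaluation in the Arize UI.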
vectara_agentic-0.1.12.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ vectara_agentic/__init__.py,sha256=jCwV8s1vQMRav_ZDkG32Va_hjXaS52ejqq05IR0T5YY,449
+ vectara_agentic/_callback.py,sha256=_o8XK1gBmsqpsJACAdJtbtnOnhLe6ZbGahCgb3WMuJQ,3674
+ vectara_agentic/_observability.py,sha256=ABn3gY18QrVZZRap23Qw6xeMDBn8U7tQycja6ZNFX-Y,3338
+ vectara_agentic/_prompts.py,sha256=UV03GBdz0LplkyOacJyBLbrBpWSqUS7iRtM5xmJ0BVU,4572
+ vectara_agentic/agent.py,sha256=8--LXTqBZQbwQx8d6LMxcgJDPJnXev6RfeSU4FPJqQM,19172
+ vectara_agentic/tools.py,sha256=1-1GmOQPrxbS6kumFrco2kGheIz0f19sgzPtb_-GjIU,19071
+ vectara_agentic/tools_catalog.py,sha256=RByoXkF1GhY0rPQGLIeiqQo-j7o1h3lA6KY55ZM9mGg,4448
+ vectara_agentic/types.py,sha256=SSo4Ph6aaEELF0OTKetyghq5GNgMnVCj49i8yq5TYsU,1102
+ vectara_agentic/utils.py,sha256=lWfMqUhcT80jjDQOcx41I3L88z5yEAAyBQrtImnF5o0,3757
+ vectara_agentic-0.1.12.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ vectara_agentic-0.1.12.dist-info/METADATA,sha256=6iA3xwkxP_I-OpMRlN8DixSdfO54g3ZJj5TnhrbCfMI,11466
+ vectara_agentic-0.1.12.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ vectara_agentic-0.1.12.dist-info/top_level.txt,sha256=qT7JB9Xz7byehzlPd_rY4WWEAvPMhs63WMWgPsFthxU,16
+ vectara_agentic-0.1.12.dist-info/RECORD,,
vectara_agentic-0.1.10.dist-info/RECORD DELETED
@@ -1,13 +0,0 @@
- vectara_agentic/__init__.py,sha256=K7BcM2qRPyrdggQvXrdcPkharv0_8k763NZI3icrGxw,449
- vectara_agentic/_callback.py,sha256=_o8XK1gBmsqpsJACAdJtbtnOnhLe6ZbGahCgb3WMuJQ,3674
- vectara_agentic/_prompts.py,sha256=UV03GBdz0LplkyOacJyBLbrBpWSqUS7iRtM5xmJ0BVU,4572
- vectara_agentic/agent.py,sha256=N3FUcpvrCcnORNw57HVDAtPi_cMqqxOr7po37ybSONA,19696
- vectara_agentic/tools.py,sha256=1-1GmOQPrxbS6kumFrco2kGheIz0f19sgzPtb_-GjIU,19071
- vectara_agentic/tools_catalog.py,sha256=RByoXkF1GhY0rPQGLIeiqQo-j7o1h3lA6KY55ZM9mGg,4448
- vectara_agentic/types.py,sha256=lTL3Is5W7IFyTKuEKu_VKaAsmVFVzKss_y184ayLti8,1080
- vectara_agentic/utils.py,sha256=x8nBncooXHm6gXH-A77TRVzoPGoGleO5VeYi2fVRAA4,3340
- vectara_agentic-0.1.10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- vectara_agentic-0.1.10.dist-info/METADATA,sha256=vopWHXs1wXL6_-6d2-RfeKq4ilGRr5uZfouy8MzTeXE,10882
- vectara_agentic-0.1.10.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- vectara_agentic-0.1.10.dist-info/top_level.txt,sha256=qT7JB9Xz7byehzlPd_rY4WWEAvPMhs63WMWgPsFthxU,16
- vectara_agentic-0.1.10.dist-info/RECORD,,