crewplus 0.2.31__py3-none-any.whl → 0.2.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crewplus might be problematic.

@@ -4,6 +4,7 @@ import logging
  from typing import Any, Dict, Iterator, List, Optional, AsyncIterator, Union, Tuple
  from google import genai
  from google.genai import types
+ from google.oauth2 import service_account
  import base64
  import requests
  from langchain_core.language_models import BaseChatModel
@@ -31,8 +32,15 @@ class GeminiChatModel(BaseChatModel):
  with the LangChain ecosystem.

  It supports standard invocation, streaming, and asynchronous operations.
- API keys can be provided directly or loaded from the `GOOGLE_API_KEY`
- environment variable.
+ By default, it uses Google AI with an API key. It can also be configured to use
+ Google Cloud Vertex AI.
+
+ **Authentication:**
+ - **Google AI (Default):** The `google_api_key` parameter or the `GOOGLE_API_KEY`
+ environment variable is used.
+ - **Vertex AI:** To use Vertex AI, set `use_vertex_ai=True` and provide
+ GCP configuration (`project_id`, `location`). Authentication is handled
+ via `service_account_file`, `credentials`, or Application Default Credentials (ADC).

  **Tracing Integration:**
  Tracing (e.g., with Langfuse) is automatically enabled when the respective
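
As a reader's aid, here is a minimal sketch of the two authentication modes described in the docstring hunk above. Parameter names (`model_name`, `google_api_key`, `use_vertex_ai`, `project_id`, `location`, `service_account_file`) are taken from this diff; the import path follows the `crewplus/services/gemini_chat_model.py` module listed in the RECORD further down, and all key, project, and path values are placeholders.

    from crewplus.services.gemini_chat_model import GeminiChatModel

    # Google AI (default): the API key comes from the argument or the
    # GOOGLE_API_KEY environment variable.
    google_ai_model = GeminiChatModel(
        model_name="gemini-1.5-flash-001",
        google_api_key="YOUR_GOOGLE_API_KEY",  # placeholder
    )

    # Vertex AI: project_id and location are required; credentials come from
    # service_account_file, a credentials object, or Application Default Credentials.
    vertex_model = GeminiChatModel(
        model_name="gemini-1.5-flash-001",
        use_vertex_ai=True,
        project_id="your-gcp-project-id",          # placeholder
        location="us-central1",
        service_account_file="path/to/key.json",   # optional; ADC is used if omitted
    )
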
@@ -53,6 +61,11 @@ class GeminiChatModel(BaseChatModel):
  top_k (Optional[int]): The top-k sampling parameter.
  logger (Optional[logging.Logger]): An optional logger instance.
  enable_tracing (Optional[bool]): Enable/disable all tracing (auto-detect if None).
+ use_vertex_ai (bool): If True, uses Vertex AI instead of Google AI Platform. Defaults to False.
+ project_id (Optional[str]): GCP Project ID, required for Vertex AI.
+ location (Optional[str]): GCP Location for Vertex AI (e.g., "us-central1").
+ service_account_file (Optional[str]): Path to GCP service account JSON for Vertex AI.
+ credentials (Optional[Any]): GCP credentials object for Vertex AI (alternative to file).

  Example:
  .. code-block:: python
@@ -185,6 +198,22 @@ class GeminiChatModel(BaseChatModel):
  "Hello without tracing",
  config={"metadata": {"tracing_disabled": True}}
  )
+
+ Example (Vertex AI):
+ .. code-block:: python
+
+ # Assumes GCP environment is configured (e.g., gcloud auth application-default login)
+ # or environment variables are set:
+ # os.environ["GCP_PROJECT_ID"] = "your-gcp-project-id"
+ # os.environ["GCP_LOCATION"] = "us-central1"
+ # os.environ["GCP_SERVICE_ACCOUNT_FILE"] = "path/to/your/service-account-key.json"
+
+ vertex_model = GeminiChatModel(
+ model_name="gemini-1.5-flash-001",
+ use_vertex_ai=True,
+ )
+ response = vertex_model.invoke("Hello from Vertex AI!")
+ print(response.content)
  """

  # Model configuration
@@ -195,6 +224,13 @@ class GeminiChatModel(BaseChatModel):
  top_p: Optional[float] = Field(default=None, description="Top-p sampling parameter")
  top_k: Optional[int] = Field(default=None, description="Top-k sampling parameter")

+ # Vertex AI specific configuration
+ use_vertex_ai: bool = Field(default=False, description="Use Vertex AI instead of Google AI Platform")
+ project_id: Optional[str] = Field(default=None, description="Google Cloud Project ID for Vertex AI")
+ location: Optional[str] = Field(default=None, description="Google Cloud Location for Vertex AI (e.g., 'us-central1')")
+ service_account_file: Optional[str] = Field(default=None, description="Path to Google Cloud service account key file")
+ credentials: Optional[Any] = Field(default=None, description="Google Cloud credentials object", exclude=True)
+
  # Configuration for tracing and logging
  logger: Optional[logging.Logger] = Field(default=None, description="Optional logger instance", exclude=True)
  enable_tracing: Optional[bool] = Field(default=None, description="Enable tracing (auto-detect if None)")
@@ -213,6 +249,19 @@ class GeminiChatModel(BaseChatModel):
  self.logger.addHandler(logging.StreamHandler())
  self.logger.setLevel(logging.INFO)

+ self._initialize_client()
+
+ self._tracing_manager = TracingManager(self)
+
+ def _initialize_client(self):
+ """Initializes the Google GenAI client for either Google AI or Vertex AI."""
+ if self.use_vertex_ai:
+ self._init_vertex_ai_client()
+ else:
+ self._init_google_ai_client()
+
+ def _init_google_ai_client(self):
+ """Initializes the client for Google AI Platform."""
  # Get API key from environment if not provided
  if self.google_api_key is None:
  api_key = os.getenv("GOOGLE_API_KEY")
@@ -222,13 +271,51 @@ class GeminiChatModel(BaseChatModel):
  # Initialize the Google GenAI client
  if self.google_api_key:
  self._client = genai.Client(api_key=self.google_api_key.get_secret_value())
- self.logger.info(f"Initialized GeminiChatModel with model: {self.model_name}")
+ self.logger.info(f"Initialized GeminiChatModel with model: {self.model_name} for Google AI")
  else:
  error_msg = "Google API key is required. Set GOOGLE_API_KEY environment variable or pass google_api_key parameter."
  self.logger.error(error_msg)
  raise ValueError(error_msg)
+
+ def _init_vertex_ai_client(self):
+ """Initializes the client for Vertex AI."""
+ # Get config from environment if not provided
+ if self.project_id is None:
+ self.project_id = os.getenv("GCP_PROJECT_ID")
+ if self.location is None:
+ self.location = os.getenv("GCP_LOCATION")

- self._tracing_manager = TracingManager(self)
+ if not self.project_id or not self.location:
+ error_msg = "For Vertex AI, 'project_id' and 'location' are required."
+ self.logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ creds = self.credentials
+ if creds is None:
+ # Get service account file from env if not provided
+ sa_file = self.service_account_file or os.getenv("GCP_SERVICE_ACCOUNT_FILE")
+ if sa_file:
+ try:
+ creds = service_account.Credentials.from_service_account_file(
+ sa_file,
+ scopes=['https://www.googleapis.com/auth/cloud-platform']
+ )
+ except Exception as e:
+ error_msg = f"Failed to load credentials from service account file '{sa_file}': {e}"
+ self.logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ # If creds is still None, the client will use Application Default Credentials (ADC).
+
+ self._client = genai.Client(
+ project=self.project_id,
+ location=self.location,
+ credentials=creds,
+ )
+ self.logger.info(
+ f"Initialized GeminiChatModel with model: {self.model_name} for Vertex AI "
+ f"(Project: {self.project_id}, Location: {self.location})"
+ )

  def get_model_identifier(self) -> str:
  """Return a string identifying this model for tracing and logging."""
@@ -188,6 +188,25 @@ class ModelLoadBalancer:
  if 'max_tokens' in model_config:
  kwargs['max_tokens'] = model_config['max_tokens']
  return GeminiChatModel(**kwargs)
+ elif provider == 'vertex-ai':
+ deployment_name = model_config['deployment_name']
+
+ # Handle the 'model_name@location' format for deployment_name
+ model_name_for_gemini = deployment_name.split('@')[0] if '@' in deployment_name else deployment_name
+
+ kwargs = {
+ 'use_vertex_ai': True,
+ 'model_name': model_name_for_gemini,
+ 'project_id': model_config['project_id'],
+ 'location': model_config['location'],
+ }
+ if 'service_account_file' in model_config:
+ kwargs['service_account_file'] = model_config['service_account_file']
+ if 'temperature' in model_config:
+ kwargs['temperature'] = model_config['temperature']
+ if 'max_tokens' in model_config:
+ kwargs['max_tokens'] = model_config['max_tokens']
+ return GeminiChatModel(**kwargs)
  else:
  self.logger.error(f"Unsupported provider: {provider}")
  raise ValueError(f"Unsupported provider: {provider}")
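
The new `vertex-ai` branch of `ModelLoadBalancer` reads `deployment_name`, `project_id`, and `location` from `model_config`, plus optional `service_account_file`, `temperature`, and `max_tokens`, and strips a `model@location` style `deployment_name` down to the bare model name. A sketch of a config entry that would satisfy this branch follows; how the `provider` value is dispatched happens outside this hunk, and all values are placeholders.

    # Hypothetical config entry for the 'vertex-ai' branch shown above.
    vertex_model_config = {
        "provider": "vertex-ai",                                 # assumed key; dispatch on provider is outside this hunk
        "deployment_name": "gemini-1.5-flash-001@us-central1",   # only the part before '@' is used as model_name
        "project_id": "your-gcp-project-id",
        "location": "us-central1",
        "service_account_file": "path/to/key.json",              # optional
        "temperature": 0.2,                                      # optional
        "max_tokens": 1024,                                      # optional
    }
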
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: crewplus
- Version: 0.2.31
+ Version: 0.2.34
  Summary: Base services for CrewPlus AI applications
  Author-Email: Tim Liu <tim@opsmateai.com>
  License: MIT
@@ -1,13 +1,13 @@
- crewplus-0.2.31.dist-info/METADATA,sha256=IyfwlRN7t1zCjAlVhAobrV8gxmJ1z3VuBe-tbmv6Za0,5327
- crewplus-0.2.31.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
- crewplus-0.2.31.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
- crewplus-0.2.31.dist-info/licenses/LICENSE,sha256=2_NHSHRTKB_cTcT_GXgcenOCtIZku8j343mOgAguTfc,1087
+ crewplus-0.2.34.dist-info/METADATA,sha256=UNWEOkl43TfQMN5AtWXFlMKy2Fdy7zBv6oKfN8_y_qM,5327
+ crewplus-0.2.34.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+ crewplus-0.2.34.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+ crewplus-0.2.34.dist-info/licenses/LICENSE,sha256=2_NHSHRTKB_cTcT_GXgcenOCtIZku8j343mOgAguTfc,1087
  crewplus/__init__.py,sha256=m46HkZL1Y4toD619NL47Sn2Qe084WFFSFD7e6VoYKZc,284
  crewplus/services/__init__.py,sha256=V1CG8b2NOmRzNgQH7BPl4KVxWSYJH5vfEsW1wVErKNE,375
  crewplus/services/azure_chat_model.py,sha256=WMSf4BDO8UcP7ZASNGRJxdTEnuWBmCRSY_4yx_VMbok,5499
- crewplus/services/gemini_chat_model.py,sha256=oVLL07VEjwrHOeP56YSPLnldbfjvTVRrsTI6xcPNt1E,35224
+ crewplus/services/gemini_chat_model.py,sha256=XlJeaic1lq31lMx1EPKCe4AlYaHDR57tXOdu8V8j6dU,39623
  crewplus/services/init_services.py,sha256=EBpDkIwzuujmdlqjyWvdLQCfhQmfS_OKFz-9Ji8nmAU,1628
- crewplus/services/model_load_balancer.py,sha256=2LOiPUslRKreBZTndKOx6WH8VtJT8mep37GsDINQTJs,10780
+ crewplus/services/model_load_balancer.py,sha256=PU3wn8lh6pGeVFn62SURW_1lIGXbUnAklM1EWcJMhLU,11752
  crewplus/services/tracing_manager.py,sha256=aCU9N4Jvh8pDD3h8kWX4O-Ax8xwdLHnQ4wJ3sf-vLwA,6289
  crewplus/utils/__init__.py,sha256=2Gk1n5srFJQnFfBuYTxktdtKOVZyNrFcNaZKhXk35Pw,142
  crewplus/utils/schema_action.py,sha256=GDaBoVFQD1rXqrLVSMTfXYW1xcUu7eDcHsn57XBSnIg,422
@@ -20,4 +20,4 @@ docs/GeminiChatModel.md,sha256=zZYyl6RmjZTUsKxxMiC9O4yV70MC4TD-IGUmWhIDBKA,8677
  docs/ModelLoadBalancer.md,sha256=aGHES1dcXPz4c7Y8kB5-vsCNJjriH2SWmjBkSGoYKiI,4398
  docs/VDBService.md,sha256=Dw286Rrf_fsi13jyD3Bo4Sy7nZ_G7tYm7d8MZ2j9hxk,9375
  docs/index.md,sha256=3tlc15uR8lzFNM5WjdoZLw0Y9o1P1gwgbEnOdIBspqc,1643
- crewplus-0.2.31.dist-info/RECORD,,
+ crewplus-0.2.34.dist-info/RECORD,,