openlit 1.25.0__tar.gz → 1.26.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {openlit-1.25.0 → openlit-1.26.0}/PKG-INFO +2 -2
  2. {openlit-1.25.0 → openlit-1.26.0}/README.md +1 -1
  3. {openlit-1.25.0 → openlit-1.26.0}/pyproject.toml +1 -1
  4. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/__helpers.py +14 -0
  5. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/__init__.py +58 -14
  6. {openlit-1.25.0 → openlit-1.26.0}/LICENSE +0 -0
  7. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  8. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  9. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  10. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/__init__.py +0 -0
  11. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +0 -0
  12. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +0 -0
  13. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
  14. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
  15. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  16. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  17. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  18. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  19. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/__init__.py +0 -0
  20. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/async_elevenlabs.py +0 -0
  21. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/elevenlabs.py +0 -0
  22. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/embedchain/__init__.py +0 -0
  23. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/embedchain/embedchain.py +0 -0
  24. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/__init__.py +0 -0
  25. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +0 -0
  26. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/google_ai_studio.py +0 -0
  27. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/gpt4all/__init__.py +0 -0
  28. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/gpt4all/gpt4all.py +0 -0
  29. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/gpu/__init__.py +0 -0
  30. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/groq/__init__.py +0 -0
  31. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/groq/async_groq.py +0 -0
  32. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/groq/groq.py +0 -0
  33. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/haystack/__init__.py +0 -0
  34. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/haystack/haystack.py +0 -0
  35. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  36. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/langchain/langchain.py +0 -0
  37. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
  38. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
  39. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/milvus/__init__.py +0 -0
  40. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/milvus/milvus.py +0 -0
  41. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  42. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  43. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  44. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/ollama/__init__.py +0 -0
  45. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
  46. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/ollama/ollama.py +0 -0
  47. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/openai/__init__.py +0 -0
  48. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  49. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  50. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  51. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/openai/openai.py +0 -0
  52. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  53. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  54. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
  55. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
  56. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  57. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  58. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
  59. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
  60. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
  61. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/vllm/__init__.py +0 -0
  62. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/instrumentation/vllm/vllm.py +0 -0
  63. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/otel/metrics.py +0 -0
  64. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/otel/tracing.py +0 -0
  65. {openlit-1.25.0 → openlit-1.26.0}/src/openlit/semcov/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: openlit
3
- Version: 1.25.0
3
+ Version: 1.26.0
4
4
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
5
5
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
6
6
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -173,7 +173,7 @@ To send telemetry to OpenTelemetry backends requiring authentication, set the `o
173
173
  ### Step 3: Visualize and Optimize!
174
174
  With the LLM Observability data now being collected and sent to OpenLIT, the next step is to visualize and analyze this data to get insights into your LLM application’s performance, behavior, and identify areas of improvement.
175
175
 
176
- To begin exploring your LLM Application's performance data within the OpenLIT UI, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
176
+ To begin exploring your LLM Application's performance data within the OpenLIT, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
177
177
 
178
178
  If you want to integrate and send metrics and traces to your existing observability tools, refer to our [Connections Guide](https://docs.openlit.io/latest/connections/intro) for detailed instructions.
179
179
 
@@ -145,7 +145,7 @@ To send telemetry to OpenTelemetry backends requiring authentication, set the `o
145
145
  ### Step 3: Visualize and Optimize!
146
146
  With the LLM Observability data now being collected and sent to OpenLIT, the next step is to visualize and analyze this data to get insights into your LLM application’s performance, behavior, and identify areas of improvement.
147
147
 
148
- To begin exploring your LLM Application's performance data within the OpenLIT UI, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
148
+ To begin exploring your LLM Application's performance data within the OpenLIT, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
149
149
 
150
150
  If you want to integrate and send metrics and traces to your existing observability tools, refer to our [Connections Guide](https://docs.openlit.io/latest/connections/intro) for detailed instructions.
151
151
 
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "openlit"
3
- version = "1.25.0"
3
+ version = "1.26.0"
4
4
  description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects"
5
5
  authors = ["OpenLIT"]
6
6
  repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
@@ -2,6 +2,7 @@
2
2
  """
3
3
  This module has functions to calculate model costs based on tokens and to fetch pricing information.
4
4
  """
5
+ import os
5
6
  import json
6
7
  import logging
7
8
  from urllib.parse import urlparse
@@ -12,6 +13,19 @@ from opentelemetry.trace import Status, StatusCode
12
13
  # Set up logging
13
14
  logger = logging.getLogger(__name__)
14
15
 
16
+ def get_env_variable(name, arg_value, error_message):
17
+ """
18
+ Retrieve an environment variable if the argument is not provided
19
+ and raise an error if both are not set.
20
+ """
21
+ if arg_value is not None:
22
+ return arg_value
23
+ value = os.getenv(name)
24
+ if not value:
25
+ logging.error(error_message)
26
+ raise RuntimeError(error_message)
27
+ return value
28
+
15
29
  def openai_tokens(text, model):
16
30
  """
17
31
  Calculate the number of tokens a given text would take up for a specified model.
@@ -20,7 +20,7 @@ from opentelemetry.trace import SpanKind, Status, StatusCode, Span
20
20
  from openlit.semcov import SemanticConvetion
21
21
  from openlit.otel.tracing import setup_tracing
22
22
  from openlit.otel.metrics import setup_meter
23
- from openlit.__helpers import fetch_pricing_info
23
+ from openlit.__helpers import fetch_pricing_info, get_env_variable
24
24
 
25
25
 
26
26
  # Instrumentors for various large language models.
@@ -324,19 +324,6 @@ def get_prompt(url=None, name=None, api_key=None, prompt_id=None,
324
324
  Retrieves and returns the prompt from OpenLIT Prompt Hub
325
325
  """
326
326
 
327
- def get_env_variable(name, arg_value, error_message):
328
- """
329
- Retrieve an environment variable if the argument is not provided
330
- and raise an error if both are not set.
331
- """
332
- if arg_value is not None:
333
- return arg_value
334
- value = os.getenv(name)
335
- if not value:
336
- logging.error(error_message)
337
- raise RuntimeError(error_message)
338
- return value
339
-
340
327
  # Validate and set the base URL
341
328
  url = get_env_variable(
342
329
  'OPENLIT_URL',
@@ -386,6 +373,63 @@ def get_prompt(url=None, name=None, api_key=None, prompt_id=None,
386
373
  print(f"Error fetching prompt: {error}")
387
374
  return None
388
375
 
376
+ def get_secrets(url=None, api_key=None, key=None, tags=None, should_set_env=None):
377
+ """
378
+ Retrieves and returns the secrets from OpenLIT Vault & sets all to env if should_set_env is True
379
+ """
380
+
381
+ # Validate and set the base URL
382
+ url = get_env_variable(
383
+ 'OPENLIT_URL',
384
+ url,
385
+ 'Missing OpenLIT URL: Provide as arg or set OPENLIT_URL env var.'
386
+ )
387
+
388
+ # Validate and set the API key
389
+ api_key = get_env_variable(
390
+ 'OPENLIT_API_KEY',
391
+ api_key,
392
+ 'Missing API key: Provide as arg or set OPENLIT_API_KEY env var.'
393
+ )
394
+
395
+ # Construct the API endpoint
396
+ endpoint = url + "/api/vault/get-secrets"
397
+
398
+ # Prepare the payload
399
+ payload = {
400
+ 'key': key,
401
+ 'tags': tags,
402
+ }
403
+
404
+ # Remove None values from payload
405
+ payload = {k: v for k, v in payload.items() if v is not None}
406
+
407
+ # Prepare headers
408
+ headers = {
409
+ 'Authorization': f'Bearer {api_key}',
410
+ 'Content-Type': 'application/json'
411
+ }
412
+
413
+ try:
414
+ # Make the POST request to the API with headers
415
+ response = requests.post(endpoint, json=payload, headers=headers, timeout=120)
416
+
417
+ # Check if the response is successful
418
+ response.raise_for_status()
419
+
420
+ # Return the JSON response
421
+ vault_response = response.json()
422
+
423
+ res = vault_response.get('res', [])
424
+
425
+ if should_set_env is True:
426
+ for token, value in res.items():
427
+ os.environ[token] = str(value)
428
+ return vault_response
429
+ except requests.RequestException as error:
430
+ print(f"Error fetching secrets: {error}")
431
+ return None
432
+
389
433
  def trace(wrapped):
390
434
  """
391
435
  Generates a telemetry wrapper for messages to collect metrics.
File without changes