openlit 1.24.1__tar.gz → 1.26.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {openlit-1.24.1 → openlit-1.26.0}/PKG-INFO +3 -2
  2. {openlit-1.24.1 → openlit-1.26.0}/README.md +1 -1
  3. {openlit-1.24.1 → openlit-1.26.0}/pyproject.toml +1 -1
  4. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/__helpers.py +14 -0
  5. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/__init__.py +114 -1
  6. {openlit-1.24.1 → openlit-1.26.0}/LICENSE +0 -0
  7. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  8. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  9. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  10. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/__init__.py +0 -0
  11. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +0 -0
  12. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +0 -0
  13. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
  14. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
  15. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  16. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  17. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  18. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  19. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/__init__.py +0 -0
  20. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/async_elevenlabs.py +0 -0
  21. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/elevenlabs/elevenlabs.py +0 -0
  22. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/embedchain/__init__.py +0 -0
  23. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/embedchain/embedchain.py +0 -0
  24. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/__init__.py +0 -0
  25. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +0 -0
  26. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/google_ai_studio/google_ai_studio.py +0 -0
  27. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/gpt4all/__init__.py +0 -0
  28. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/gpt4all/gpt4all.py +0 -0
  29. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/gpu/__init__.py +0 -0
  30. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/groq/__init__.py +0 -0
  31. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/groq/async_groq.py +0 -0
  32. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/groq/groq.py +0 -0
  33. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/haystack/__init__.py +0 -0
  34. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/haystack/haystack.py +0 -0
  35. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  36. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/langchain/langchain.py +0 -0
  37. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
  38. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
  39. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/milvus/__init__.py +0 -0
  40. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/milvus/milvus.py +0 -0
  41. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  42. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  43. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  44. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/ollama/__init__.py +0 -0
  45. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
  46. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/ollama/ollama.py +0 -0
  47. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/openai/__init__.py +0 -0
  48. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  49. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  50. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  51. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/openai/openai.py +0 -0
  52. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  53. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  54. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
  55. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
  56. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  57. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  58. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
  59. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
  60. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
  61. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/vllm/__init__.py +0 -0
  62. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/instrumentation/vllm/vllm.py +0 -0
  63. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/otel/metrics.py +0 -0
  64. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/otel/tracing.py +0 -0
  65. {openlit-1.24.1 → openlit-1.26.0}/src/openlit/semcov/__init__.py +0 -0
{openlit-1.24.1 → openlit-1.26.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: openlit
- Version: 1.24.1
+ Version: 1.26.0
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -12,6 +12,7 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: boto3 (>=1.34.0,<2.0.0)
  Requires-Dist: botocore (>=1.34.0,<2.0.0)
  Requires-Dist: gpustat (>=1.1.1,<2.0.0)
@@ -172,7 +173,7 @@ To send telemetry to OpenTelemetry backends requiring authentication, set the `o
  ### Step 3: Visualize and Optimize!
  With the LLM Observability data now being collected and sent to OpenLIT, the next step is to visualize and analyze this data to get insights into your LLM application’s performance, behavior, and identify areas of improvement.

- To begin exploring your LLM Application's performance data within the OpenLIT UI, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
+ To begin exploring your LLM Application's performance data within the OpenLIT, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).

  If you want to integrate and send metrics and traces to your existing observability tools, refer to our [Connections Guide](https://docs.openlit.io/latest/connections/intro) for detailed instructions.

{openlit-1.24.1 → openlit-1.26.0}/README.md

@@ -145,7 +145,7 @@ To send telemetry to OpenTelemetry backends requiring authentication, set the `o
  ### Step 3: Visualize and Optimize!
  With the LLM Observability data now being collected and sent to OpenLIT, the next step is to visualize and analyze this data to get insights into your LLM application’s performance, behavior, and identify areas of improvement.

- To begin exploring your LLM Application's performance data within the OpenLIT UI, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
+ To begin exploring your LLM Application's performance data within the OpenLIT, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).

  If you want to integrate and send metrics and traces to your existing observability tools, refer to our [Connections Guide](https://docs.openlit.io/latest/connections/intro) for detailed instructions.

{openlit-1.24.1 → openlit-1.26.0}/pyproject.toml

@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "openlit"
- version = "1.24.1"
+ version = "1.26.0"
  description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects"
  authors = ["OpenLIT"]
  repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
{openlit-1.24.1 → openlit-1.26.0}/src/openlit/__helpers.py

@@ -2,6 +2,7 @@
  """
  This module has functions to calculate model costs based on tokens and to fetch pricing information.
  """
+ import os
  import json
  import logging
  from urllib.parse import urlparse
@@ -12,6 +13,19 @@ from opentelemetry.trace import Status, StatusCode
  # Set up logging
  logger = logging.getLogger(__name__)

+ def get_env_variable(name, arg_value, error_message):
+     """
+     Retrieve an environment variable if the argument is not provided
+     and raise an error if both are not set.
+     """
+     if arg_value is not None:
+         return arg_value
+     value = os.getenv(name)
+     if not value:
+         logging.error(error_message)
+         raise RuntimeError(error_message)
+     return value
+
  def openai_tokens(text, model):
      """
      Calculate the number of tokens a given text would take up for a specified model.
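For context, a minimal sketch of how the new get_env_variable helper resolves values, based solely on the code added above: an explicit argument wins, otherwise the named environment variable is read, and a RuntimeError is raised when neither is set. The URL value and variable names below are hypothetical.

import os
from openlit.__helpers import get_env_variable

os.environ["OPENLIT_URL"] = "http://127.0.0.1:3000"  # hypothetical deployment URL

# An explicit argument takes precedence over the environment variable.
print(get_env_variable("OPENLIT_URL", "http://example.internal:3000", "Missing OpenLIT URL"))

# With no argument, the environment variable is returned.
print(get_env_variable("OPENLIT_URL", None, "Missing OpenLIT URL"))

# With neither the argument nor the env var set, the error message is logged
# and a RuntimeError is raised.
# get_env_variable("OPENLIT_API_KEY", None, "Missing API key")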
{openlit-1.24.1 → openlit-1.26.0}/src/openlit/__init__.py

@@ -7,9 +7,11 @@ large language models (LLMs).

  from typing import Dict
  import logging
+ import os
  from importlib.util import find_spec
  from functools import wraps
  from contextlib import contextmanager
+ import requests


  # Import internal modules for setting up tracing and fetching pricing info.
@@ -18,7 +20,7 @@ from opentelemetry.trace import SpanKind, Status, StatusCode, Span
  from openlit.semcov import SemanticConvetion
  from openlit.otel.tracing import setup_tracing
  from openlit.otel.metrics import setup_meter
- from openlit.__helpers import fetch_pricing_info
+ from openlit.__helpers import fetch_pricing_info, get_env_variable


  # Instrumentors for various large language models.
@@ -316,6 +318,117 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
      except Exception as e:
          logger.error("Error during openLIT initialization: %s", e)

+ def get_prompt(url=None, name=None, api_key=None, prompt_id=None,
+                version=None, should_compile=None, variables=None, meta_properties=None):
+     """
+     Retrieve and returns the prompt from OpenLIT Prompt Hub
+     """
+
+     # Validate and set the base URL
+     url = get_env_variable(
+         'OPENLIT_URL',
+         url,
+         'Missing OpenLIT URL: Provide as arg or set OPENLIT_URL env var.'
+     )
+
+     # Validate and set the API key
+     api_key = get_env_variable(
+         'OPENLIT_API_KEY',
+         api_key,
+         'Missing API key: Provide as arg or set OPENLIT_API_KEY env var.'
+     )
+
+     # Construct the API endpoint
+     endpoint = url + "/api/prompt/get-compiled"
+
+     # Prepare the payload
+     payload = {
+         'name': name,
+         'promptId': prompt_id,
+         'version': version,
+         'shouldCompile': should_compile,
+         'variables': variables,
+         'metaProperties': meta_properties
+     }
+
+     # Remove None values from payload
+     payload = {k: v for k, v in payload.items() if v is not None}
+
+     # Prepare headers
+     headers = {
+         'Authorization': f'Bearer {api_key}',
+         'Content-Type': 'application/json'
+     }
+
+     try:
+         # Make the POST request to the API with headers
+         response = requests.post(endpoint, json=payload, headers=headers, timeout=120)
+
+         # Check if the response is successful
+         response.raise_for_status()
+
+         # Return the JSON response
+         return response.json()
+     except requests.RequestException as error:
+         print(f"Error fetching prompt: {error}")
+         return None
+
+ def get_secrets(url=None, api_key=None, key=None, tags=None, should_set_env=None):
+     """
+     Retrieve & returns the secrets from OpenLIT Vault & sets all to env is should_set_env is True
+     """
+
+     # Validate and set the base URL
+     url = get_env_variable(
+         'OPENLIT_URL',
+         url,
+         'Missing OpenLIT URL: Provide as arg or set OPENLIT_URL env var.'
+     )
+
+     # Validate and set the API key
+     api_key = get_env_variable(
+         'OPENLIT_API_KEY',
+         api_key,
+         'Missing API key: Provide as arg or set OPENLIT_API_KEY env var.'
+     )
+
+     # Construct the API endpoint
+     endpoint = url + "/api/vault/get-secrets"
+
+     # Prepare the payload
+     payload = {
+         'key': key,
+         'tags': tags,
+     }
+
+     # Remove None values from payload
+     payload = {k: v for k, v in payload.items() if v is not None}
+
+     # Prepare headers
+     headers = {
+         'Authorization': f'Bearer {api_key}',
+         'Content-Type': 'application/json'
+     }
+
+     try:
+         # Make the POST request to the API with headers
+         response = requests.post(endpoint, json=payload, headers=headers, timeout=120)
+
+         # Check if the response is successful
+         response.raise_for_status()
+
+         # Return the JSON response
+         vault_response = response.json()
+
+         res = vault_response.get('res', [])
+
+         if should_set_env is True:
+             for token, value in res.items():
+                 os.environ[token] = str(value)
+         return vault_response
+     except requests.RequestException as error:
+         print(f"Error fetching secrets: {error}")
+         return None

  def trace(wrapped):
      """