openlit 1.24.1__py3-none-any.whl → 1.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
openlit/__helpers.py CHANGED
@@ -2,6 +2,7 @@
2
2
  """
3
3
  This module has functions to calculate model costs based on tokens and to fetch pricing information.
4
4
  """
5
+ import os
5
6
  import json
6
7
  import logging
7
8
  from urllib.parse import urlparse
@@ -12,6 +13,19 @@ from opentelemetry.trace import Status, StatusCode
12
13
  # Set up logging
13
14
  logger = logging.getLogger(__name__)
14
15
 
16
+ def get_env_variable(name, arg_value, error_message):
17
+ """
18
+ Retrieve an environment variable if the argument is not provided
19
+ and raise an error if both are not set.
20
+ """
21
+ if arg_value is not None:
22
+ return arg_value
23
+ value = os.getenv(name)
24
+ if not value:
25
+ logging.error(error_message)
26
+ raise RuntimeError(error_message)
27
+ return value
28
+
15
29
  def openai_tokens(text, model):
16
30
  """
17
31
  Calculate the number of tokens a given text would take up for a specified model.
openlit/__init__.py CHANGED
@@ -7,9 +7,11 @@ large language models (LLMs).
7
7
 
8
8
  from typing import Dict
9
9
  import logging
10
+ import os
10
11
  from importlib.util import find_spec
11
12
  from functools import wraps
12
13
  from contextlib import contextmanager
14
+ import requests
13
15
 
14
16
 
15
17
  # Import internal modules for setting up tracing and fetching pricing info.
@@ -18,7 +20,7 @@ from opentelemetry.trace import SpanKind, Status, StatusCode, Span
18
20
  from openlit.semcov import SemanticConvetion
19
21
  from openlit.otel.tracing import setup_tracing
20
22
  from openlit.otel.metrics import setup_meter
21
- from openlit.__helpers import fetch_pricing_info
23
+ from openlit.__helpers import fetch_pricing_info, get_env_variable
22
24
 
23
25
 
24
26
  # Instrumentors for various large language models.
@@ -316,6 +318,117 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
316
318
  except Exception as e:
317
319
  logger.error("Error during openLIT initialization: %s", e)
318
320
 
321
+ def get_prompt(url=None, name=None, api_key=None, prompt_id=None,
322
+ version=None, should_compile=None, variables=None, meta_properties=None):
323
+ """
324
+ Retrieves and returns the prompt from OpenLIT Prompt Hub
325
+ """
326
+
327
+ # Validate and set the base URL
328
+ url = get_env_variable(
329
+ 'OPENLIT_URL',
330
+ url,
331
+ 'Missing OpenLIT URL: Provide as arg or set OPENLIT_URL env var.'
332
+ )
333
+
334
+ # Validate and set the API key
335
+ api_key = get_env_variable(
336
+ 'OPENLIT_API_KEY',
337
+ api_key,
338
+ 'Missing API key: Provide as arg or set OPENLIT_API_KEY env var.'
339
+ )
340
+
341
+ # Construct the API endpoint
342
+ endpoint = url + "/api/prompt/get-compiled"
343
+
344
+ # Prepare the payload
345
+ payload = {
346
+ 'name': name,
347
+ 'promptId': prompt_id,
348
+ 'version': version,
349
+ 'shouldCompile': should_compile,
350
+ 'variables': variables,
351
+ 'metaProperties': meta_properties
352
+ }
353
+
354
+ # Remove None values from payload
355
+ payload = {k: v for k, v in payload.items() if v is not None}
356
+
357
+ # Prepare headers
358
+ headers = {
359
+ 'Authorization': f'Bearer {api_key}',
360
+ 'Content-Type': 'application/json'
361
+ }
362
+
363
+ try:
364
+ # Make the POST request to the API with headers
365
+ response = requests.post(endpoint, json=payload, headers=headers, timeout=120)
366
+
367
+ # Check if the response is successful
368
+ response.raise_for_status()
369
+
370
+ # Return the JSON response
371
+ return response.json()
372
+ except requests.RequestException as error:
373
+ print(f"Error fetching prompt: {error}")
374
+ return None
375
+
376
+ def get_secrets(url=None, api_key=None, key=None, tags=None, should_set_env=None):
377
+ """
378
+ Retrieves and returns the secrets from OpenLIT Vault and sets all to env if should_set_env is True
379
+ """
380
+
381
+ # Validate and set the base URL
382
+ url = get_env_variable(
383
+ 'OPENLIT_URL',
384
+ url,
385
+ 'Missing OpenLIT URL: Provide as arg or set OPENLIT_URL env var.'
386
+ )
387
+
388
+ # Validate and set the API key
389
+ api_key = get_env_variable(
390
+ 'OPENLIT_API_KEY',
391
+ api_key,
392
+ 'Missing API key: Provide as arg or set OPENLIT_API_KEY env var.'
393
+ )
394
+
395
+ # Construct the API endpoint
396
+ endpoint = url + "/api/vault/get-secrets"
397
+
398
+ # Prepare the payload
399
+ payload = {
400
+ 'key': key,
401
+ 'tags': tags,
402
+ }
403
+
404
+ # Remove None values from payload
405
+ payload = {k: v for k, v in payload.items() if v is not None}
406
+
407
+ # Prepare headers
408
+ headers = {
409
+ 'Authorization': f'Bearer {api_key}',
410
+ 'Content-Type': 'application/json'
411
+ }
412
+
413
+ try:
414
+ # Make the POST request to the API with headers
415
+ response = requests.post(endpoint, json=payload, headers=headers, timeout=120)
416
+
417
+ # Check if the response is successful
418
+ response.raise_for_status()
419
+
420
+ # Return the JSON response
421
+ vault_response = response.json()
422
+
423
+ res = vault_response.get('res', [])
424
+
425
+ if should_set_env is True:
426
+ for token, value in res.items():
427
+ os.environ[token] = str(value)
428
+ return vault_response
429
+ except requests.RequestException as error:
430
+ print(f"Error fetching secrets: {error}")
431
+ return None
319
432
 
320
433
  def trace(wrapped):
321
434
  """
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: openlit
3
- Version: 1.24.1
3
+ Version: 1.26.0
4
4
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
5
5
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
6
6
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -12,6 +12,7 @@ Classifier: Programming Language :: Python :: 3.9
12
12
  Classifier: Programming Language :: Python :: 3.10
13
13
  Classifier: Programming Language :: Python :: 3.11
14
14
  Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Programming Language :: Python :: 3.13
15
16
  Requires-Dist: boto3 (>=1.34.0,<2.0.0)
16
17
  Requires-Dist: botocore (>=1.34.0,<2.0.0)
17
18
  Requires-Dist: gpustat (>=1.1.1,<2.0.0)
@@ -172,7 +173,7 @@ To send telemetry to OpenTelemetry backends requiring authentication, set the `o
172
173
  ### Step 3: Visualize and Optimize!
173
174
  With the LLM Observability data now being collected and sent to OpenLIT, the next step is to visualize and analyze this data to get insights into your LLM application’s performance, behavior, and identify areas of improvement.
174
175
 
175
- To begin exploring your LLM Application's performance data within the OpenLIT UI, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
176
+ To begin exploring your LLM Application's performance data within OpenLIT, please see the [Quickstart Guide](https://docs.openlit.io/latest/quickstart).
176
177
 
177
178
  If you want to integrate and send metrics and traces to your existing observability tools, refer to our [Connections Guide](https://docs.openlit.io/latest/connections/intro) for detailed instructions.
178
179
 
@@ -1,5 +1,5 @@
1
- openlit/__helpers.py,sha256=lrn4PBs9owDudiCY2NBoVbAi7AU_HtUpyOj0oqPBsPY,5545
2
- openlit/__init__.py,sha256=iuYE-acp7ScvxzA9P3f8obfOLLLrNMYkLbyKRz8Liec,16049
1
+ openlit/__helpers.py,sha256=H-8uJKs_CP9Y2HL4lz5n0AgN60wwZ675hlWHMDO143A,5936
2
+ openlit/__init__.py,sha256=MMQriKcROfw0DBBC3FdHbOKI--9tq-ZM0pY6UsQvjGk,19287
3
3
  openlit/instrumentation/anthropic/__init__.py,sha256=oaU53BOPyfUKbEzYvLr1DPymDluurSnwo4Hernf2XdU,1955
4
4
  openlit/instrumentation/anthropic/anthropic.py,sha256=y7CEGhKOGHWt8G_5Phr4qPJTfPGRJIAr9Yk6nM3CcvM,16775
5
5
  openlit/instrumentation/anthropic/async_anthropic.py,sha256=Zz1KRKIG9wGn0quOoLvjORC-49IvHQpJ6GBdB-4PfCQ,16816
@@ -59,7 +59,7 @@ openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOs
59
59
  openlit/otel/metrics.py,sha256=-PntPlH4xOBXgnDqgJp3iA7rh1TWxM7PudGnb0GxVDA,4298
60
60
  openlit/otel/tracing.py,sha256=2kSj7n7uXSkRegcGFDC8IbnDOxqWTA8dGODs__Yn_yA,3719
61
61
  openlit/semcov/__init__.py,sha256=wpAarrnkndbgvP8VSudi8IRInYtD02hkewqjyiC0dMk,7614
62
- openlit-1.24.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
63
- openlit-1.24.1.dist-info/METADATA,sha256=sPi3YeEnTMkzJqPH0SqYrdgezeK4vf2Y5W6MJPuNmT4,15710
64
- openlit-1.24.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
65
- openlit-1.24.1.dist-info/RECORD,,
62
+ openlit-1.26.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
63
+ openlit-1.26.0.dist-info/METADATA,sha256=YRmexsUEdSEe5MBZu8IR6u6s4XMzi2TSJGw1Z7CMCgQ,15758
64
+ openlit-1.26.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
65
+ openlit-1.26.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.9.0
2
+ Generator: poetry-core 1.9.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any