openlit 1.22.5.tar.gz → 1.24.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {openlit-1.22.5 → openlit-1.24.1}/PKG-INFO +5 -5
  2. {openlit-1.22.5 → openlit-1.24.1}/pyproject.toml +5 -5
  3. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/langchain/langchain.py +9 -9
  4. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/otel/metrics.py +1 -1
  5. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/otel/tracing.py +1 -1
  6. {openlit-1.22.5 → openlit-1.24.1}/LICENSE +0 -0
  7. {openlit-1.22.5 → openlit-1.24.1}/README.md +0 -0
  8. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/__helpers.py +0 -0
  9. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/__init__.py +0 -0
  10. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  11. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  12. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  13. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/azure_ai_inference/__init__.py +0 -0
  14. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +0 -0
  15. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +0 -0
  16. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
  17. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
  18. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  19. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  20. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  21. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  22. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/elevenlabs/__init__.py +0 -0
  23. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/elevenlabs/async_elevenlabs.py +0 -0
  24. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/elevenlabs/elevenlabs.py +0 -0
  25. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/embedchain/__init__.py +0 -0
  26. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/embedchain/embedchain.py +0 -0
  27. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/google_ai_studio/__init__.py +0 -0
  28. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +0 -0
  29. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/google_ai_studio/google_ai_studio.py +0 -0
  30. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/gpt4all/__init__.py +0 -0
  31. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/gpt4all/gpt4all.py +0 -0
  32. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/gpu/__init__.py +0 -0
  33. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/groq/__init__.py +0 -0
  34. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/groq/async_groq.py +0 -0
  35. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/groq/groq.py +0 -0
  36. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/haystack/__init__.py +0 -0
  37. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/haystack/haystack.py +0 -0
  38. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  39. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
  40. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
  41. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/milvus/__init__.py +0 -0
  42. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/milvus/milvus.py +0 -0
  43. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  44. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  45. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  46. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/ollama/__init__.py +0 -0
  47. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
  48. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/ollama/ollama.py +0 -0
  49. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/openai/__init__.py +0 -0
  50. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  51. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  52. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  53. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/openai/openai.py +0 -0
  54. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  55. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  56. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
  57. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
  58. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  59. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  60. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
  61. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
  62. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
  63. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/vllm/__init__.py +0 -0
  64. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/instrumentation/vllm/vllm.py +0 -0
  65. {openlit-1.22.5 → openlit-1.24.1}/src/openlit/semcov/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: openlit
-Version: 1.22.5
+Version: 1.24.1
 Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
 Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
 Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -15,10 +15,10 @@ Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: boto3 (>=1.34.0,<2.0.0)
 Requires-Dist: botocore (>=1.34.0,<2.0.0)
 Requires-Dist: gpustat (>=1.1.1,<2.0.0)
-Requires-Dist: opentelemetry-api (>=1.24.0,<2.0.0)
-Requires-Dist: opentelemetry-exporter-otlp (>=1.24.0,<2.0.0)
-Requires-Dist: opentelemetry-instrumentation (>=0.45b0,<0.46)
-Requires-Dist: opentelemetry-sdk (>=1.24.0,<2.0.0)
+Requires-Dist: opentelemetry-api (>=1.27.0,<2.0.0)
+Requires-Dist: opentelemetry-exporter-otlp (>=1.27.0,<2.0.0)
+Requires-Dist: opentelemetry-instrumentation (>=0.48b0,<0.49)
+Requires-Dist: opentelemetry-sdk (>=1.27.0,<2.0.0)
 Requires-Dist: requests (>=2.26.0,<3.0.0)
 Requires-Dist: schedule (>=1.2.2,<2.0.0)
 Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "openlit"
-version = "1.22.5"
+version = "1.24.1"
 description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects"
 authors = ["OpenLIT"]
 repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
@@ -16,10 +16,10 @@ tiktoken = "^0.7.0"
 gpustat = "^1.1.1"
 boto3 = "^1.34.0"
 botocore = "^1.34.0"
-opentelemetry-api = "^1.24.0"
-opentelemetry-sdk = "^1.24.0"
-opentelemetry-exporter-otlp = "^1.24.0"
-opentelemetry-instrumentation = "^0.45b0"
+opentelemetry-api = "^1.27.0"
+opentelemetry-sdk = "^1.27.0"
+opentelemetry-exporter-otlp = "^1.27.0"
+opentelemetry-instrumentation = "^0.48b0"

 [build-system]
 requires = ["poetry-core>=1.1.0"]
src/openlit/instrumentation/langchain/langchain.py
@@ -623,9 +623,11 @@ def achat(gen_ai_endpoint, version, environment, application_name,
 input_tokens = response.response_metadata.get("prompt_eval_count", 0)
 output_tokens = response.response_metadata.get("eval_count", 0)

+prompt = "" if isinstance(args[0], list) else args[0]
+model = getattr(instance, 'model_name', getattr(instance, 'model', 'gpt-4'))
 # Calculate cost of the operation
 cost = get_chat_model_cost(
-    str(getattr(instance, 'model')),
+    model,
     pricing_info, input_tokens, output_tokens
 )

@@ -641,15 +643,13 @@ def achat(gen_ai_endpoint, version, environment, application_name,
 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
                    application_name)
 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
-                   str(getattr(instance, 'model')))
+                   model)
 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_TEMPERATURE,
-                   str(getattr(instance, 'temperature')))
+                   str(getattr(instance, 'temperature',1)))
 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_TOP_K,
-                   str(getattr(instance, 'top_k')))
+                   str(getattr(instance, 'top_k',1)))
 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_TOP_P,
-                   str(getattr(instance, 'top_p')))
-span.set_attribute(SemanticConvetion.GEN_AI_RESPONSE_FINISH_REASON,
-                   [response.response_metadata["done_reason"]])
+                   str(getattr(instance, 'top_p',1)))
 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_IS_STREAM,
                    False)
 span.set_attribute(SemanticConvetion.GEN_AI_USAGE_PROMPT_TOKENS,
@@ -664,7 +664,7 @@ def achat(gen_ai_endpoint, version, environment, application_name,
 span.add_event(
     name=SemanticConvetion.GEN_AI_CONTENT_PROMPT_EVENT,
     attributes={
-        SemanticConvetion.GEN_AI_CONTENT_PROMPT: args[0],
+        SemanticConvetion.GEN_AI_CONTENT_PROMPT: prompt,
     },
 )
 span.add_event(
@@ -689,7 +689,7 @@ def achat(gen_ai_endpoint, version, environment, application_name,
 SemanticConvetion.GEN_AI_TYPE:
     SemanticConvetion.GEN_AI_TYPE_CHAT,
 SemanticConvetion.GEN_AI_REQUEST_MODEL:
-    str(getattr(instance, 'model'))
+    model
 }

 metrics["genai_requests"].add(1, attributes)
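The achat changes replace unguarded getattr(instance, 'model') / 'temperature' / 'top_k' / 'top_p' lookups with defaulted lookups and record an empty prompt when args[0] is a message list rather than a string. A small standalone sketch of the same pattern, using a hypothetical FakeChatModel in place of the instrumented LangChain instance:

    # Hypothetical stand-in for the instrumented LangChain model object;
    # real instances may expose `model_name`, `model`, or neither.
    class FakeChatModel:
        model_name = "llama3"
        temperature = 0.2
        # top_k / top_p deliberately absent

    instance = FakeChatModel()

    # Defaulted attribute reads, mirroring the patched instrumentation:
    # fall back to 'model', then to a placeholder, instead of raising AttributeError.
    model = getattr(instance, "model_name", getattr(instance, "model", "gpt-4"))
    temperature = str(getattr(instance, "temperature", 1))
    top_k = str(getattr(instance, "top_k", 1))
    top_p = str(getattr(instance, "top_p", 1))

    # Only a plain-string first argument is recorded as the prompt; chat-style
    # message lists fall back to an empty string, as in the isinstance guard above.
    def extract_prompt(first_arg):
        return "" if isinstance(first_arg, list) else first_arg

    print(model, temperature, top_k, top_p, repr(extract_prompt([("user", "hi")])))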
src/openlit/otel/metrics.py
@@ -34,7 +34,7 @@ def setup_meter(application_name, environment, meter, otlp_endpoint, otlp_header
 try:
     if meter is None and not METER_SET:
         # Create a resource with the service name attribute.
-        resource = Resource(attributes={
+        resource = Resource.create(attributes={
             SERVICE_NAME: application_name,
             DEPLOYMENT_ENVIRONMENT: environment,
             TELEMETRY_SDK_NAME: "openlit"}
src/openlit/otel/tracing.py
@@ -45,7 +45,7 @@ def setup_tracing(application_name, environment, tracer, otlp_endpoint, otlp_hea

 if not TRACER_SET:
     # Create a resource with the service name attribute.
-    resource = Resource(attributes={
+    resource = Resource.create(attributes={
         SERVICE_NAME: application_name,
         DEPLOYMENT_ENVIRONMENT: environment,
         TELEMETRY_SDK_NAME: "openlit"}
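In both setup_meter and setup_tracing the bare Resource constructor is swapped for Resource.create, which, unlike the constructor, also merges the supplied attributes over the SDK defaults and anything set via OTEL_RESOURCE_ATTRIBUTES. A minimal sketch assuming opentelemetry-sdk >= 1.27.0 is installed; the literal service name and environment values are placeholders:

    from opentelemetry.sdk.resources import (
        DEPLOYMENT_ENVIRONMENT,
        SERVICE_NAME,
        TELEMETRY_SDK_NAME,
        Resource,
    )

    # Resource.create() layers these attributes over the SDK defaults and any
    # OTEL_RESOURCE_ATTRIBUTES values; Resource(...) would use only what is passed.
    resource = Resource.create(attributes={
        SERVICE_NAME: "my-llm-app",            # placeholder for application_name
        DEPLOYMENT_ENVIRONMENT: "production",  # placeholder for environment
        TELEMETRY_SDK_NAME: "openlit",
    })

    print(dict(resource.attributes))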