genai-otel-instrument 0.1.7.dev0__tar.gz → 0.1.9.dev0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of genai-otel-instrument might be problematic.

Files changed (207)
  1. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/CHANGELOG.md +44 -0
  2. {genai_otel_instrument-0.1.7.dev0/genai_otel_instrument.egg-info → genai_otel_instrument-0.1.9.dev0}/PKG-INFO +7 -3
  3. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/README.md +6 -2
  4. genai_otel_instrument-0.1.9.dev0/examples/huggingface/example_automodel.py +89 -0
  5. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/__version__.py +3 -3
  6. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/auto_instrument.py +7 -3
  7. genai_otel_instrument-0.1.9.dev0/genai_otel/cost_enriching_exporter.py +207 -0
  8. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/cost_enrichment_processor.py +2 -3
  9. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/huggingface_instrumentor.py +178 -5
  10. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0/genai_otel_instrument.egg-info}/PKG-INFO +7 -3
  11. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel_instrument.egg-info/SOURCES.txt +2 -0
  12. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/pyproject.toml +6 -0
  13. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_huggingface_instrumentor.py +26 -0
  14. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.claude/settings.local.json +0 -0
  15. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.github/workflows/README.md +0 -0
  16. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.github/workflows/pre-release-check.yml +0 -0
  17. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.github/workflows/publish.yml +0 -0
  18. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.github/workflows/test.yml +0 -0
  19. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.gitignore +0 -0
  20. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/.gitignore +0 -0
  21. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/genai_otel_instrument.iml +0 -0
  22. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/inspectionProfiles/Project_Default.xml +0 -0
  23. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/inspectionProfiles/profiles_settings.xml +0 -0
  24. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/misc.xml +0 -0
  25. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/modules.xml +0 -0
  26. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.idea/vcs.xml +0 -0
  27. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.pre-commit-config.yaml +0 -0
  28. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/.pylintrc +0 -0
  29. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/Contributing.md +0 -0
  30. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/DEVELOPMENT.md +0 -0
  31. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/LICENSE +0 -0
  32. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/MANIFEST.in +0 -0
  33. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/OTEL_SEMANTIC_COMPATIBILITY.md +0 -0
  34. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/OTEL_SEMANTIC_GAP_ANALYSIS_AND_IMPLEMENTATION_PLAN.md +0 -0
  35. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/PRE_RELEASE_CHECKLIST.md +0 -0
  36. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/TEST_COVERAGE_CHECKLIST.md +0 -0
  37. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/TROUBLESHOOTING.md +0 -0
  38. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/example_usage.py +0 -0
  39. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/README.md +0 -0
  40. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/anthropic/.env.example +0 -0
  41. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/anthropic/README.md +0 -0
  42. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/anthropic/example.py +0 -0
  43. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/aws_bedrock/.env.example +0 -0
  44. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/aws_bedrock/README.md +0 -0
  45. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/aws_bedrock/example.py +0 -0
  46. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/azure_openai/.env.example +0 -0
  47. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/azure_openai/README.md +0 -0
  48. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/azure_openai/example.py +0 -0
  49. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/cohere/.env.example +0 -0
  50. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/cohere/README.md +0 -0
  51. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/cohere/example.py +0 -0
  52. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/.env.example +0 -0
  53. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/Dockerfile +0 -0
  54. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/OPENSEARCH_SETUP.md +0 -0
  55. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/README.md +0 -0
  56. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/app.py +0 -0
  57. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/docker-compose.yml +0 -0
  58. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/GenAI OTel Demo Metrics-1761310525837.json +0 -0
  59. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/GenAI OTel Demo Traces-1761321575526.json +0 -0
  60. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/GenAI Traces - OpenSearch-1761319701624.json +0 -0
  61. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/genai-metrics-dashboard.json +0 -0
  62. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/genai-opensearch-traces-dashboard.json +0 -0
  63. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/dashboards/genai-traces-dashboard.json +0 -0
  64. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/provisioning/dashboards/dashboards.yml +0 -0
  65. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/provisioning/datasources/jaeger.yml +0 -0
  66. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/provisioning/datasources/opensearch.yml +0 -0
  67. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/grafana/provisioning/datasources/prometheus.yml +0 -0
  68. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/opensearch-setup.sh +0 -0
  69. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/otel-collector-config.yml +0 -0
  70. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/prometheus.yml +0 -0
  71. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/demo/requirements.txt +0 -0
  72. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/google_ai/.env.example +0 -0
  73. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/google_ai/README.md +0 -0
  74. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/google_ai/example.py +0 -0
  75. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/groq/.env.example +0 -0
  76. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/groq/README.md +0 -0
  77. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/groq/example.py +0 -0
  78. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/huggingface/.env.example +0 -0
  79. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/huggingface/README.md +0 -0
  80. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/huggingface/example.py +0 -0
  81. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/langchain/.env.example +0 -0
  82. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/langchain/README.md +0 -0
  83. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/langchain/example.py +0 -0
  84. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/litellm/example.py +0 -0
  85. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/llamaindex/.env.example +0 -0
  86. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/llamaindex/README.md +0 -0
  87. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/llamaindex/example.py +0 -0
  88. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/mistralai/.env.example +0 -0
  89. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/mistralai/README.md +0 -0
  90. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/mistralai/example.py +0 -0
  91. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/ollama/.env.example +0 -0
  92. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/ollama/README.md +0 -0
  93. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/ollama/example.py +0 -0
  94. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/openai/.env.example +0 -0
  95. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/openai/README.md +0 -0
  96. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/openai/example.py +0 -0
  97. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/phase4_session_rag_tracking.py +0 -0
  98. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/replicate/.env.example +0 -0
  99. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/replicate/README.md +0 -0
  100. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/replicate/example.py +0 -0
  101. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/smolagents/example.py +0 -0
  102. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/togetherai/.env.example +0 -0
  103. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/togetherai/README.md +0 -0
  104. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/togetherai/example.py +0 -0
  105. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/vertexai/.env.example +0 -0
  106. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/vertexai/README.md +0 -0
  107. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/examples/vertexai/example.py +0 -0
  108. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/__init__.py +0 -0
  109. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/cli.py +0 -0
  110. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/config.py +0 -0
  111. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/cost_calculator.py +0 -0
  112. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/exceptions.py +0 -0
  113. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/gpu_metrics.py +0 -0
  114. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/__init__.py +0 -0
  115. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/anthropic_instrumentor.py +0 -0
  116. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/anyscale_instrumentor.py +0 -0
  117. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/aws_bedrock_instrumentor.py +0 -0
  118. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/azure_openai_instrumentor.py +0 -0
  119. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/base.py +0 -0
  120. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/cohere_instrumentor.py +0 -0
  121. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/google_ai_instrumentor.py +0 -0
  122. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/groq_instrumentor.py +0 -0
  123. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/langchain_instrumentor.py +0 -0
  124. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/llamaindex_instrumentor.py +0 -0
  125. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/mistralai_instrumentor.py +0 -0
  126. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/ollama_instrumentor.py +0 -0
  127. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/openai_instrumentor.py +0 -0
  128. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/replicate_instrumentor.py +0 -0
  129. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/togetherai_instrumentor.py +0 -0
  130. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/instrumentors/vertexai_instrumentor.py +0 -0
  131. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/llm_pricing.json +0 -0
  132. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/logging_config.py +0 -0
  133. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/__init__.py +0 -0
  134. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/api_instrumentor.py +0 -0
  135. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/base.py +0 -0
  136. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/database_instrumentor.py +0 -0
  137. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/kafka_instrumentor.py +0 -0
  138. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/manager.py +0 -0
  139. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/redis_instrumentor.py +0 -0
  140. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/mcp_instrumentors/vector_db_instrumentor.py +0 -0
  141. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/metrics.py +0 -0
  142. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel/py.typed +0 -0
  143. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel_instrument.egg-info/dependency_links.txt +0 -0
  144. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel_instrument.egg-info/entry_points.txt +0 -0
  145. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel_instrument.egg-info/requires.txt +0 -0
  146. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/genai_otel_instrument.egg-info/top_level.txt +0 -0
  147. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/openlit/semcov.py +0 -0
  148. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/requirements-dev.txt +0 -0
  149. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/requirements-testing.txt +0 -0
  150. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/requirements.txt +0 -0
  151. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/sample.env +0 -0
  152. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/add_ollama_pricing.py +0 -0
  153. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/simple_test.py +0 -0
  154. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_example_debug.py +0 -0
  155. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_exporter_fix.py +0 -0
  156. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_final.py +0 -0
  157. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_gpu_debug.py +0 -0
  158. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_gpu_metrics.py +0 -0
  159. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_litellm_instrumentation.py +0 -0
  160. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_ollama_cost.py +0 -0
  161. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/debug/test_ollama_span_attributes.py +0 -0
  162. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/fix_all_deps.sh +0 -0
  163. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/fix_instrumentors.py +0 -0
  164. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/test_installation.py +0 -0
  165. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/scripts/test_release.sh +0 -0
  166. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/setup.cfg +0 -0
  167. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/setup.py +0 -0
  168. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/__init__.py +0 -0
  169. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_anthropic_instrumentor.py +0 -0
  170. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_anyscale_instrumentor.py +0 -0
  171. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_aws_bedrock_instrumentor.py +0 -0
  172. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_azure_openai_instrumentor.py +0 -0
  173. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_base.py +0 -0
  174. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_cohere_instrumentor.py +0 -0
  175. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_google_ai_instrumentor.py +0 -0
  176. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_groq_instrumentor.py +0 -0
  177. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_langchain_instrumentor.py +0 -0
  178. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_litellm_instrumentor.py +0 -0
  179. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_llamaindex_instrumentor.py +0 -0
  180. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_mcp_instrumentor.py +0 -0
  181. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_mistralai_instrumentor.py +0 -0
  182. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_ollama_instrumentor.py +0 -0
  183. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_openai_instrumentor.py +0 -0
  184. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_replicate_instrumentor.py +0 -0
  185. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_smolagents_instrumentor.py +0 -0
  186. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_togetherai_instrumentor.py +0 -0
  187. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/instrumentors/test_vertexai_instrumentor.py +0 -0
  188. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_api_instrumentor.py +0 -0
  189. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_database_instrumentor.py +0 -0
  190. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_kafka_instrumentor.py +0 -0
  191. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_manager.py +0 -0
  192. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_mcp_base.py +0 -0
  193. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_redis_instrumentor.py +0 -0
  194. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/mcp_instrumentors/test_vector_db_instrumentor.py +0 -0
  195. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_auto_instrument.py +0 -0
  196. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_cli.py +0 -0
  197. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_config.py +0 -0
  198. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_cost_calculator.py +0 -0
  199. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_cost_enrichment_processor.py +0 -0
  200. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_exceptions.py +0 -0
  201. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_gpu_metrics.py +0 -0
  202. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_init.py +0 -0
  203. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_logging_config.py +0 -0
  204. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_metrics.py +0 -0
  205. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_openai_instrumentor.py +0 -0
  206. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_otel_setup.py +0 -0
  207. {genai_otel_instrument-0.1.7.dev0 → genai_otel_instrument-0.1.9.dev0}/tests/test_phase4_features.py +0 -0
@@ -6,6 +6,50 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
  ## [Unreleased]
 
+ ## [0.1.8] - 2025-01-27
+
+ ### Added
+
+ - **HuggingFace AutoModelForCausalLM and AutoModelForSeq2SeqLM Instrumentation**
+   - Added support for direct model usage via `AutoModelForCausalLM.generate()` and `AutoModelForSeq2SeqLM.generate()`
+   - Automatic token counting from input and output tensor shapes
+   - Cost calculation based on model parameter count (uses CostCalculator's local model pricing tiers)
+   - Span attributes: `gen_ai.system`, `gen_ai.request.model`, `gen_ai.operation.name`, token counts, costs
+   - Metrics: request counter, token counter, latency histogram, cost counter
+   - Supports generation parameters: `max_length`, `max_new_tokens`, `temperature`, `top_p`
+   - Implementation in `genai_otel/instrumentors/huggingface_instrumentor.py:184-333`
+   - Example usage in `examples/huggingface/example_automodel.py`
+   - All 443 tests pass (added 1 new test)
+
+ ### Fixed
+
+ - **CRITICAL: Cost Tracking for OpenInference Instrumentors (smolagents, litellm, mcp)**
+   - Replaced `CostEnrichmentSpanProcessor` with `CostEnrichingSpanExporter` to properly add cost attributes
+   - **Root Cause**: SpanProcessor's `on_end()` receives immutable `ReadableSpan` objects that cannot be modified
+   - **Solution**: Custom SpanExporter that enriches span data before export, creating new ReadableSpan instances with cost attributes
+   - Cost attributes now correctly appear for smolagents, litellm, and mcp spans:
+     - `gen_ai.usage.cost.total`: Total cost in USD
+     - `gen_ai.usage.cost.prompt`: Prompt tokens cost
+     - `gen_ai.usage.cost.completion`: Completion tokens cost
+   - Supports all OpenInference semantic conventions:
+     - Model name: `llm.model_name`, `gen_ai.request.model`, `embedding.model_name`
+     - Token counts: `llm.token_count.{prompt,completion}`, `gen_ai.usage.{prompt_tokens,completion_tokens}`
+     - Span kinds: `openinference.span.kind` (LLM, EMBEDDING, CHAIN, etc.)
+   - Implementation in `genai_otel/cost_enriching_exporter.py`
+   - Updated `genai_otel/auto_instrument.py` to wrap OTLP and Console exporters
+   - Model name normalization handles provider prefixes (e.g., `openai/gpt-3.5-turbo` → `gpt-3.5-turbo`)
+   - All 442 existing tests continue to pass
+
+ - **HuggingFace AutoModelForCausalLM AttributeError Fix**
+   - Fixed `AttributeError: type object 'AutoModelForCausalLM' has no attribute 'generate'`
+   - Root cause: `AutoModelForCausalLM` is a factory class; `generate()` exists on `GenerationMixin`
+   - Solution: Wrap `GenerationMixin.generate()` which all generative models inherit from
+   - This covers all model types: `AutoModelForCausalLM`, `AutoModelForSeq2SeqLM`, `GPT2LMHeadModel`, etc.
+   - Added fallback import for older transformers versions
+   - Implementation in `genai_otel/instrumentors/huggingface_instrumentor.py:184-346`
+
+ ## [0.1.7] - 2025-01-25
+
  ### Added
 
  - **Phase 4: Session and User Tracking (4.1)**
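The 0.1.8 entry above notes that model names from OpenInference instrumentors may carry a provider prefix (e.g., `openai/gpt-3.5-turbo` → `gpt-3.5-turbo`). The sketch below is a hypothetical helper illustrating that normalization; the package's own implementation may differ.

```python
# Hypothetical helper (not the package's code) illustrating the provider-prefix
# normalization described in the 0.1.8 changelog entry above.
def normalize_model_name(model: str) -> str:
    # Strip a leading "provider/" segment so pricing lookups use the bare model id.
    return model.split("/", 1)[1] if "/" in model else model


assert normalize_model_name("openai/gpt-3.5-turbo") == "gpt-3.5-turbo"
assert normalize_model_name("gpt-4o") == "gpt-4o"  # names without a prefix pass through
```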
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: genai-otel-instrument
- Version: 0.1.7.dev0
+ Version: 0.1.9.dev0
  Summary: Comprehensive OpenTelemetry auto-instrumentation for LLM/GenAI applications
  Author-email: Kshitij Thakkar <kshitijthakkar@rocketmail.com>
  License: Apache-2.0
@@ -257,7 +257,8 @@ For a more comprehensive demonstration of various LLM providers and MCP tools, r
 
  ### LLM Providers (Auto-detected)
  - **With Full Cost Tracking**: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure OpenAI, Cohere, Mistral AI, Together AI, Groq, Ollama, Vertex AI
- - **Hardware/Local Pricing**: Replicate (hardware-based $/second), HuggingFace (local execution, free)
+ - **Hardware/Local Pricing**: Replicate (hardware-based $/second), HuggingFace (local execution with estimated costs)
+ - **HuggingFace Support**: `pipeline()`, `AutoModelForCausalLM.generate()`, `AutoModelForSeq2SeqLM.generate()`, `InferenceClient` API calls
  - **Other Providers**: Anyscale
 
  ### Frameworks
@@ -307,7 +308,10 @@ The library includes comprehensive cost tracking with pricing data for **145+ mo
 
  ### Special Pricing Models
  - **Replicate**: Hardware-based pricing ($/second of GPU/CPU time) - not token-based
- - **HuggingFace Transformers**: Local execution - no API costs
+ - **HuggingFace Transformers**: Local model execution with estimated costs based on parameter count
+   - Supports `pipeline()`, `AutoModelForCausalLM.generate()`, `AutoModelForSeq2SeqLM.generate()`
+   - Cost estimation uses GPU/compute resource pricing tiers (tiny/small/medium/large)
+   - Automatic token counting from tensor shapes
 
  ### Pricing Features
  - **Differential Pricing**: Separate rates for prompt tokens vs. completion tokens
@@ -77,7 +77,8 @@ For a more comprehensive demonstration of various LLM providers and MCP tools, r
 
  ### LLM Providers (Auto-detected)
  - **With Full Cost Tracking**: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure OpenAI, Cohere, Mistral AI, Together AI, Groq, Ollama, Vertex AI
- - **Hardware/Local Pricing**: Replicate (hardware-based $/second), HuggingFace (local execution, free)
+ - **Hardware/Local Pricing**: Replicate (hardware-based $/second), HuggingFace (local execution with estimated costs)
+ - **HuggingFace Support**: `pipeline()`, `AutoModelForCausalLM.generate()`, `AutoModelForSeq2SeqLM.generate()`, `InferenceClient` API calls
  - **Other Providers**: Anyscale
 
  ### Frameworks
@@ -127,7 +128,10 @@ The library includes comprehensive cost tracking with pricing data for **145+ mo
 
  ### Special Pricing Models
  - **Replicate**: Hardware-based pricing ($/second of GPU/CPU time) - not token-based
- - **HuggingFace Transformers**: Local execution - no API costs
+ - **HuggingFace Transformers**: Local model execution with estimated costs based on parameter count
+   - Supports `pipeline()`, `AutoModelForCausalLM.generate()`, `AutoModelForSeq2SeqLM.generate()`
+   - Cost estimation uses GPU/compute resource pricing tiers (tiny/small/medium/large)
+   - Automatic token counting from tensor shapes
 
  ### Pricing Features
  - **Differential Pricing**: Separate rates for prompt tokens vs. completion tokens
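To make the tier-based estimation described in the hunk above concrete, here is an illustrative sketch that maps a local model's parameter count to a pricing tier. The thresholds and per-token rates are placeholder assumptions, not the values shipped in `genai_otel/llm_pricing.json`.

```python
# Illustrative only: tier thresholds and rates are assumptions, not the
# package's actual pricing data.
def pricing_tier(param_count: int) -> str:
    if param_count < 1_000_000_000:      # < 1B parameters
        return "tiny"
    if param_count < 7_000_000_000:      # < 7B
        return "small"
    if param_count < 40_000_000_000:     # < 40B
        return "medium"
    return "large"


ASSUMED_RATE_PER_1K_TOKENS = {"tiny": 0.0001, "small": 0.0005, "medium": 0.002, "large": 0.006}


def estimate_cost(param_count: int, total_tokens: int) -> float:
    # Cost = tier rate (USD per 1K tokens) scaled by the token count.
    return ASSUMED_RATE_PER_1K_TOKENS[pricing_tier(param_count)] * (total_tokens / 1000)


# e.g. a ~0.6B-parameter model generating 120 tokens falls in the "tiny" tier
print(estimate_cost(600_000_000, 120))
```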
@@ -0,0 +1,89 @@
+ """HuggingFace AutoModelForCausalLM Example with Token Counting and Cost Tracking.
+
+ This example demonstrates:
+ 1. Auto-instrumentation of AutoModelForCausalLM.generate()
+ 2. Automatic token counting (prompt + completion tokens)
+ 3. Cost calculation for local model inference
+ 4. Full observability with traces and metrics
+
+ Requirements:
+     pip install transformers torch
+ """
+
+ import genai_otel
+
+ # Auto-instrument HuggingFace Transformers
+ genai_otel.instrument()
+
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ print("\n" + "=" * 80)
+ print("Loading model and tokenizer...")
+ print("=" * 80 + "\n")
+
+ # Load a small model for testing (~0.6B parameters)
+ model_name = "Qwen/Qwen3-0.6B"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ print(f"Model loaded: {model_name}")
+ print(f"Model config: {model.config._name_or_path}\n")
+
+ # Prepare input
+ prompt = "The future of AI is"
+ inputs = tokenizer(prompt, return_tensors="pt")
+
+ print(f"Prompt: '{prompt}'")
+ print(f"Input tokens: {inputs['input_ids'].shape[-1]}\n")
+
+ print("=" * 80)
+ print("Generating text (instrumented)...")
+ print("=" * 80 + "\n")
+
+ # Generate text - this is automatically instrumented!
+ # The wrapper will:
+ # - Create a span with model info
+ # - Count input tokens (from input_ids.shape)
+ # - Count output tokens (from generated sequence length)
+ # - Calculate cost based on the model's parameter count (tier pricing)
+ # - Record metrics for tokens and cost
+ outputs = model.generate(
+     inputs["input_ids"],
+     max_new_tokens=50,
+     temperature=0.7,
+     do_sample=True,
+     pad_token_id=tokenizer.eos_token_id,
+ )
+
+ # Decode the generated text
+ generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+ print(f"Generated text: {generated_text}\n")
+ print(f"Total output tokens: {outputs.shape[-1]}")
+ print(f"Input tokens: {inputs['input_ids'].shape[-1]}")
+ print(f"Generated (new) tokens: {outputs.shape[-1] - inputs['input_ids'].shape[-1]}\n")
+
+ print("=" * 80)
+ print("Telemetry captured:")
+ print("=" * 80)
+ print("✓ Span created: huggingface.model.generate")
+ print("✓ Attributes set:")
+ print(f"  - gen_ai.system: huggingface")
+ print(f"  - gen_ai.request.model: {model_name}")
+ print(f"  - gen_ai.operation.name: text_generation")
+ print(f"  - gen_ai.usage.prompt_tokens: {inputs['input_ids'].shape[-1]}")
+ print(f"  - gen_ai.usage.completion_tokens: {outputs.shape[-1] - inputs['input_ids'].shape[-1]}")
+ print(f"  - gen_ai.usage.total_tokens: {outputs.shape[-1]}")
+ print("  - gen_ai.usage.cost.total: $X.XXXXXX (estimated)")
+ print("  - gen_ai.usage.cost.prompt: $X.XXXXXX")
+ print("  - gen_ai.usage.cost.completion: $X.XXXXXX")
+ print("\n✓ Metrics recorded:")
+ print("  - gen_ai.requests counter")
+ print("  - gen_ai.client.token.usage (prompt + completion)")
+ print("  - gen_ai.client.operation.duration histogram")
+ print("  - gen_ai.usage.cost counter")
+ print("\n✓ Traces and metrics exported to OTLP endpoint!")
+ print("=" * 80)
+
+ print("\nNote: Cost is estimated based on model size (parameter count).")
+ print("Local models are free to run, but costs reflect GPU/compute resources.")
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID
 
- __version__ = version = '0.1.7.dev0'
- __version_tuple__ = version_tuple = (0, 1, 7, 'dev0')
+ __version__ = version = '0.1.9.dev0'
+ __version_tuple__ = version_tuple = (0, 1, 9, 'dev0')
 
- __commit_id__ = commit_id = 'g6e54041fe'
+ __commit_id__ = commit_id = 'gf4ccb18e4'
@@ -19,6 +19,7 @@ from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExport
  from .config import OTelConfig
  from .cost_calculator import CostCalculator
  from .cost_enrichment_processor import CostEnrichmentSpanProcessor
+ from .cost_enriching_exporter import CostEnrichingSpanExporter
  from .gpu_metrics import GPUMetricsCollector
  from .mcp_instrumentors import MCPInstrumentorManager
  from .metrics import (
@@ -169,14 +170,17 @@ def setup_auto_instrumentation(config: OTelConfig):
 
      set_global_textmap(TraceContextTextMapPropagator())
 
-     # Add cost enrichment processor for OpenInference instrumentors
-     # This enriches spans from smolagents, litellm, mcp with cost attributes
+     # Add cost enrichment processor for custom instrumentors (OpenAI, Ollama, etc.)
+     # These instrumentors set cost attributes directly, so processor is mainly for logging
+     # Also attempts to enrich OpenInference spans (smolagents, litellm, mcp), though
+     # the processor can't modify ReadableSpan - the exporter below handles that
+     cost_calculator = None
      if config.enable_cost_tracking:
          try:
              cost_calculator = CostCalculator()
              cost_processor = CostEnrichmentSpanProcessor(cost_calculator)
              tracer_provider.add_span_processor(cost_processor)
-             logger.info("Cost enrichment processor added for OpenInference instrumentors")
+             logger.info("Cost enrichment processor added")
          except Exception as e:
              logger.warning(f"Failed to add cost enrichment processor: {e}", exc_info=True)
 
@@ -0,0 +1,207 @@
+ """Custom SpanExporter that enriches spans with cost attributes before export.
+
+ This exporter wraps another exporter (like OTLPSpanExporter) and adds cost
+ attributes to spans before passing them to the wrapped exporter.
+ """
+
+ import logging
+ from typing import Optional, Sequence
+
+ from opentelemetry.sdk.trace import ReadableSpan
+ from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+
+ from .cost_calculator import CostCalculator
+
+ logger = logging.getLogger(__name__)
+
+
+ class CostEnrichingSpanExporter(SpanExporter):
+     """Wraps a SpanExporter and enriches spans with cost attributes before export.
+
+     This exporter:
+     1. Receives ReadableSpan objects from the SDK
+     2. Extracts model name and token usage from span attributes
+     3. Calculates cost using CostCalculator
+     4. Creates enriched span data with cost attributes
+     5. Exports to the wrapped exporter (e.g., OTLP)
+     """
+
+     def __init__(
+         self, wrapped_exporter: SpanExporter, cost_calculator: Optional[CostCalculator] = None
+     ):
+         """Initialize the cost enriching exporter.
+
+         Args:
+             wrapped_exporter: The underlying exporter to send enriched spans to.
+             cost_calculator: CostCalculator instance to use for cost calculations.
+                 If None, creates a new instance.
+         """
+         self.wrapped_exporter = wrapped_exporter
+         self.cost_calculator = cost_calculator or CostCalculator()
+         logger.info(
+             f"CostEnrichingSpanExporter initialized, wrapping {type(wrapped_exporter).__name__}"
+         )
+
+     def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+         """Export spans after enriching them with cost attributes.
+
+         Args:
+             spans: Sequence of ReadableSpan objects to export.
+
+         Returns:
+             SpanExportResult from the wrapped exporter.
+         """
+         try:
+             # Enrich spans with cost attributes
+             enriched_spans = []
+             for span in spans:
+                 enriched_span = self._enrich_span(span)
+                 enriched_spans.append(enriched_span)
+
+             # Export to wrapped exporter
+             return self.wrapped_exporter.export(enriched_spans)
+
+         except Exception as e:
+             logger.error(f"Failed to export spans: {e}", exc_info=True)
+             return SpanExportResult.FAILURE
+
+     def _enrich_span(self, span: ReadableSpan) -> ReadableSpan:
+         """Enrich a span with cost attributes if applicable.
+
+         Args:
+             span: The original ReadableSpan.
+
+         Returns:
+             A new ReadableSpan with cost attributes added (or the original if not applicable).
+         """
+         try:
+             # Check if span has LLM-related attributes
+             if not span.attributes:
+                 return span
+
+             attributes = dict(span.attributes)  # Make a mutable copy
+
+             # Check for model name - support both GenAI and OpenInference conventions
+             model = (
+                 attributes.get("gen_ai.request.model")
+                 or attributes.get("llm.model_name")
+                 or attributes.get("embedding.model_name")
+             )
+             if not model:
+                 return span
+
+             # Skip if cost attributes are already present
+             if "gen_ai.usage.cost.total" in attributes:
+                 logger.debug(f"Span '{span.name}' already has cost attributes, skipping enrichment")
+                 return span
+
+             # Extract token usage - support GenAI, OpenInference, and legacy conventions
+             prompt_tokens = (
+                 attributes.get("gen_ai.usage.prompt_tokens")
+                 or attributes.get("gen_ai.usage.input_tokens")
+                 or attributes.get("llm.token_count.prompt")  # OpenInference
+                 or 0
+             )
+             completion_tokens = (
+                 attributes.get("gen_ai.usage.completion_tokens")
+                 or attributes.get("gen_ai.usage.output_tokens")
+                 or attributes.get("llm.token_count.completion")  # OpenInference
+                 or 0
+             )
+
+             # Skip if no tokens recorded
+             if prompt_tokens == 0 and completion_tokens == 0:
+                 return span
+
+             # Get call type - support both GenAI and OpenInference conventions
+             span_kind = attributes.get("openinference.span.kind", "").upper()
+             call_type = attributes.get("gen_ai.operation.name") or span_kind.lower() or "chat"
+
+             # Map operation names to call types
+             call_type_mapping = {
+                 "chat": "chat",
+                 "completion": "chat",
+                 "embedding": "embedding",
+                 "embeddings": "embedding",
+                 "text_generation": "chat",
+                 "image_generation": "image",
+                 "audio": "audio",
+                 "llm": "chat",
+                 "chain": "chat",
+                 "retriever": "embedding",
+                 "reranker": "embedding",
+                 "tool": "chat",
+                 "agent": "chat",
+             }
+             normalized_call_type = call_type_mapping.get(str(call_type).lower(), "chat")
+
+             # Calculate cost
+             usage = {
+                 "prompt_tokens": int(prompt_tokens),
+                 "completion_tokens": int(completion_tokens),
+                 "total_tokens": int(prompt_tokens) + int(completion_tokens),
+             }
+
+             cost_info = self.cost_calculator.calculate_granular_cost(
+                 model=str(model),
+                 usage=usage,
+                 call_type=normalized_call_type,
+             )
+
+             if cost_info and cost_info.get("total", 0.0) > 0:
+                 # Add cost attributes to the mutable copy
+                 attributes["gen_ai.usage.cost.total"] = cost_info["total"]
+
+                 if cost_info.get("prompt", 0.0) > 0:
+                     attributes["gen_ai.usage.cost.prompt"] = cost_info["prompt"]
+                 if cost_info.get("completion", 0.0) > 0:
+                     attributes["gen_ai.usage.cost.completion"] = cost_info["completion"]
+
+                 logger.info(
+                     f"Enriched span '{span.name}' with cost: {cost_info['total']:.6f} USD "
+                     f"for model {model} ({usage['total_tokens']} tokens)"
+                 )
+
+                 # Create a new ReadableSpan with enriched attributes
+                 # ReadableSpan is immutable once ended, so we build a replacement instance
+                 from opentelemetry.sdk.trace import ReadableSpan as RS
+
+                 enriched_span = RS(
+                     name=span.name,
+                     context=span.context,
+                     kind=span.kind,
+                     parent=span.parent,
+                     start_time=span.start_time,
+                     end_time=span.end_time,
+                     status=span.status,
+                     attributes=attributes,  # Use enriched attributes
+                     events=span.events,
+                     links=span.links,
+                     resource=span.resource,
+                     instrumentation_scope=span.instrumentation_scope,
+                 )
+                 return enriched_span
+
+         except Exception as e:
+             logger.warning(
+                 f"Failed to enrich span '{getattr(span, 'name', 'unknown')}' with cost: {e}",
+                 exc_info=True,
+             )
+
+         return span
+
+     def shutdown(self) -> None:
+         """Shutdown the wrapped exporter."""
+         logger.info("CostEnrichingSpanExporter shutting down")
+         self.wrapped_exporter.shutdown()
+
+     def force_flush(self, timeout_millis: int = 30000) -> bool:
+         """Force flush the wrapped exporter.
+
+         Args:
+             timeout_millis: Timeout in milliseconds.
+
+         Returns:
+             True if flush succeeded.
+         """
+         return self.wrapped_exporter.force_flush(timeout_millis)
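For orientation, a minimal usage sketch follows. It assumes the constructor shown above and standard OpenTelemetry SDK wiring; the endpoint and exact setup are illustrative rather than the package's `auto_instrument.py` code.

```python
# Minimal sketch: wrap an OTLP exporter so exported spans gain cost attributes.
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from genai_otel.cost_calculator import CostCalculator
from genai_otel.cost_enriching_exporter import CostEnrichingSpanExporter

provider = TracerProvider()
otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317")  # endpoint is an assumption
enriching_exporter = CostEnrichingSpanExporter(otlp_exporter, CostCalculator())
provider.add_span_processor(BatchSpanProcessor(enriching_exporter))
trace.set_tracer_provider(provider)
```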
@@ -132,9 +132,8 @@ class CostEnrichmentSpanProcessor(SpanProcessor):
 
          if cost_info and cost_info.get("total", 0.0) > 0:
              # Add cost attributes to the span
-             # Note: We can't modify ReadableSpan attributes directly,
-             # but we can if span is still a Span instance
-             if isinstance(span, Span):
+             # Use duck typing to check if span supports set_attribute
+             if hasattr(span, "set_attribute") and callable(getattr(span, "set_attribute")):
                  span.set_attribute("gen_ai.usage.cost.total", cost_info["total"])
 
                  if cost_info.get("prompt", 0.0) > 0: