monocle-apptrace 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of monocle-apptrace might be problematic; see the advisory in the package registry listing for more details.

@@ -23,6 +23,7 @@ from monocle_apptrace.instrumentation.metamodel.aiohttp.methods import (AIOHTTP_
23
23
  from monocle_apptrace.instrumentation.metamodel.aiohttp._helper import aiohttpSpanHandler
24
24
  from monocle_apptrace.instrumentation.metamodel.azfunc._helper import (azureSpanHandler)
25
25
  from monocle_apptrace.instrumentation.metamodel.azfunc.methods import AZFUNC_HTTP_METHODS
26
+ from monocle_apptrace.instrumentation.metamodel.gemini.methods import GEMINI_METHODS
26
27
  class WrapperMethod:
27
28
  def __init__(
28
29
  self,
@@ -71,7 +72,7 @@ class WrapperMethod:
71
72
  def get_span_handler(self) -> SpanHandler:
72
73
  return self.span_handler()
73
74
 
74
- DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS + AIOHTTP_METHODS + AZURE_AI_INFERENCE_METHODS + AZFUNC_HTTP_METHODS
75
+ DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS + AIOHTTP_METHODS + AZURE_AI_INFERENCE_METHODS + AZFUNC_HTTP_METHODS + GEMINI_METHODS
75
76
 
76
77
  MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
77
78
  "default": SpanHandler(),
@@ -0,0 +1,76 @@
1
+ import logging
2
+ from monocle_apptrace.instrumentation.common.utils import (
3
+ get_exception_message,
4
+ get_status_code,
5
+ )
6
+
7
+ logger = logging.getLogger(__name__)
8
+
9
def resolve_from_alias(my_map, alias):
    """Return the value for the first alias found in *my_map*.

    Args:
        my_map: Mapping to look the aliases up in.
        alias: Iterable of candidate key names, checked in order.

    Returns:
        The value associated with the first matching key, or None when
        none of the aliases is present.
    """
    for key in alias:
        # Membership test on the mapping itself; calling .keys() is redundant.
        if key in my_map:
            return my_map[key]
    return None
16
+
17
def extract_messages(kwargs):
    """Collect system instructions and conversation turns from Gemini call kwargs.

    Returns a list of stringified one-entry dicts (role -> text); empty list
    on any extraction error.
    """
    try:
        collected = []

        cfg = kwargs.get('config')
        if cfg and hasattr(cfg, 'system_instruction'):
            system_text = getattr(cfg, 'system_instruction', None)
            if system_text:
                collected.append({'system': system_text})

        contents = kwargs.get('contents')
        if isinstance(contents, str):
            # Plain-string prompt: record it under a generic 'input' key.
            collected.append({'input': contents})
        elif isinstance(contents, list):
            for entry in contents:
                parts = getattr(entry, 'parts', None)
                if parts:
                    # Only the first part of each content entry is captured.
                    first_part = parts[0]
                    if hasattr(first_part, 'text'):
                        collected.append({getattr(entry, 'role', 'user'): first_part.text})

        return [str(item) for item in collected]
    except Exception as e:
        logger.warning("Warning: Error occurred in extract_messages: %s", str(e))
        return []
41
+
42
def extract_assistant_message(arguments):
    """Return the LLM reply text on success, or an error description otherwise.

    Returns None when the response structure is not as expected.
    """
    try:
        reply: str = ""
        if get_status_code(arguments) == 'success':
            result = arguments['result']
            # A non-empty .text attribute on the result is the assistant reply.
            if hasattr(result, "text") and len(result.text):
                reply = result.text
        else:
            if arguments["exception"] is not None:
                reply = get_exception_message(arguments)
            elif hasattr(arguments["result"], "error"):
                reply = arguments["result"].error
        return reply
    except (IndexError, AttributeError) as e:
        logger.warning("Warning: Error occurred in extract_assistant_message: %s", str(e))
        return None
58
+
59
def extract_inference_endpoint(instance):
    """Return the client's configured base URL, or [] when it cannot be found.

    NOTE(review): the fallback value is an empty list rather than None/"" —
    preserved as-is; confirm downstream processors expect this.
    """
    try:
        api_client = getattr(instance, '_api_client', None)
        http_options = getattr(api_client, '_http_options', None)
        if http_options is not None and hasattr(http_options, 'base_url'):
            return http_options.base_url
    except Exception as e:
        logger.warning("Warning: Error occurred in inference endpoint: %s", str(e))
    return []
67
+
68
def update_span_from_llm_response(response, instance):
    """Build a token-usage metadata dict from a Gemini response.

    Args:
        response: The generate_content response; token counts are read from
            its ``usage_metadata`` attribute when present.
        instance: The client instance (unused here; kept for interface
            compatibility with the other metamodel helpers).

    Returns:
        Dict with ``completion_tokens``/``prompt_tokens``/``total_tokens``,
        or an empty dict when no usage metadata is available.
    """
    meta_dict = {}
    # getattr with a default collapses the original triple check
    # (response not None, hasattr, attribute not None) into one lookup.
    token_usage = getattr(response, "usage_metadata", None)
    if token_usage is not None:
        meta_dict["completion_tokens"] = token_usage.candidates_token_count
        meta_dict["prompt_tokens"] = token_usage.prompt_token_count
        meta_dict["total_tokens"] = token_usage.total_token_count
    return meta_dict
@@ -0,0 +1,75 @@
1
+ from monocle_apptrace.instrumentation.metamodel.gemini import (
2
+ _helper,
3
+ )
4
+ from monocle_apptrace.instrumentation.common.utils import get_llm_type, get_status, get_status_code
5
# Output-processor spec for Gemini inference spans: entity attributes plus
# input/output/metadata events, each value resolved lazily via an accessor.
INFERENCE = {
    "type": "inference",
    "attributes": [
        [
            {
                "_comment": "provider type , inference_endpoint",
                "attribute": "type",
                "accessor": lambda args: 'inference.gemini',
            },
            {
                "attribute": "inference_endpoint",
                "accessor": lambda args: _helper.extract_inference_endpoint(args['instance']),
            },
        ],
        [
            {
                "_comment": "LLM Model",
                "attribute": "name",
                "accessor": lambda args: _helper.resolve_from_alias(args['kwargs'], ['model']),
            },
            {
                "attribute": "type",
                # NOTE(review): resolve_from_alias may return None, which would make
                # this concatenation raise — confirm 'model' is always in kwargs.
                "accessor": lambda args: 'model.llm.' + _helper.resolve_from_alias(args['kwargs'], ['model']),
            },
        ],
    ],
    "events": [
        {
            "name": "data.input",
            "attributes": [
                {
                    "_comment": "this is instruction and user query to LLM",
                    "attribute": "input",
                    "accessor": lambda args: _helper.extract_messages(args['kwargs']),
                },
            ],
        },
        {
            "name": "data.output",
            "attributes": [
                {
                    "_comment": "this is result from LLM",
                    "attribute": "status",
                    "accessor": lambda args: get_status(args),
                },
                {
                    "attribute": "status_code",
                    "accessor": lambda args: get_status_code(args),
                },
                {
                    "attribute": "response",
                    "accessor": lambda args: _helper.extract_assistant_message(args),
                },
            ],
        },
        {
            "name": "metadata",
            "attributes": [
                {
                    "_comment": "this is metadata usage from LLM",
                    "accessor": lambda args: _helper.update_span_from_llm_response(args['result'], args['instance']),
                },
            ],
        },
    ],
}
@@ -0,0 +1,14 @@
1
+ from monocle_apptrace.instrumentation.common.wrapper import task_wrapper
2
+ from monocle_apptrace.instrumentation.metamodel.gemini.entities.inference import (
3
+ INFERENCE,
4
+ )
5
+
6
# Instrumentation targets for the google-genai SDK: wrap the synchronous
# Models.generate_content call with the generic task wrapper and attach the
# Gemini INFERENCE output processor to build the span.
GEMINI_METHODS = [
    {
        "package": "google.genai.models",
        "object": "Models",
        "method": "generate_content",
        "wrapper_method": task_wrapper,
        "output_processor": INFERENCE,
    }
]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: monocle_apptrace
3
- Version: 0.4.1
3
+ Version: 0.4.2
4
4
  Summary: package with monocle genAI tracing
5
5
  Project-URL: Homepage, https://github.com/monocle2ai/monocle
6
6
  Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -18,7 +18,7 @@ monocle_apptrace/instrumentation/common/span_handler.py,sha256=sWlGglXjCX8UjL7YV
18
18
  monocle_apptrace/instrumentation/common/tracing.md,sha256=6Lr8QGxEFHKhj-mMvLV3xjFnplKSs6HEdwl0McPK47M,7577
19
19
  monocle_apptrace/instrumentation/common/utils.py,sha256=1A_i0fz9QAgsdqfytGzwxdURs5MHXO0ZlyP4QDErmCI,14498
20
20
  monocle_apptrace/instrumentation/common/wrapper.py,sha256=NZC0xymn2q6_bFK0d91F0Z-W-YoCmIjOZEm1t1XKSY4,11409
21
- monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=Dqqe2UuhVq-Ej07YnWWnnkz_Jkx10gIZfykeCTmwySs,4396
21
+ monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=b70d6C08y8aYLXZc0abs84Vf3dCpXSk6RqhyChPDqng,4498
22
22
  monocle_apptrace/instrumentation/metamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
23
  monocle_apptrace/instrumentation/metamodel/aiohttp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
24
  monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py,sha256=h-zrif2vgPL9JbCf1eKHbKBYHAxMdHxOdY-soIDGti8,2361
@@ -47,6 +47,11 @@ monocle_apptrace/instrumentation/metamodel/flask/__init__.py,sha256=47DEQpj8HBSa
47
47
  monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=seLVsL5gE3GbjY3Yetgg1WnswhDzb0zEQR05fHf5xTM,3094
48
48
  monocle_apptrace/instrumentation/metamodel/flask/methods.py,sha256=dWCMEDk-HWHiD0vlMoAVYbIFclstmVkUpRrCtqDWyFE,739
49
49
  monocle_apptrace/instrumentation/metamodel/flask/entities/http.py,sha256=wIudpNk6-DY72k0p90XtvjKt8BilvOd-87Q2iqJnWa8,1525
50
+ monocle_apptrace/instrumentation/metamodel/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
+ monocle_apptrace/instrumentation/metamodel/gemini/_helper.py,sha256=pNwwO_J-ZDYxhoiRkrOmWqiMVOCuByHaydiT9CDyC38,3044
52
+ monocle_apptrace/instrumentation/metamodel/gemini/methods.py,sha256=lIAXZ15MVOWsGMiWLMp6ZeOlQxG1cP3gIbz6cp794ps,383
53
+ monocle_apptrace/instrumentation/metamodel/gemini/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
+ monocle_apptrace/instrumentation/metamodel/gemini/entities/inference.py,sha256=oJnPMI8m04A9OLwZEUEMJp_lBxYFSAXUFybpYBU4Rd8,2553
50
55
  monocle_apptrace/instrumentation/metamodel/haystack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
56
  monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=IacHHTBqts3l-fWgq_6gPiycbGIWHiEduDW26-s6SDc,5709
52
57
  monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=Zd70ycMQ5qWsjPXnQL6qoThNKrQA80P6t11sFyEbQR4,1585
@@ -89,8 +94,8 @@ monocle_apptrace/instrumentation/metamodel/teamsai/entities/__init__.py,sha256=4
89
94
  monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
90
95
  monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py,sha256=f2ZgGIutsvQJ81ocMS3CD0TjiTVEqS8hT_xvt_fh2Kk,3200
91
96
  monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py,sha256=M5uPEbP5c57txrd7BDRXhK5rvRJfyNyvqavtkXuPjXU,2738
92
- monocle_apptrace-0.4.1.dist-info/METADATA,sha256=fbTO9VKwbju2ndxG4AEYGNgCdHw7vmmlWPe-VTPP0HA,6590
93
- monocle_apptrace-0.4.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
94
- monocle_apptrace-0.4.1.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
95
- monocle_apptrace-0.4.1.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
96
- monocle_apptrace-0.4.1.dist-info/RECORD,,
97
+ monocle_apptrace-0.4.2.dist-info/METADATA,sha256=w845IPzUbC_fE6Y2wLZYacm42gVpB-S2-U9XK3WjwQw,6590
98
+ monocle_apptrace-0.4.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
99
+ monocle_apptrace-0.4.2.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
100
+ monocle_apptrace-0.4.2.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
101
+ monocle_apptrace-0.4.2.dist-info/RECORD,,