monocle-apptrace 0.3.0b5__py3-none-any.whl → 0.3.0b6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of monocle-apptrace might be problematic. Click here for more details.

Files changed (24) hide show
  1. monocle_apptrace/exporters/azure/blob_exporter.py +7 -0
  2. monocle_apptrace/exporters/okahu/okahu_exporter.py +1 -1
  3. monocle_apptrace/instrumentation/common/constants.py +17 -0
  4. monocle_apptrace/instrumentation/common/instrumentor.py +54 -45
  5. monocle_apptrace/instrumentation/common/span_handler.py +81 -37
  6. monocle_apptrace/instrumentation/common/utils.py +55 -6
  7. monocle_apptrace/instrumentation/common/wrapper.py +41 -45
  8. monocle_apptrace/instrumentation/common/wrapper_method.py +8 -4
  9. monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +1 -1
  10. monocle_apptrace/instrumentation/metamodel/haystack/methods.py +2 -1
  11. monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +3 -2
  12. monocle_apptrace/instrumentation/metamodel/langchain/methods.py +12 -6
  13. monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +5 -3
  14. monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +6 -3
  15. monocle_apptrace/instrumentation/metamodel/openai/_helper.py +31 -7
  16. monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +1 -1
  17. monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +20 -1
  18. monocle_apptrace/instrumentation/metamodel/openai/methods.py +21 -1
  19. monocle_apptrace/instrumentation/metamodel/requests/__init__.py +3 -1
  20. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b6.dist-info}/METADATA +1 -1
  21. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b6.dist-info}/RECORD +24 -24
  22. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b6.dist-info}/WHEEL +0 -0
  23. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b6.dist-info}/licenses/LICENSE +0 -0
  24. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b6.dist-info}/licenses/NOTICE +0 -0
@@ -11,6 +11,7 @@ from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
11
11
  from typing import Sequence
12
12
  from monocle_apptrace.exporters.base_exporter import SpanExporterBase
13
13
  import json
14
+ from monocle_apptrace.instrumentation.common.constants import MONOCLE_SDK_VERSION
14
15
  logger = logging.getLogger(__name__)
15
16
 
16
17
  class AzureBlobSpanExporter(SpanExporterBase):
@@ -72,6 +73,12 @@ class AzureBlobSpanExporter(SpanExporterBase):
72
73
  """The actual async export logic is run here."""
73
74
  # Add spans to the export queue
74
75
  for span in spans:
76
+ # Azure blob library has a check to generate its own span if OpenTelemetry is loaded and Azure trace package is installed (just pip install azure-trace-opentelemetry)
77
+ # With Monocle, OpenTelemetry is always loaded. If the Azure trace package is installed, then it triggers the blob trace generation on every blob operation.
78
+ # Thus, the Monocle span write ends up generating a blob span which again comes back to the exporter .. and would result in an infinite loop.
79
+ # To avoid this, we check if the span has the Monocle SDK version attribute and skip it if it doesn't. That way the blob spans generated by the Azure library are not exported.
80
+ if not span.attributes.get(MONOCLE_SDK_VERSION):
81
+ continue # TODO: All exporters to use same base class and check it there
75
82
  self.export_queue.append(span)
76
83
  if len(self.export_queue) >= self.max_batch_size:
77
84
  await self.__export_spans()
@@ -83,7 +83,7 @@ class OkahuSpanExporter(SpanExporter):
83
83
  result.text,
84
84
  )
85
85
  return SpanExportResult.FAILURE
86
- logger.warning("spans successfully exported to okahu")
86
+ logger.debug("spans successfully exported to okahu")
87
87
  return SpanExportResult.SUCCESS
88
88
  except ReadTimeout as e:
89
89
  logger.warning("Trace export timed out: %s", str(e))
@@ -37,6 +37,19 @@ service_name_map = {
37
37
  GITHUB_CODESPACE_SERVICE_NAME: GITHUB_CODESPACE_IDENTIFIER_ENV_NAME
38
38
  }
39
39
 
40
+
41
+ llm_type_map = {
42
+ "sagemakerendpoint": "aws_sagemaker",
43
+ "azureopenai": "azure_openai",
44
+ "openai": "openai",
45
+ "chatopenai": "openai",
46
+ "azurechatopenai": "azure_openai",
47
+ "bedrock": "aws_bedrock",
48
+ "sagemakerllm": "aws_sagemaker",
49
+ "chatbedrock": "aws_bedrock",
50
+ "openaigenerator": "openai",
51
+ }
52
+
40
53
  MONOCLE_INSTRUMENTOR = "monocle_apptrace"
41
54
  WORKFLOW_TYPE_KEY = "workflow_type"
42
55
  DATA_INPUT_KEY = "data.input"
@@ -52,3 +65,7 @@ MONOCLE_SCOPE_NAME_PREFIX = "monocle.scope."
52
65
  SCOPE_METHOD_LIST = 'MONOCLE_SCOPE_METHODS'
53
66
  SCOPE_METHOD_FILE = 'monocle_scopes.json'
54
67
  SCOPE_CONFIG_PATH = 'MONOCLE_SCOPE_CONFIG_PATH'
68
+ TRACE_PROPOGATION_URLS = "MONOCLE_TRACE_PROPAGATATION_URLS"
69
+ WORKFLOW_TYPE_KEY = "monocle.workflow_type"
70
+ WORKFLOW_TYPE_GENERIC = "workflow.generic"
71
+ MONOCLE_SDK_VERSION = "monocle_apptrace.version"
@@ -1,4 +1,5 @@
1
1
  import logging
2
+ import inspect
2
3
  from typing import Collection, Dict, List, Union
3
4
  import random
4
5
  import uuid
@@ -14,7 +15,7 @@ from opentelemetry.sdk.trace import Span, TracerProvider
14
15
  from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanProcessor
15
16
  from opentelemetry.trace import get_tracer
16
17
  from wrapt import wrap_function_wrapper
17
- from opentelemetry.trace.propagation import set_span_in_context
18
+ from opentelemetry.trace.propagation import set_span_in_context, _SPAN_KEY
18
19
  from monocle_apptrace.exporters.monocle_exporters import get_monocle_exporter
19
20
  from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
20
21
  from monocle_apptrace.instrumentation.common.wrapper_method import (
@@ -22,11 +23,11 @@ from monocle_apptrace.instrumentation.common.wrapper_method import (
22
23
  WrapperMethod,
23
24
  MONOCLE_SPAN_HANDLERS
24
25
  )
25
- from monocle_apptrace.instrumentation.common.wrapper import scope_wrapper
26
+ from monocle_apptrace.instrumentation.common.wrapper import scope_wrapper, ascope_wrapper
26
27
  from monocle_apptrace.instrumentation.common.utils import (
27
- set_scope, remove_scope, http_route_handler, load_scopes
28
+ set_scope, remove_scope, http_route_handler, load_scopes, async_wrapper, http_async_route_handler
28
29
  )
29
- from monocle_apptrace.instrumentation.common.constants import MONOCLE_INSTRUMENTOR
30
+ from monocle_apptrace.instrumentation.common.constants import MONOCLE_INSTRUMENTOR, WORKFLOW_TYPE_KEY
30
31
  from functools import wraps
31
32
  logger = logging.getLogger(__name__)
32
33
 
@@ -67,8 +68,8 @@ class MonocleInstrumentor(BaseInstrumentor):
67
68
  handler = SpanHandler()
68
69
  with tracer.start_as_current_span(span_name) as span:
69
70
  response = fn(*args, **kwargs)
70
- handler.hydrate_span(to_wrap, span=span, wrapped=wrapped, instance=instance, args=args, kwargs=kwargs,
71
- result=response)
71
+ handler.hydrate_span(to_wrap, wrapped=wrapped, instance=instance, args=args, kwargs=kwargs,
72
+ result=response, span=span)
72
73
  return response
73
74
 
74
75
  return with_instrumentation
@@ -93,7 +94,10 @@ class MonocleInstrumentor(BaseInstrumentor):
93
94
  final_method_list.append(method.to_dict())
94
95
 
95
96
  for method in load_scopes():
96
- method['wrapper_method'] = scope_wrapper
97
+ if method.get('async', False):
98
+ method['wrapper_method'] = ascope_wrapper
99
+ else:
100
+ method['wrapper_method'] = scope_wrapper
97
101
  final_method_list.append(method)
98
102
 
99
103
  for method_config in final_method_list:
@@ -191,40 +195,30 @@ def on_processor_start(span: Span, parent_context):
191
195
  def set_context_properties(properties: dict) -> None:
192
196
  attach(set_value(SESSION_PROPERTIES_KEY, properties))
193
197
 
194
-
195
- def propagate_trace_id(traceId = "", use_trace_context = False):
198
+ def start_trace():
196
199
  try:
197
- if traceId.startswith("0x"):
198
- traceId = traceId.lstrip("0x")
199
200
  tracer = get_tracer(instrumenting_module_name= MONOCLE_INSTRUMENTOR, tracer_provider= get_tracer_provider())
200
- initial_id_generator = tracer.id_generator
201
- _parent_span_context = get_current() if use_trace_context else None
202
- if traceId and is_valid_trace_id_uuid(traceId):
203
- tracer.id_generator = FixedIdGenerator(uuid.UUID(traceId).int)
204
-
205
- span = tracer.start_span(name = "parent_placeholder_span", context= _parent_span_context)
206
- updated_span_context = set_span_in_context(span=span, context= _parent_span_context)
207
- updated_span_context = set_value("root_span_id", span.get_span_context().span_id, updated_span_context)
208
- token = attach(updated_span_context)
209
-
210
- span.end()
211
- tracer.id_generator = initial_id_generator
201
+ span = tracer.start_span(name = "workflow")
202
+ updated_span_context = set_span_in_context(span=span)
203
+ SpanHandler.set_default_monocle_attributes(span)
204
+ SpanHandler.set_workflow_properties(span)
205
+ token = SpanHandler.attach_workflow_type(context=updated_span_context)
212
206
  return token
213
207
  except:
214
- logger.warning("Failed to propagate trace id")
215
- return
208
+ logger.warning("Failed to start trace")
209
+ return None
216
210
 
217
-
218
- def propagate_trace_id_from_traceparent():
219
- propagate_trace_id(use_trace_context = True)
220
-
221
-
222
- def stop_propagate_trace_id(token) -> None:
211
+ def stop_trace(token) -> None:
223
212
  try:
224
- detach(token)
213
+ _parent_span_context = get_current()
214
+ if _parent_span_context is not None:
215
+ parent_span: Span = _parent_span_context.get(_SPAN_KEY, None)
216
+ if parent_span is not None:
217
+ parent_span.end()
218
+ if token is not None:
219
+ SpanHandler.detach_workflow_type(token)
225
220
  except:
226
- logger.warning("Failed to stop propagating trace id")
227
-
221
+ logger.warning("Failed to stop trace")
228
222
 
229
223
  def is_valid_trace_id_uuid(traceId: str) -> bool:
230
224
  try:
@@ -248,23 +242,38 @@ def monocle_trace_scope(scope_name: str, scope_value:str = None):
248
242
  yield
249
243
  finally:
250
244
  stop_scope(token)
251
-
245
+
252
246
  def monocle_trace_scope_method(scope_name: str):
253
247
  def decorator(func):
254
- def wrapper(*args, **kwargs):
255
- token = start_scope(scope_name)
256
- try:
257
- result = func(*args, **kwargs)
248
+ if inspect.iscoroutinefunction(func):
249
+ @wraps(func)
250
+ async def wrapper(*args, **kwargs):
251
+ result = async_wrapper(func, scope_name, None, *args, **kwargs)
258
252
  return result
259
- finally:
260
- stop_scope(token)
261
- return wrapper
253
+ return wrapper
254
+ else:
255
+ @wraps(func)
256
+ def wrapper(*args, **kwargs):
257
+ token = start_scope(scope_name)
258
+ try:
259
+ result = func(*args, **kwargs)
260
+ return result
261
+ finally:
262
+ stop_scope(token)
263
+ return wrapper
262
264
  return decorator
263
265
 
264
266
  def monocle_trace_http_route(func):
265
- def wrapper(req):
266
- return http_route_handler(req.headers, func, req)
267
- return wrapper
267
+ if inspect.iscoroutinefunction(func):
268
+ @wraps(func)
269
+ async def wrapper(*args, **kwargs):
270
+ return http_async_route_handler(func, *args, **kwargs)
271
+ return wrapper
272
+ else:
273
+ @wraps(func)
274
+ def wrapper(*args, **kwargs):
275
+ return http_route_handler(func, *args, **kwargs)
276
+ return wrapper
268
277
 
269
278
  class FixedIdGenerator(id_generator.IdGenerator):
270
279
  def __init__(
@@ -1,16 +1,17 @@
1
1
  import logging
2
2
  import os
3
3
  from importlib.metadata import version
4
- from opentelemetry.context import get_current
5
- from opentelemetry.context import get_value
4
+ from opentelemetry.context import get_value, set_value, attach, detach
6
5
  from opentelemetry.sdk.trace import Span
7
6
 
8
7
  from monocle_apptrace.instrumentation.common.constants import (
9
8
  QUERY,
10
9
  service_name_map,
11
10
  service_type_map,
11
+ MONOCLE_SDK_VERSION
12
12
  )
13
13
  from monocle_apptrace.instrumentation.common.utils import set_attribute, get_scopes
14
+ from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY, WORKFLOW_TYPE_GENERIC
14
15
 
15
16
  logger = logging.getLogger(__name__)
16
17
 
@@ -20,8 +21,6 @@ WORKFLOW_TYPE_MAP = {
20
21
  "haystack": "workflow.haystack"
21
22
  }
22
23
 
23
-
24
-
25
24
  class SpanHandler:
26
25
 
27
26
  def __init__(self,instrumentor=None):
@@ -40,18 +39,32 @@ class SpanHandler:
40
39
  pass
41
40
 
42
41
  def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
42
+ # If this is a workflow span type and a workflow span is already generated, then skip generating this span
43
+ if to_wrap.get('span_type') == "workflow" and self.is_workflow_span_active():
44
+ return True
43
45
  return False
44
46
 
45
47
  def pre_task_processing(self, to_wrap, wrapped, instance, args,kwargs, span):
46
- if self.__is_root_span(span):
47
- try:
48
- sdk_version = version("monocle_apptrace")
49
- span.set_attribute("monocle_apptrace.version", sdk_version)
50
- except Exception as e:
51
- logger.warning("Exception finding monocle-apptrace version.")
52
48
  if "pipeline" in to_wrap['package']:
53
49
  set_attribute(QUERY, args[0]['prompt_builder']['question'])
54
50
 
51
+ @staticmethod
52
+ def set_default_monocle_attributes(span: Span):
53
+ """ Set default monocle attributes for all spans """
54
+ try:
55
+ sdk_version = version("monocle_apptrace")
56
+ span.set_attribute(MONOCLE_SDK_VERSION, sdk_version)
57
+ except Exception as e:
58
+ logger.warning("Exception finding monocle-apptrace version.")
59
+ for scope_key, scope_value in get_scopes().items():
60
+ span.set_attribute(f"scope.{scope_key}", scope_value)
61
+
62
+ @staticmethod
63
+ def set_workflow_properties(span: Span, to_wrap = None):
64
+ """ Set attributes of workflow if this is a root span"""
65
+ SpanHandler.set_workflow_attributes(to_wrap, span)
66
+ SpanHandler.set_app_hosting_identifier_attribute(span)
67
+
55
68
  def post_task_processing(self, to_wrap, wrapped, instance, args, kwargs, result, span):
56
69
  pass
57
70
 
@@ -61,10 +74,8 @@ class SpanHandler:
61
74
 
62
75
  def hydrate_attributes(self, to_wrap, wrapped, instance, args, kwargs, result, span):
63
76
  span_index = 0
64
- if self.__is_root_span(span):
65
- span_index += self.set_workflow_attributes(to_wrap, span, span_index+1)
66
- span_index += self.set_app_hosting_identifier_attribute(span, span_index+1)
67
-
77
+ if SpanHandler.is_root_span(span):
78
+ span_index = 2 # root span will have workflow and hosting entities pre-populated
68
79
  if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
69
80
  output_processor=to_wrap['output_processor']
70
81
  if 'type' in output_processor:
@@ -124,46 +135,79 @@ class SpanHandler:
124
135
  logger.debug(f"Error evaluating accessor for attribute '{attribute_key}': {e}")
125
136
  span.add_event(name=event_name, attributes=event_attributes)
126
137
 
127
-
128
-
129
- def set_workflow_attributes(self, to_wrap, span: Span, span_index):
130
- return_value = 1
131
- workflow_name = self.get_workflow_name(span=span)
138
+ @staticmethod
139
+ def set_workflow_attributes(to_wrap, span: Span):
140
+ span_index = 1
141
+ workflow_name = SpanHandler.get_workflow_name(span=span)
132
142
  if workflow_name:
133
143
  span.set_attribute("span.type", "workflow")
134
144
  span.set_attribute(f"entity.{span_index}.name", workflow_name)
135
- # workflow type
136
- package_name = to_wrap.get('package')
137
- workflow_type_set = False
138
- for (package, workflow_type) in WORKFLOW_TYPE_MAP.items():
139
- if (package_name is not None and package in package_name):
140
- span.set_attribute(f"entity.{span_index}.type", workflow_type)
141
- workflow_type_set = True
142
- if not workflow_type_set:
143
- span.set_attribute(f"entity.{span_index}.type", "workflow.generic")
144
- return return_value
145
-
146
- def set_app_hosting_identifier_attribute(self, span, span_index):
147
- return_value = 0
145
+ workflow_type = SpanHandler.get_workflow_type(to_wrap)
146
+ span.set_attribute(f"entity.{span_index}.type", workflow_type)
147
+
148
+ @staticmethod
149
+ def get_workflow_type(to_wrap):
150
+ # workflow type
151
+ workflow_type = WORKFLOW_TYPE_GENERIC
152
+ if to_wrap is not None:
153
+ package_name = to_wrap.get('package')
154
+ for (package, framework_workflow_type) in WORKFLOW_TYPE_MAP.items():
155
+ if (package_name is not None and package in package_name):
156
+ workflow_type = framework_workflow_type
157
+ break
158
+ return workflow_type
159
+
160
+ def set_app_hosting_identifier_attribute(span):
161
+ span_index = 2
148
162
  # Search env to identify the infra service type; if found, check env for service name if possible
163
+ span.set_attribute(f"entity.{span_index}.type", f"app_hosting.generic")
164
+ span.set_attribute(f"entity.{span_index}.name", "generic")
149
165
  for type_env, type_name in service_type_map.items():
150
166
  if type_env in os.environ:
151
- return_value = 1
152
167
  span.set_attribute(f"entity.{span_index}.type", f"app_hosting.{type_name}")
153
168
  entity_name_env = service_name_map.get(type_name, "unknown")
154
169
  span.set_attribute(f"entity.{span_index}.name", os.environ.get(entity_name_env, "generic"))
155
- return return_value
156
170
 
157
- def get_workflow_name(self, span: Span) -> str:
171
+ @staticmethod
172
+ def get_workflow_name(span: Span) -> str:
158
173
  try:
159
174
  return get_value("workflow_name") or span.resource.attributes.get("service.name")
160
175
  except Exception as e:
161
176
  logger.exception(f"Error getting workflow name: {e}")
162
177
  return None
163
178
 
164
- def __is_root_span(self, curr_span: Span) -> bool:
179
+ @staticmethod
180
+ def is_root_span(curr_span: Span) -> bool:
165
181
  try:
166
182
  if curr_span is not None and hasattr(curr_span, "parent"):
167
- return curr_span.parent is None or get_current().get("root_span_id") == curr_span.parent.span_id
183
+ return curr_span.parent is None
168
184
  except Exception as e:
169
185
  logger.warning(f"Error finding root span: {e}")
186
+
187
+ def is_non_workflow_root_span(self, curr_span: Span, to_wrap) -> bool:
188
+ return SpanHandler.is_root_span(curr_span) and to_wrap.get("span_type") != "workflow"
189
+
190
+ def is_workflow_span_active(self):
191
+ return get_value(WORKFLOW_TYPE_KEY) is not None
192
+
193
+ @staticmethod
194
+ def attach_workflow_type(to_wrap=None, context=None):
195
+ token = None
196
+ if to_wrap:
197
+ if to_wrap.get('span_type') == "workflow":
198
+ token = attach(set_value(WORKFLOW_TYPE_KEY,
199
+ SpanHandler.get_workflow_type(to_wrap), context))
200
+ else:
201
+ token = attach(set_value(WORKFLOW_TYPE_KEY, WORKFLOW_TYPE_GENERIC, context))
202
+ return token
203
+
204
+ @staticmethod
205
+ def detach_workflow_type(token):
206
+ if token:
207
+ return detach(token)
208
+
209
+ class NonFrameworkSpanHandler(SpanHandler):
210
+
211
+ # If the language framework is being executed, then skip generating direct openAI spans
212
+ def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
213
+ return get_value(WORKFLOW_TYPE_KEY) in WORKFLOW_TYPE_MAP.values()
@@ -1,7 +1,7 @@
1
1
  import logging, json
2
2
  import os
3
3
  from typing import Callable, Generic, Optional, TypeVar, Mapping
4
- from threading import local
4
+ import threading, asyncio
5
5
 
6
6
  from opentelemetry.context import attach, detach, get_current, get_value, set_value, Context
7
7
  from opentelemetry.trace import NonRecordingSpan, Span, get_tracer
@@ -9,7 +9,7 @@ from opentelemetry.trace.propagation import _SPAN_KEY
9
9
  from opentelemetry.sdk.trace import id_generator, TracerProvider
10
10
  from opentelemetry.propagate import inject, extract
11
11
  from opentelemetry import baggage
12
- from monocle_apptrace.instrumentation.common.constants import MONOCLE_SCOPE_NAME_PREFIX, SCOPE_METHOD_FILE, SCOPE_CONFIG_PATH
12
+ from monocle_apptrace.instrumentation.common.constants import MONOCLE_SCOPE_NAME_PREFIX, SCOPE_METHOD_FILE, SCOPE_CONFIG_PATH, llm_type_map
13
13
 
14
14
  T = TypeVar('T')
15
15
  U = TypeVar('U')
@@ -232,13 +232,55 @@ def clear_http_scopes(token:object) -> None:
232
232
  global http_scopes
233
233
  remove_scopes(token)
234
234
 
235
- def http_route_handler(headers, func, req):
236
- token = extract_http_headers(headers)
235
+ def http_route_handler(func, *args, **kwargs):
236
+ if 'req' in kwargs and hasattr(kwargs['req'], 'headers'):
237
+ headers = kwargs['req'].headers
238
+ else:
239
+ headers = None
240
+ token = None
241
+ if headers is not None:
242
+ token = extract_http_headers(headers)
237
243
  try:
238
- result = func(req)
244
+ result = func(*args, **kwargs)
239
245
  return result
240
246
  finally:
241
- clear_http_scopes(token)
247
+ if token is not None:
248
+ clear_http_scopes(token)
249
+
250
+ async def http_async_route_handler(func, *args, **kwargs):
251
+ if 'req' in kwargs and hasattr(kwargs['req'], 'headers'):
252
+ headers = kwargs['req'].headers
253
+ else:
254
+ headers = None
255
+ return async_wrapper(func, None, headers, *args, **kwargs)
256
+
257
+ def run_async_with_scope(method, scope_name, headers, *args, **kwargs):
258
+ token = None
259
+ if scope_name:
260
+ token = set_scope(scope_name)
261
+ elif headers:
262
+ token = extract_http_headers(headers)
263
+ try:
264
+ return asyncio.run(method(*args, **kwargs))
265
+ finally:
266
+ if token:
267
+ remove_scope(token)
268
+
269
+ def async_wrapper(method, scope_name=None, headers=None, *args, **kwargs):
270
+ try:
271
+ run_loop = asyncio.get_running_loop()
272
+ except RuntimeError:
273
+ run_loop = None
274
+
275
+ if run_loop and run_loop.is_running():
276
+ results = []
277
+ thread = threading.Thread(target=lambda: results.append(run_async_with_scope(method, scope_name, headers, *args, **kwargs)))
278
+ thread.start()
279
+ thread.join()
280
+ return_value = results[0] if len(results) > 0 else None
281
+ return return_value
282
+ else:
283
+ return run_async_with_scope(method, scope_name, headers, *args, **kwargs)
242
284
 
243
285
  class Option(Generic[T]):
244
286
  def __init__(self, value: Optional[T]):
@@ -270,6 +312,13 @@ def try_option(func: Callable[..., T], *args, **kwargs) -> Option[T]:
270
312
  except Exception:
271
313
  return Option(None)
272
314
 
315
+ def get_llm_type(instance):
316
+ try:
317
+ llm_type = llm_type_map.get(type(instance).__name__.lower())
318
+ return llm_type
319
+ except:
320
+ pass
321
+
273
322
  def resolve_from_alias(my_map, alias):
274
323
  """Find a alias that is not none from list of aliases"""
275
324
  for i in alias:
@@ -1,21 +1,20 @@
1
1
  # pylint: disable=protected-access
2
2
  import logging
3
-
4
3
  from opentelemetry.trace import Tracer
4
+ from opentelemetry.context import set_value, attach, detach, get_value
5
5
 
6
6
  from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
7
7
  from monocle_apptrace.instrumentation.common.utils import (
8
8
  get_fully_qualified_class_name,
9
9
  with_tracer_wrapper,
10
10
  set_scope,
11
- remove_scope
11
+ remove_scope,
12
+ async_wrapper
12
13
  )
14
+ from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY
13
15
  logger = logging.getLogger(__name__)
14
16
 
15
-
16
- @with_tracer_wrapper
17
- def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
18
-
17
+ def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
19
18
  # Some Langchain objects are wrapped elsewhere, so we ignore them here
20
19
  if instance.__class__.__name__ in ("AgentExecutor"):
21
20
  return wrapped(*args, **kwargs)
@@ -28,49 +27,51 @@ def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instanc
28
27
  name = get_fully_qualified_class_name(instance)
29
28
 
30
29
  return_value = None
30
+ token = None
31
31
  try:
32
32
  handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
33
- if to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
34
- return_value = wrapped(*args, **kwargs)
35
- else:
36
- with tracer.start_as_current_span(name) as span:
37
- handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
33
+ skip_scan:bool = to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs)
34
+ token = SpanHandler.attach_workflow_type(to_wrap=to_wrap)
35
+ if skip_scan:
36
+ if async_task:
37
+ return_value = async_wrapper(wrapped, None, None, *args, **kwargs)
38
+ else:
38
39
  return_value = wrapped(*args, **kwargs)
39
- handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
40
- handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
40
+ else:
41
+ return_value = span_processor(name, async_task, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
41
42
  return return_value
42
43
  finally:
44
+ handler.detach_workflow_type(token)
43
45
  handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
44
46
 
45
- @with_tracer_wrapper
46
- async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
47
- """Instruments and calls every function defined in TO_WRAP."""
48
-
49
- # Some Langchain objects are wrapped elsewhere, so we ignore them here
50
- if instance.__class__.__name__ in ("AgentExecutor"):
51
- return wrapped(*args, **kwargs)
52
-
53
- if hasattr(instance, "name") and instance.name:
54
- name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
55
- elif to_wrap.get("span_name"):
56
- name = to_wrap.get("span_name")
57
- else:
58
- name = get_fully_qualified_class_name(instance)
59
-
47
+ def span_processor(name: str, async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
48
+ # For singleton spans, eg OpenAI inference generate a workflow span to format the workflow specific attributes
60
49
  return_value = None
61
- try:
62
- handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
63
- if to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
64
- return_value = wrapped(*args, **kwargs)
50
+ with tracer.start_as_current_span(name) as span:
51
+ # Since Spanhandler can be overridden, ensure we set default monocle attributes.
52
+ SpanHandler.set_default_monocle_attributes(span)
53
+ if SpanHandler.is_root_span(span):
54
+ SpanHandler.set_workflow_properties(span, to_wrap)
55
+ if handler.is_non_workflow_root_span(span, to_wrap):
56
+ # This is a direct API call of a non-framework type, call the span_processor recursively for the actual span
57
+ return_value = span_processor(name, async_task, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
65
58
  else:
66
- with tracer.start_as_current_span(name) as span:
67
- handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
59
+ handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
60
+ if async_task:
61
+ return_value = async_wrapper(wrapped, None, None, *args, **kwargs)
62
+ else:
68
63
  return_value = wrapped(*args, **kwargs)
69
- handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
70
- handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
71
- return return_value
72
- finally:
73
- handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
64
+ handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
65
+ handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
66
+ return return_value
67
+
68
+ @with_tracer_wrapper
69
+ def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
70
+ return wrapper_processor(False, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
71
+
72
+ @with_tracer_wrapper
73
+ async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
74
+ return wrapper_processor(True, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
74
75
 
75
76
  @with_tracer_wrapper
76
77
  def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
@@ -82,13 +83,8 @@ def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instan
82
83
  remove_scope(token)
83
84
  return return_value
84
85
 
85
-
86
86
  @with_tracer_wrapper
87
87
  async def ascope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
88
88
  scope_name = to_wrap.get('scope_name', None)
89
- if scope_name:
90
- token = set_scope(scope_name)
91
- return_value = wrapped(*args, **kwargs)
92
- if scope_name:
93
- remove_scope(token)
89
+ return_value = async_wrapper(wrapped, scope_name, None, *args, **kwargs)
94
90
  return return_value
@@ -1,7 +1,7 @@
1
1
  # pylint: disable=too-few-public-methods
2
2
  from typing import Any, Dict
3
3
  from monocle_apptrace.instrumentation.common.wrapper import task_wrapper, scope_wrapper
4
- from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
4
+ from monocle_apptrace.instrumentation.common.span_handler import SpanHandler, NonFrameworkSpanHandler
5
5
  from monocle_apptrace.instrumentation.metamodel.botocore.methods import BOTOCORE_METHODS
6
6
  from monocle_apptrace.instrumentation.metamodel.botocore.handlers.botocore_span_handler import BotoCoreSpanHandler
7
7
  from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
@@ -26,13 +26,15 @@ class WrapperMethod:
26
26
  output_processor : str = None,
27
27
  wrapper_method = task_wrapper,
28
28
  span_handler = 'default',
29
- scope_name: str = None
29
+ scope_name: str = None,
30
+ span_type: str = None
30
31
  ):
31
32
  self.package = package
32
33
  self.object = object_name
33
34
  self.method = method
34
35
  self.span_name = span_name
35
36
  self.output_processor=output_processor
37
+ self.span_type = span_type
36
38
 
37
39
  self.span_handler:SpanHandler.__class__ = span_handler
38
40
  self.scope_name = scope_name
@@ -51,7 +53,8 @@ class WrapperMethod:
51
53
  'output_processor': self.output_processor,
52
54
  'wrapper_method': self.wrapper_method,
53
55
  'span_handler': self.span_handler,
54
- 'scope_name': self.scope_name
56
+ 'scope_name': self.scope_name,
57
+ 'span_type': self.span_type
55
58
  }
56
59
  return instance_dict
57
60
 
@@ -64,5 +67,6 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
64
67
  "default": SpanHandler(),
65
68
  "botocore_handler": BotoCoreSpanHandler(),
66
69
  "flask_handler": FlaskSpanHandler(),
67
- "request_handler": RequestSpanHandler()
70
+ "request_handler": RequestSpanHandler(),
71
+ "non_framework_handler": NonFrameworkSpanHandler()
68
72
  }
@@ -9,7 +9,7 @@ INFERENCE = {
9
9
  {
10
10
  "_comment": "provider type ,name , deployment , inference_endpoint",
11
11
  "attribute": "type",
12
- "accessor": lambda arguments: 'inference.azure_oai'
12
+ "accessor": lambda arguments: 'inference.azure_openai'
13
13
  },
14
14
  {
15
15
  "attribute": "provider_name",
@@ -37,6 +37,7 @@ HAYSTACK_METHODS = [
37
37
  "package": "haystack.core.pipeline.pipeline",
38
38
  "object": "Pipeline",
39
39
  "method": "run",
40
- "wrapper_method": task_wrapper
40
+ "wrapper_method": task_wrapper,
41
+ "span_type": "workflow"
41
42
  }
42
43
  ]
@@ -1,7 +1,7 @@
1
1
  from monocle_apptrace.instrumentation.metamodel.langchain import (
2
2
  _helper,
3
3
  )
4
- from monocle_apptrace.instrumentation.common.utils import resolve_from_alias
4
+ from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
5
5
 
6
6
  INFERENCE = {
7
7
  "type": "inference",
@@ -10,7 +10,8 @@ INFERENCE = {
10
10
  {
11
11
  "_comment": "provider type ,name , deployment , inference_endpoint",
12
12
  "attribute": "type",
13
- "accessor": lambda arguments: 'inference.azure_oai'
13
+ "accessor": lambda arguments: 'inference.' + (get_llm_type(arguments['instance']) or 'generic')
14
+
14
15
  },
15
16
  {
16
17
  "attribute": "provider_name",
@@ -11,13 +11,15 @@ LANGCHAIN_METHODS = [
11
11
  "package": "langchain.prompts.base",
12
12
  "object": "BasePromptTemplate",
13
13
  "method": "invoke",
14
- "wrapper_method": task_wrapper
14
+ "wrapper_method": task_wrapper,
15
+ "span_type": "workflow"
15
16
  },
16
17
  {
17
18
  "package": "langchain.prompts.base",
18
19
  "object": "BasePromptTemplate",
19
20
  "method": "ainvoke",
20
- "wrapper_method": atask_wrapper
21
+ "wrapper_method": atask_wrapper,
22
+ "span_type": "workflow"
21
23
  },
22
24
  {
23
25
  "package": "langchain.chat_models.base",
@@ -80,26 +82,30 @@ LANGCHAIN_METHODS = [
80
82
  "package": "langchain.schema",
81
83
  "object": "BaseOutputParser",
82
84
  "method": "invoke",
83
- "wrapper_method": task_wrapper
85
+ "wrapper_method": task_wrapper,
86
+ "span_type": "workflow"
84
87
  },
85
88
  {
86
89
  "package": "langchain.schema",
87
90
  "object": "BaseOutputParser",
88
91
  "method": "ainvoke",
89
- "wrapper_method": atask_wrapper
92
+ "wrapper_method": atask_wrapper,
93
+ "span_type": "workflow"
90
94
  },
91
95
  {
92
96
  "package": "langchain.schema.runnable",
93
97
  "object": "RunnableSequence",
94
98
  "method": "invoke",
95
99
  "span_name": "langchain.workflow",
96
- "wrapper_method": task_wrapper
100
+ "wrapper_method": task_wrapper,
101
+ "span_type": "workflow"
97
102
  },
98
103
  {
99
104
  "package": "langchain.schema.runnable",
100
105
  "object": "RunnableSequence",
101
106
  "method": "ainvoke",
102
107
  "span_name": "langchain.workflow",
103
- "wrapper_method": atask_wrapper
108
+ "wrapper_method": atask_wrapper,
109
+ "span_type": "workflow"
104
110
  }
105
111
  ]
@@ -1,7 +1,7 @@
1
1
  from monocle_apptrace.instrumentation.metamodel.llamaindex import (
2
2
  _helper,
3
3
  )
4
- from monocle_apptrace.instrumentation.common.utils import resolve_from_alias
4
+ from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
5
5
 
6
6
  INFERENCE = {
7
7
  "type": "inference",
@@ -10,11 +10,13 @@ INFERENCE = {
10
10
  {
11
11
  "_comment": "provider type ,name , deployment , inference_endpoint",
12
12
  "attribute": "type",
13
- "accessor": lambda arguments: 'inference.azure_oai'
13
+ "accessor": lambda arguments: 'inference.' + (get_llm_type(arguments['instance']) or 'generic')
14
+
14
15
  },
15
16
  {
16
17
  "attribute": "provider_name",
17
- "accessor": lambda arguments: _helper.extract_provider_name(arguments['instance'])
18
+ "accessor": lambda arguments: arguments['kwargs'].get('provider_name') or _helper.extract_provider_name(arguments['instance'])
19
+
18
20
  },
19
21
  {
20
22
  "attribute": "deployment",
@@ -30,14 +30,16 @@ LLAMAINDEX_METHODS = [
30
30
  "object": "BaseQueryEngine",
31
31
  "method": "query",
32
32
  "span_name": "llamaindex.query",
33
- "wrapper_method": task_wrapper
33
+ "wrapper_method": task_wrapper,
34
+ "span_type": "workflow"
34
35
  },
35
36
  {
36
37
  "package": "llama_index.core.base.base_query_engine",
37
38
  "object": "BaseQueryEngine",
38
39
  "method": "aquery",
39
40
  "span_name": "llamaindex.query",
40
- "wrapper_method": atask_wrapper
41
+ "wrapper_method": atask_wrapper,
42
+ "span_type": "workflow"
41
43
  },
42
44
  {
43
45
  "package": "llama_index.core.llms.custom",
@@ -53,7 +55,8 @@ LLAMAINDEX_METHODS = [
53
55
  "method": "achat",
54
56
  "span_name": "llamaindex.llmchat",
55
57
  "wrapper_method": atask_wrapper,
56
- "output_processor": INFERENCE
58
+ "output_processor": INFERENCE,
59
+
57
60
  },
58
61
  {
59
62
  "package": "llama_index.llms.openai.base",
@@ -32,7 +32,7 @@ def extract_messages(kwargs):
32
32
 
33
33
  def extract_assistant_message(response):
34
34
  try:
35
- if hasattr(response,"choices") and len(response.choices) >0:
35
+ if response is not None and hasattr(response,"choices") and len(response.choices) >0:
36
36
  if hasattr(response.choices[0],"message"):
37
37
  return response.choices[0].message.content
38
38
  except (IndexError, AttributeError) as e:
@@ -60,15 +60,20 @@ def resolve_from_alias(my_map, alias):
60
60
  return None
61
61
 
62
62
 
63
- def update_input_span_events(args):
64
- return args[0] if len(args) > 0 else ""
63
+ def update_input_span_events(kwargs):
64
+ if 'input' in kwargs and isinstance(kwargs['input'], list):
65
+ query = ' '.join(kwargs['input'])
66
+ return query
65
67
 
66
68
 
67
69
  def update_output_span_events(results):
68
- output_arg_text = " ".join([doc.page_content for doc in results if hasattr(doc, 'page_content')])
69
- if len(output_arg_text) > 100:
70
- output_arg_text = output_arg_text[:100] + "..."
71
- return output_arg_text
70
+ if hasattr(results,'data') and isinstance(results.data, list):
71
+ embeddings = results.data
72
+ embedding_strings = [f"index={e.index}, embedding={e.embedding}" for e in embeddings]
73
+ output = '\n'.join(embedding_strings)
74
+ if len(output) > 100:
75
+ output = output[:100] + "..."
76
+ return output
72
77
 
73
78
 
74
79
  def update_span_from_llm_response(response):
@@ -86,3 +91,22 @@ def update_span_from_llm_response(response):
86
91
  meta_dict.update({"total_tokens": getattr(response.usage, "total_tokens", None)})
87
92
  return meta_dict
88
93
 
94
+ def extract_vector_input(vector_input: dict):
95
+ if 'input' in vector_input:
96
+ return vector_input['input']
97
+ return ""
98
+
99
+ def extract_vector_output(vector_output):
100
+ try:
101
+ if hasattr(vector_output, 'data') and len(vector_output.data) > 0:
102
+ return vector_output.data[0].embedding
103
+ except Exception as e:
104
+ pass
105
+ return ""
106
+
107
+ def get_inference_type(instance):
108
+ inference_type: Option[str] = try_option(getattr, instance._client, '_api_version')
109
+ if inference_type.unwrap_or(None):
110
+ return 'azure_openai'
111
+ else:
112
+ return 'openai'
@@ -10,7 +10,7 @@ INFERENCE = {
10
10
  {
11
11
  "_comment": "provider type ,name , deployment , inference_endpoint",
12
12
  "attribute": "type",
13
- "accessor": lambda arguments: 'inference.azure_oai'
13
+ "accessor": lambda arguments: 'inference.' + (_helper.get_inference_type(arguments['instance'])) or 'openai'
14
14
  },
15
15
  {
16
16
  "attribute": "provider_name",
@@ -19,6 +19,25 @@ RETRIEVAL = {
19
19
  ]
20
20
  ],
21
21
  "events": [
22
-
22
+ {
23
+ "name": "data.input",
24
+ "attributes": [
25
+ {
26
+ "_comment": "this is instruction and user query to LLM",
27
+ "attribute": "input",
28
+ "accessor": lambda arguments: _helper.update_input_span_events(arguments['kwargs'])
29
+ }
30
+ ]
31
+ },
32
+ {
33
+ "name": "data.output",
34
+ "attributes": [
35
+ {
36
+ "_comment": "this is result from LLM",
37
+ "attribute": "response",
38
+ "accessor": lambda arguments: _helper.update_output_span_events(arguments['result'])
39
+ }
40
+ ]
41
+ }
23
42
  ]
24
43
  }
@@ -12,7 +12,15 @@ OPENAI_METHODS = [
12
12
  "object": "Completions",
13
13
  "method": "create",
14
14
  "wrapper_method": task_wrapper,
15
- "span_name": "openai_inference",
15
+ "span_handler": "non_framework_handler",
16
+ "output_processor": INFERENCE
17
+ },
18
+ {
19
+ "package": "openai.resources.chat.completions",
20
+ "object": "AsyncCompletions",
21
+ "method": "create",
22
+ "wrapper_method": atask_wrapper,
23
+ "span_handler": "non_framework_handler",
16
24
  "output_processor": INFERENCE
17
25
  },
18
26
  {
@@ -20,6 +28,18 @@ OPENAI_METHODS = [
20
28
  "object": "Embeddings",
21
29
  "method": "create",
22
30
  "wrapper_method": task_wrapper,
31
+ "span_name": "openai_embeddings",
32
+ "span_handler": "non_framework_handler",
33
+ "output_processor": RETRIEVAL
34
+ },
35
+ {
36
+ "package": "openai.resources.embeddings",
37
+ "object": "AsyncEmbeddings",
38
+ "method": "create",
39
+ "wrapper_method": atask_wrapper,
40
+ "span_name": "openai_embeddings",
41
+ "span_handler": "non_framework_handler",
23
42
  "output_processor": RETRIEVAL
24
43
  }
44
+
25
45
  ]
@@ -1,2 +1,4 @@
1
1
  from os import environ
2
- allowed_urls = environ.get('MONOCLE_TRACE_PROPAGATATION_URLS', ' ').split(',')
2
+ from monocle_apptrace.instrumentation.common.constants import TRACE_PROPOGATION_URLS
3
+ allowed_url_str = environ.get(TRACE_PROPOGATION_URLS, "")
4
+ allowed_urls:list[str] = [] if allowed_url_str == "" else allowed_url_str.split(',')
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: monocle_apptrace
3
- Version: 0.3.0b5
3
+ Version: 0.3.0b6
4
4
  Summary: package with monocle genAI tracing
5
5
  Project-URL: Homepage, https://github.com/monocle2ai/monocle
6
6
  Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -7,17 +7,17 @@ monocle_apptrace/exporters/file_exporter.py,sha256=gN9pJ_X5pcstVVsyivgHsjWhr443e
7
7
  monocle_apptrace/exporters/monocle_exporters.py,sha256=AxhZsTHjz2ZTuI-QOw1zk_bCKD899_EYyiEtCyAb1GA,2210
8
8
  monocle_apptrace/exporters/aws/s3_exporter.py,sha256=JMxtox61J6gUoEFsM1PJisBJPySMpm_U2Uv68WioKtE,7146
9
9
  monocle_apptrace/exporters/aws/s3_exporter_opendal.py,sha256=FvyW0KkAz0W_1g16C_ERmamg4fSreT-UXgLaN9URTVQ,5057
10
- monocle_apptrace/exporters/azure/blob_exporter.py,sha256=m7Hsw3OXlP2GOCQcdxf8LM6Fe12fZmih45x82Z12dbI,5597
10
+ monocle_apptrace/exporters/azure/blob_exporter.py,sha256=9wIGeKe8slUx69MFqj_Bl2xupNuUB_mCw9lu5g1i77A,6514
11
11
  monocle_apptrace/exporters/azure/blob_exporter_opendal.py,sha256=h5xv7JU6YEXL4AKT2B1op3YsHoA0rNnLCGq8seoVRWs,6114
12
- monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=p2rjStwo0OMEdHWQt_QvREpUWXbDm5jGx3qXeYai4_M,4407
12
+ monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=ENPcP2UB4DhM-2cKNWEqmPtcAkWB8mFmVBKVfkA8IuI,4405
13
13
  monocle_apptrace/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  monocle_apptrace/instrumentation/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
- monocle_apptrace/instrumentation/common/constants.py,sha256=5h3UHq-5Wz9wmwETZi950XMHnV3rSH18yTDUTlQZnyw,2158
16
- monocle_apptrace/instrumentation/common/instrumentor.py,sha256=mHiNA2Lcb3S0tiXjI78bHYZUpzCZziw98Y-n2x1E1Ck,11100
17
- monocle_apptrace/instrumentation/common/span_handler.py,sha256=vvKZkrXuFXqo0JDljLgv1nlrSWg8huFamqUoUl-e9Hw,7784
18
- monocle_apptrace/instrumentation/common/utils.py,sha256=PiqvYlBaxoxTlJ48iW5Uw3RJ9DERVxMxIL7c6tcsiJc,9463
19
- monocle_apptrace/instrumentation/common/wrapper.py,sha256=jPIiPIecTQXsnR6SVs7KtPfjq37MuBi5CeeZLPVz03w,3860
20
- monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=_j1JZ-K7Fo5UMuRypNui14n6Fc8zAMTS1zqslAfods4,2996
15
+ monocle_apptrace/instrumentation/common/constants.py,sha256=6H5oLxGUD0Gd4bvEGq-fKm-W-ULWQ0bMOQs4puz5--I,2676
16
+ monocle_apptrace/instrumentation/common/instrumentor.py,sha256=PiLOOArvLotnuhVqD5BIkZkY2cMJw2u8jSZYpi1_Wp0,11578
17
+ monocle_apptrace/instrumentation/common/span_handler.py,sha256=tH4FKh2RAbAqzuoKwxgY8CO6f8WydoxTv4zRdHEeJCA,9741
18
+ monocle_apptrace/instrumentation/common/utils.py,sha256=z-m_lR2zwIng76ewWVOHfpGF8olB5n-7pxInPOOEXh8,11064
19
+ monocle_apptrace/instrumentation/common/wrapper.py,sha256=_1iUi9UOp579BB_o76Om_OIlr8RNbrIEn2Vlxfg-RII,4353
20
+ monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=7k_rHOzbFRfeW40CMfa78wwyPVfSgcXiyDsgezjDcaA,3188
21
21
  monocle_apptrace/instrumentation/metamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
22
  monocle_apptrace/instrumentation/metamodel/botocore/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
23
  monocle_apptrace/instrumentation/metamodel/botocore/_helper.py,sha256=JIYtaN57OXKO9zPuxMZzDycJbgHgAQaQUkwuCI_SzF8,3744
@@ -30,15 +30,15 @@ monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=AcQ5F6_IDmu9P
30
30
  monocle_apptrace/instrumentation/metamodel/flask/methods.py,sha256=QkWHX4wKQf_GiJBHmiS9_JD2CiKMTCWMcig2dxAiKgU,340
31
31
  monocle_apptrace/instrumentation/metamodel/haystack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
32
  monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=VgTrKn7rZMcv4OVdVEBI76G-5B0Rux4guiI6Nsso14s,4833
33
- monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=4WwhZoPQBkV42TpBvn-rXu37xtaBRrw7_VZB3MGrfxE,1434
33
+ monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=1XpEfU8-cczTiH2KbxGgSm-27V7xk1j5LxVciWfNuJo,1467
34
34
  monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=PkCaaar5hbZH7YGtWisq8dUJqBINsFGmtaUgt11UDa4,3019
35
+ monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=bCAp8qpw2GGt1RRZcrucOGqP_Z9gkN8iCCQh6Mlf_Z0,3022
36
36
  monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py,sha256=nq3lsk2qFxXqwrAHsBt8zrh4ZVGAJABkPtylrjUCCqc,2357
37
37
  monocle_apptrace/instrumentation/metamodel/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
38
38
  monocle_apptrace/instrumentation/metamodel/langchain/_helper.py,sha256=g88Hz4n25ALJnjYFhdbdoIlSFUJUkN-8gho8ru7txEQ,4910
39
- monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=xEWO4uSiOnR221cvXESnVgAfC6JeExsP46ZkbK8_Yqs,3027
39
+ monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=hlLR43KXwiwYshvgoBrlqMOemFifhpgeR7smTb4zkCc,3225
40
40
  monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
- monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=wjW9hb1Qwr_aqY0lPehdPftyHtuvHinGxVmy0TVj5xo,2705
41
+ monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=2CNHloheb4LG7rPEUIF3E3M1cuc8CWVZf9J6l_hvK1E,2764
42
42
  monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py,sha256=r4UqTCT5vOfkbz9lwoTRoiMkUUJtPMwqOYbqo53A6K8,2039
43
43
  monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
44
44
  monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py,sha256=-XmAbhkgqwaunFZa-BP0zWZ3e-uD-ihSszbn5Cz75yc,2043
@@ -47,22 +47,22 @@ monocle_apptrace/instrumentation/metamodel/langgraph/entities/__init__.py,sha256
47
47
  monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py,sha256=OaPeQ8pkyEP5j6ad537MTPp0BdDI7nabxf60u66Dzbk,1659
48
48
  monocle_apptrace/instrumentation/metamodel/llamaindex/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
49
49
  monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py,sha256=5nqG-bSW3-ZEADZcwlHXIhhGZoTZu2a5Sc3Lo_AByeo,6199
50
- monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=r-o2zz9_ATfgKjUmIiaeSFQ774Vy4wGYueoVc-TqGMI,3061
50
+ monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=3Lr7C3GPQMScLX7gQTrPxU7hs8TTIYFTXApAGyB2yjU,3137
51
51
  monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
52
52
  monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py,sha256=g7IEwFMLjYvxljX7iHoYSPJW6k-wC7Z3i_y2qlNEZcs,1338
53
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=585hJXbdN2kFOnABv12vlzFkCbDExZln5ISvQI71EHw,2623
53
+ monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=Hich1AoEHnCUvh0MIISNOjbH9t71eex_IsY_4j3JN5U,2727
54
54
  monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py,sha256=QBF1nrqog5KHh925jiY2V-kejL6iVLKUowZmqUDoiJ4,1870
55
55
  monocle_apptrace/instrumentation/metamodel/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=nQSV8B000AfWjaIAKqkA5ZD5e-ozx3KIVTEBOYvoYxA,3066
57
- monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=Ahmi0wo6UUHcOXz-6U_6EPZ2bEKXnu5o8NUVrikz0w0,759
56
+ monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=VDjpKRXXbOTma3clD54SYG1TaMXr-To0S3yotp7_9aY,3877
57
+ monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=bQ0cW_9Ry5bKKsYGzatys-R6wBW3kpYha5QX328AWLM,1420
58
58
  monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
59
- monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=yu0a9DyBqGOGRQFcPgl1fOSCKu6Jc41Dxx9N0QuwVR0,2659
60
- monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=QICL56xZF0R84rCpf_Wj_1pMUOt6M8UzLRaICJnEQ7Y,755
61
- monocle_apptrace/instrumentation/metamodel/requests/__init__.py,sha256=A9M_SrBoVqjo2HENM9VtAdOzIme82lsoGt361bBKW84,101
59
+ monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=Egpx7ROZvwH6E3hqDWXa1gCXiNijnH3LD0HqQWhfspg,2716
60
+ monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=LU7aec302ZqPrs9MzFWU-JTnhK8OpYfgQKMmktlD6-8,1457
61
+ monocle_apptrace/instrumentation/metamodel/requests/__init__.py,sha256=mg04UgoPzzcH-cPOahYUqN9T-TolZyOZipnBwDg5TP8,250
62
62
  monocle_apptrace/instrumentation/metamodel/requests/_helper.py,sha256=lKU7py-M0eweHA_LWatwdyWbSGSlQNhScGZ43Xko7us,1115
63
63
  monocle_apptrace/instrumentation/metamodel/requests/methods.py,sha256=OJtosy_07xy01o5Qv-53--aCLQLkr82NZtyi2t6ZDEM,326
64
- monocle_apptrace-0.3.0b5.dist-info/METADATA,sha256=Tr4V0KMOc7nfKDTNDDE_OKc559DGJPULoacZEiG7EZE,6314
65
- monocle_apptrace-0.3.0b5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
66
- monocle_apptrace-0.3.0b5.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
67
- monocle_apptrace-0.3.0b5.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
68
- monocle_apptrace-0.3.0b5.dist-info/RECORD,,
64
+ monocle_apptrace-0.3.0b6.dist-info/METADATA,sha256=nJZcCp2923tMJsL-UyHaNCj7VLs1tYcb02pp5gIqJ4I,6314
65
+ monocle_apptrace-0.3.0b6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
66
+ monocle_apptrace-0.3.0b6.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
67
+ monocle_apptrace-0.3.0b6.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
68
+ monocle_apptrace-0.3.0b6.dist-info/RECORD,,