monocle-apptrace 0.3.0b5__py3-none-any.whl → 0.3.0b7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (34)
  1. monocle_apptrace/__init__.py +1 -0
  2. monocle_apptrace/exporters/aws/s3_exporter.py +20 -6
  3. monocle_apptrace/exporters/aws/s3_exporter_opendal.py +22 -11
  4. monocle_apptrace/exporters/azure/blob_exporter.py +29 -8
  5. monocle_apptrace/exporters/azure/blob_exporter_opendal.py +23 -8
  6. monocle_apptrace/exporters/exporter_processor.py +128 -3
  7. monocle_apptrace/exporters/file_exporter.py +16 -0
  8. monocle_apptrace/exporters/monocle_exporters.py +10 -1
  9. monocle_apptrace/exporters/okahu/okahu_exporter.py +8 -6
  10. monocle_apptrace/instrumentation/__init__.py +1 -0
  11. monocle_apptrace/instrumentation/common/__init__.py +2 -0
  12. monocle_apptrace/instrumentation/common/constants.py +17 -0
  13. monocle_apptrace/instrumentation/common/instrumentor.py +136 -53
  14. monocle_apptrace/instrumentation/common/span_handler.py +92 -41
  15. monocle_apptrace/instrumentation/common/utils.py +84 -6
  16. monocle_apptrace/instrumentation/common/wrapper.py +43 -45
  17. monocle_apptrace/instrumentation/common/wrapper_method.py +8 -4
  18. monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +2 -1
  19. monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +1 -1
  20. monocle_apptrace/instrumentation/metamodel/haystack/methods.py +2 -1
  21. monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +3 -2
  22. monocle_apptrace/instrumentation/metamodel/langchain/methods.py +12 -6
  23. monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +5 -3
  24. monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +6 -3
  25. monocle_apptrace/instrumentation/metamodel/openai/_helper.py +31 -7
  26. monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +1 -1
  27. monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +20 -1
  28. monocle_apptrace/instrumentation/metamodel/openai/methods.py +21 -1
  29. monocle_apptrace/instrumentation/metamodel/requests/__init__.py +3 -1
  30. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b7.dist-info}/METADATA +1 -1
  31. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b7.dist-info}/RECORD +34 -34
  32. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b7.dist-info}/WHEEL +0 -0
  33. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b7.dist-info}/licenses/LICENSE +0 -0
  34. {monocle_apptrace-0.3.0b5.dist-info → monocle_apptrace-0.3.0b7.dist-info}/licenses/NOTICE +0 -0
@@ -1,21 +1,20 @@
 # pylint: disable=protected-access
 import logging
-
 from opentelemetry.trace import Tracer
+from opentelemetry.context import set_value, attach, detach, get_value
 
 from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
 from monocle_apptrace.instrumentation.common.utils import (
     get_fully_qualified_class_name,
     with_tracer_wrapper,
     set_scope,
-    remove_scope
+    remove_scope,
+    async_wrapper
 )
+from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY
 logger = logging.getLogger(__name__)
 
-
-@with_tracer_wrapper
-def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
-
+def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
     # Some Langchain objects are wrapped elsewhere, so we ignore them here
     if instance.__class__.__name__ in ("AgentExecutor"):
         return wrapped(*args, **kwargs)
@@ -28,49 +27,52 @@ def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instanc
     name = get_fully_qualified_class_name(instance)
 
     return_value = None
+    token = None
     try:
         handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
-        if to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
-            return_value = wrapped(*args, **kwargs)
-        else:
-            with tracer.start_as_current_span(name) as span:
-                handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+        skip_scan:bool = to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs)
+        if not to_wrap.get('skip_span'):
+            token = SpanHandler.attach_workflow_type(to_wrap=to_wrap)
+        if skip_scan:
+            if async_task:
+                return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
+            else:
                 return_value = wrapped(*args, **kwargs)
-                handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-                handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+        else:
+            return_value = span_processor(name, async_task, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
         return return_value
     finally:
+        handler.detach_workflow_type(token)
         handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
 
-@with_tracer_wrapper
-async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
-    """Instruments and calls every function defined in TO_WRAP."""
-
-    # Some Langchain objects are wrapped elsewhere, so we ignore them here
-    if instance.__class__.__name__ in ("AgentExecutor"):
-        return wrapped(*args, **kwargs)
-
-    if hasattr(instance, "name") and instance.name:
-        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
-    elif to_wrap.get("span_name"):
-        name = to_wrap.get("span_name")
-    else:
-        name = get_fully_qualified_class_name(instance)
-
+def span_processor(name: str, async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
+    # For singleton spans, eg OpenAI inference generate a workflow span to format the workflow specific attributes
     return_value = None
-    try:
-        handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
-        if to_wrap.get('skip_span') or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
-            return_value = wrapped(*args, **kwargs)
+    with tracer.start_as_current_span(name) as span:
+        # Since Spanhandler can be overridden, ensure we set default monocle attributes.
+        SpanHandler.set_default_monocle_attributes(span)
+        if SpanHandler.is_root_span(span):
+            SpanHandler.set_workflow_properties(span, to_wrap)
+        if handler.is_non_workflow_root_span(span, to_wrap):
+            # This is a direct API call of a non-framework type, call the span_processor recursively for the actual span
+            return_value = span_processor(name, async_task, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
         else:
-            with tracer.start_as_current_span(name) as span:
-                handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+            handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+            if async_task:
+                return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
+            else:
                 return_value = wrapped(*args, **kwargs)
-                handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-                handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-        return return_value
-    finally:
-        handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+            handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+            handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+    return return_value
+
+@with_tracer_wrapper
+def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
+    return wrapper_processor(False, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
+
+@with_tracer_wrapper
+async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
+    return wrapper_processor(True, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
 
 @with_tracer_wrapper
 def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
@@ -82,13 +84,9 @@ def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instan
         remove_scope(token)
     return return_value
 
-
 @with_tracer_wrapper
 async def ascope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
     scope_name = to_wrap.get('scope_name', None)
-    if scope_name:
-        token = set_scope(scope_name)
-    return_value = wrapped(*args, **kwargs)
-    if scope_name:
-        remove_scope(token)
+    scope_value = to_wrap.get('scope_value', None)
+    return_value = async_wrapper(wrapped, scope_name, scope_value, None, *args, **kwargs)
     return return_value
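
Note: the attach_workflow_type / detach_workflow_type pair introduced above follows the standard OpenTelemetry context-token discipline that the new opentelemetry.context import exposes. A minimal, generic sketch of that pattern (illustration only; "workflow.type" stands in for the WORKFLOW_TYPE_KEY constant, whose actual value lives in constants.py and is not shown in this diff):

    from opentelemetry.context import attach, detach, get_value, set_value

    # Attach a value to the current context and keep the token for cleanup.
    token = attach(set_value("workflow.type", "workflow.langchain"))
    try:
        # Anything running under this context can read the value back.
        current = get_value("workflow.type")
    finally:
        # Detaching restores the previous context even if the wrapped call raises.
        detach(token)
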
@@ -1,7 +1,7 @@
 # pylint: disable=too-few-public-methods
 from typing import Any, Dict
 from monocle_apptrace.instrumentation.common.wrapper import task_wrapper, scope_wrapper
-from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
+from monocle_apptrace.instrumentation.common.span_handler import SpanHandler, NonFrameworkSpanHandler
 from monocle_apptrace.instrumentation.metamodel.botocore.methods import BOTOCORE_METHODS
 from monocle_apptrace.instrumentation.metamodel.botocore.handlers.botocore_span_handler import BotoCoreSpanHandler
 from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
@@ -26,13 +26,15 @@ class WrapperMethod:
             output_processor : str = None,
             wrapper_method = task_wrapper,
             span_handler = 'default',
-            scope_name: str = None
+            scope_name: str = None,
+            span_type: str = None
             ):
         self.package = package
         self.object = object_name
         self.method = method
         self.span_name = span_name
         self.output_processor=output_processor
+        self.span_type = span_type
 
         self.span_handler:SpanHandler.__class__ = span_handler
         self.scope_name = scope_name
@@ -51,7 +53,8 @@ class WrapperMethod:
             'output_processor': self.output_processor,
             'wrapper_method': self.wrapper_method,
             'span_handler': self.span_handler,
-            'scope_name': self.scope_name
+            'scope_name': self.scope_name,
+            'span_type': self.span_type
         }
         return instance_dict
 
@@ -64,5 +67,6 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
     "default": SpanHandler(),
     "botocore_handler": BotoCoreSpanHandler(),
     "flask_handler": FlaskSpanHandler(),
-    "request_handler": RequestSpanHandler()
+    "request_handler": RequestSpanHandler(),
+    "non_framework_handler": NonFrameworkSpanHandler()
 }
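
Together with the span_type parameter added to WrapperMethod, the new "non_framework_handler" entry lets a custom wrap target declare its span type and handler when it is registered. A hedged sketch of building such an entry (the package/object/method below are placeholders, and how the WrapperMethod is passed into the instrumentor's setup call is outside this diff):

    from monocle_apptrace.instrumentation.common.wrapper import task_wrapper
    from monocle_apptrace.instrumentation.common.wrapper_method import WrapperMethod

    # Placeholder target: swap in the client package/object/method you want traced.
    custom_method = WrapperMethod(
        package="my_pkg.client",
        object_name="ChatClient",
        method="complete",
        span_name="my_pkg.chat",
        wrapper_method=task_wrapper,
        span_handler="non_framework_handler",
        span_type="workflow",
    )
    # The dict built by the method above now carries 'span_type' next to 'scope_name'.
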
@@ -1,3 +1,4 @@
+from opentelemetry.context import get_value, set_value, attach, detach
 from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
 
 class BotoCoreSpanHandler(SpanHandler):
@@ -22,4 +23,4 @@ class BotoCoreSpanHandler(SpanHandler):
     def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value):
         self._botocore_processor(to_wrap=to_wrap, wrapped=wrapped, instance=instance, return_value=return_value, args=args,
                                  kwargs=kwargs)
-        return super().pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        return super().pre_tracing(to_wrap, wrapped, instance, args, kwargs)
@@ -9,7 +9,7 @@ INFERENCE = {
         {
             "_comment": "provider type ,name , deployment , inference_endpoint",
             "attribute": "type",
-            "accessor": lambda arguments: 'inference.azure_oai'
+            "accessor": lambda arguments: 'inference.azure_openai'
         },
         {
             "attribute": "provider_name",
@@ -37,6 +37,7 @@ HAYSTACK_METHODS = [
         "package": "haystack.core.pipeline.pipeline",
         "object": "Pipeline",
         "method": "run",
-        "wrapper_method": task_wrapper
+        "wrapper_method": task_wrapper,
+        "span_type": "workflow"
     }
 ]
@@ -1,7 +1,7 @@
 from monocle_apptrace.instrumentation.metamodel.langchain import (
     _helper,
 )
-from monocle_apptrace.instrumentation.common.utils import resolve_from_alias
+from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
 
 INFERENCE = {
     "type": "inference",
@@ -10,7 +10,8 @@ INFERENCE = {
         {
             "_comment": "provider type ,name , deployment , inference_endpoint",
             "attribute": "type",
-            "accessor": lambda arguments: 'inference.azure_oai'
+            "accessor": lambda arguments: 'inference.' + (get_llm_type(arguments['instance']) or 'generic')
+
         },
         {
             "attribute": "provider_name",
@@ -11,13 +11,15 @@ LANGCHAIN_METHODS = [
         "package": "langchain.prompts.base",
         "object": "BasePromptTemplate",
         "method": "invoke",
-        "wrapper_method": task_wrapper
+        "wrapper_method": task_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "langchain.prompts.base",
         "object": "BasePromptTemplate",
         "method": "ainvoke",
-        "wrapper_method": atask_wrapper
+        "wrapper_method": atask_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "langchain.chat_models.base",
@@ -80,26 +82,30 @@ LANGCHAIN_METHODS = [
         "package": "langchain.schema",
         "object": "BaseOutputParser",
         "method": "invoke",
-        "wrapper_method": task_wrapper
+        "wrapper_method": task_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "langchain.schema",
         "object": "BaseOutputParser",
         "method": "ainvoke",
-        "wrapper_method": atask_wrapper
+        "wrapper_method": atask_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "langchain.schema.runnable",
         "object": "RunnableSequence",
         "method": "invoke",
         "span_name": "langchain.workflow",
-        "wrapper_method": task_wrapper
+        "wrapper_method": task_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "langchain.schema.runnable",
         "object": "RunnableSequence",
         "method": "ainvoke",
         "span_name": "langchain.workflow",
-        "wrapper_method": atask_wrapper
+        "wrapper_method": atask_wrapper,
+        "span_type": "workflow"
     }
 ]
@@ -1,7 +1,7 @@
 from monocle_apptrace.instrumentation.metamodel.llamaindex import (
     _helper,
 )
-from monocle_apptrace.instrumentation.common.utils import resolve_from_alias
+from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
 
 INFERENCE = {
     "type": "inference",
@@ -10,11 +10,13 @@ INFERENCE = {
         {
             "_comment": "provider type ,name , deployment , inference_endpoint",
             "attribute": "type",
-            "accessor": lambda arguments: 'inference.azure_oai'
+            "accessor": lambda arguments: 'inference.' + (get_llm_type(arguments['instance']) or 'generic')
+
         },
         {
             "attribute": "provider_name",
-            "accessor": lambda arguments: _helper.extract_provider_name(arguments['instance'])
+            "accessor": lambda arguments: arguments['kwargs'].get('provider_name') or _helper.extract_provider_name(arguments['instance'])
+
         },
         {
             "attribute": "deployment",
@@ -30,14 +30,16 @@ LLAMAINDEX_METHODS = [
         "object": "BaseQueryEngine",
         "method": "query",
         "span_name": "llamaindex.query",
-        "wrapper_method": task_wrapper
+        "wrapper_method": task_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "llama_index.core.base.base_query_engine",
         "object": "BaseQueryEngine",
         "method": "aquery",
         "span_name": "llamaindex.query",
-        "wrapper_method": atask_wrapper
+        "wrapper_method": atask_wrapper,
+        "span_type": "workflow"
     },
     {
         "package": "llama_index.core.llms.custom",
@@ -53,7 +55,8 @@ LLAMAINDEX_METHODS = [
         "method": "achat",
         "span_name": "llamaindex.llmchat",
         "wrapper_method": atask_wrapper,
-        "output_processor": INFERENCE
+        "output_processor": INFERENCE,
+
     },
     {
         "package": "llama_index.llms.openai.base",
@@ -32,7 +32,7 @@ def extract_messages(kwargs):
 
 def extract_assistant_message(response):
     try:
-        if hasattr(response,"choices") and len(response.choices) >0:
+        if response is not None and hasattr(response,"choices") and len(response.choices) >0:
             if hasattr(response.choices[0],"message"):
                 return response.choices[0].message.content
     except (IndexError, AttributeError) as e:
@@ -60,15 +60,20 @@ def resolve_from_alias(my_map, alias):
     return None
 
 
-def update_input_span_events(args):
-    return args[0] if len(args) > 0 else ""
+def update_input_span_events(kwargs):
+    if 'input' in kwargs and isinstance(kwargs['input'], list):
+        query = ' '.join(kwargs['input'])
+        return query
 
 
 def update_output_span_events(results):
-    output_arg_text = " ".join([doc.page_content for doc in results if hasattr(doc, 'page_content')])
-    if len(output_arg_text) > 100:
-        output_arg_text = output_arg_text[:100] + "..."
-    return output_arg_text
+    if hasattr(results,'data') and isinstance(results.data, list):
+        embeddings = results.data
+        embedding_strings = [f"index={e.index}, embedding={e.embedding}" for e in embeddings]
+        output = '\n'.join(embedding_strings)
+        if len(output) > 100:
+            output = output[:100] + "..."
+        return output
 
 
 def update_span_from_llm_response(response):
@@ -86,3 +91,22 @@ def update_span_from_llm_response(response):
         meta_dict.update({"total_tokens": getattr(response.usage, "total_tokens", None)})
     return meta_dict
 
+def extract_vector_input(vector_input: dict):
+    if 'input' in vector_input:
+        return vector_input['input']
+    return ""
+
+def extract_vector_output(vector_output):
+    try:
+        if hasattr(vector_output, 'data') and len(vector_output.data) > 0:
+            return vector_output.data[0].embedding
+    except Exception as e:
+        pass
+    return ""
+
+def get_inference_type(instance):
+    inference_type: Option[str] = try_option(getattr, instance._client, '_api_version')
+    if inference_type.unwrap_or(None):
+        return 'azure_openai'
+    else:
+        return 'openai'
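
These new helpers target the shape of an OpenAI embeddings response, where .data is a list of items carrying .index and .embedding. A small self-contained sketch of what they return (the stub object below is illustrative; in practice the value comes from the wrapped openai call):

    from types import SimpleNamespace
    from monocle_apptrace.instrumentation.metamodel.openai._helper import (
        extract_vector_input,
        extract_vector_output,
    )

    # Stub shaped like an openai embeddings response: .data holds items with .index/.embedding.
    fake_response = SimpleNamespace(data=[SimpleNamespace(index=0, embedding=[0.12, -0.03, 0.98])])

    print(extract_vector_input({"input": "what is monocle?"}))   # -> "what is monocle?"
    print(extract_vector_output(fake_response))                  # -> [0.12, -0.03, 0.98]
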
@@ -10,7 +10,7 @@ INFERENCE = {
         {
             "_comment": "provider type ,name , deployment , inference_endpoint",
             "attribute": "type",
-            "accessor": lambda arguments: 'inference.azure_oai'
+            "accessor": lambda arguments: 'inference.' + (_helper.get_inference_type(arguments['instance'])) or 'openai'
         },
         {
             "attribute": "provider_name",
@@ -19,6 +19,25 @@ RETRIEVAL = {
         ]
     ],
     "events": [
-
+        {
+            "name": "data.input",
+            "attributes": [
+                {
+                    "_comment": "this is instruction and user query to LLM",
+                    "attribute": "input",
+                    "accessor": lambda arguments: _helper.update_input_span_events(arguments['kwargs'])
+                }
+            ]
+        },
+        {
+            "name": "data.output",
+            "attributes": [
+                {
+                    "_comment": "this is result from LLM",
+                    "attribute": "response",
+                    "accessor": lambda arguments: _helper.update_output_span_events(arguments['result'])
+                }
+            ]
+        }
     ]
 }
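
As in the other metamodel files, each accessor receives a single arguments dict; the lambdas in this diff read at least the 'instance', 'kwargs' and 'result' keys from it (how the instrumentor assembles that dict is not part of this diff). A hypothetical evaluation of one event attribute, mirroring the accessors above:

    # Hypothetical accessor evaluation, mirroring the lambdas above.
    accessor = lambda arguments: arguments['kwargs'].get('input', '')

    arguments = {
        "instance": None,                         # the wrapped client object
        "kwargs": {"input": ["hello", "world"]},  # kwargs passed to the wrapped call
        "result": None,                           # return value of the wrapped call
    }
    print(accessor(arguments))                    # -> ['hello', 'world']
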
@@ -12,7 +12,15 @@ OPENAI_METHODS = [
         "object": "Completions",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_name": "openai_inference",
+        "span_handler": "non_framework_handler",
+        "output_processor": INFERENCE
+    },
+    {
+        "package": "openai.resources.chat.completions",
+        "object": "AsyncCompletions",
+        "method": "create",
+        "wrapper_method": atask_wrapper,
+        "span_handler": "non_framework_handler",
         "output_processor": INFERENCE
     },
     {
@@ -20,6 +28,18 @@ OPENAI_METHODS = [
         "object": "Embeddings",
         "method": "create",
         "wrapper_method": task_wrapper,
+        "span_name": "openai_embeddings",
+        "span_handler": "non_framework_handler",
+        "output_processor": RETRIEVAL
+    },
+    {
+        "package": "openai.resources.embeddings",
+        "object": "AsyncEmbeddings",
+        "method": "create",
+        "wrapper_method": atask_wrapper,
+        "span_name": "openai_embeddings",
+        "span_handler": "non_framework_handler",
         "output_processor": RETRIEVAL
     }
+
 ]
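
With the AsyncCompletions and AsyncEmbeddings entries, direct async OpenAI calls are now traced alongside the sync ones through the non-framework handler. A hedged usage sketch (assumes setup_monocle_telemetry from instrumentor.py is still the setup entry point, which this diff does not show, and that OPENAI_API_KEY is set; model names are placeholders):

    import asyncio
    from monocle_apptrace.instrumentation.common.instrumentor import setup_monocle_telemetry
    from openai import AsyncOpenAI

    # Instrumentation has to be in place before the client methods are called.
    setup_monocle_telemetry(workflow_name="openai.app")

    async def main():
        client = AsyncOpenAI()
        # Traced through the AsyncCompletions.create entry added above.
        chat = await client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": "Say hello"}],
        )
        # Traced through the AsyncEmbeddings.create entry added above.
        emb = await client.embeddings.create(
            model="text-embedding-3-small",
            input=["Say hello"],
        )
        return chat, emb

    asyncio.run(main())
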
@@ -1,2 +1,4 @@
 from os import environ
-allowed_urls = environ.get('MONOCLE_TRACE_PROPAGATATION_URLS', ' ').split(',')
+from monocle_apptrace.instrumentation.common.constants import TRACE_PROPOGATION_URLS
+allowed_url_str = environ.get(TRACE_PROPOGATION_URLS, "")
+allowed_urls:list[str] = [] if allowed_url_str == "" else allowed_url_str.split(',')
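
The rewritten parsing yields an empty list when the variable is unset or empty, instead of a list containing a blank entry. A small sketch of the new behaviour (the real environment-variable name is whatever TRACE_PROPOGATION_URLS resolves to in constants.py, which is not shown here; the name below is only a placeholder):

    import os

    def parse_allowed_urls(env_name: str = "MONOCLE_TRACE_PROPOGATION_URLS") -> list[str]:
        # Mirrors the new logic: unset/empty -> [], otherwise a comma-split list.
        raw = os.environ.get(env_name, "")
        return [] if raw == "" else raw.split(',')

    os.environ.pop("MONOCLE_TRACE_PROPOGATION_URLS", None)
    print(parse_allowed_urls())     # -> []
    os.environ["MONOCLE_TRACE_PROPOGATION_URLS"] = "http://localhost:8080,https://api.example.com"
    print(parse_allowed_urls())     # -> ['http://localhost:8080', 'https://api.example.com']
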
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: monocle_apptrace
-Version: 0.3.0b5
+Version: 0.3.0b7
 Summary: package with monocle genAI tracing
 Project-URL: Homepage, https://github.com/monocle2ai/monocle
 Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -1,44 +1,44 @@
 monocle_apptrace/README.md,sha256=T5NFC01bF8VR0oVnAX_n0bhsEtttwqfTxDNAe5Y_ivE,3765
-monocle_apptrace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+monocle_apptrace/__init__.py,sha256=XtoX7gHUSZgkY1nry8IFny8RslPhutZQUuEkqIrBzFQ,30
 monocle_apptrace/__main__.py,sha256=wBwV0fpwIuj9XSorPRP1MpkHHkZPM9Tg-lIFj1nokkU,609
 monocle_apptrace/exporters/base_exporter.py,sha256=Gov_QKp5fonVZ-YdNM2ynoPot7GCaSNmKbCHIP3bDlE,1680
-monocle_apptrace/exporters/exporter_processor.py,sha256=BTcBgMuFLHCdCgVvc9TKIo9y8g1BvShI0L4vX6Q-cmk,393
-monocle_apptrace/exporters/file_exporter.py,sha256=gN9pJ_X5pcstVVsyivgHsjWhr443eRa6Y6Hx1rGLQAM,2280
-monocle_apptrace/exporters/monocle_exporters.py,sha256=AxhZsTHjz2ZTuI-QOw1zk_bCKD899_EYyiEtCyAb1GA,2210
-monocle_apptrace/exporters/aws/s3_exporter.py,sha256=JMxtox61J6gUoEFsM1PJisBJPySMpm_U2Uv68WioKtE,7146
-monocle_apptrace/exporters/aws/s3_exporter_opendal.py,sha256=FvyW0KkAz0W_1g16C_ERmamg4fSreT-UXgLaN9URTVQ,5057
-monocle_apptrace/exporters/azure/blob_exporter.py,sha256=m7Hsw3OXlP2GOCQcdxf8LM6Fe12fZmih45x82Z12dbI,5597
-monocle_apptrace/exporters/azure/blob_exporter_opendal.py,sha256=h5xv7JU6YEXL4AKT2B1op3YsHoA0rNnLCGq8seoVRWs,6114
-monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=p2rjStwo0OMEdHWQt_QvREpUWXbDm5jGx3qXeYai4_M,4407
-monocle_apptrace/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/common/constants.py,sha256=5h3UHq-5Wz9wmwETZi950XMHnV3rSH18yTDUTlQZnyw,2158
-monocle_apptrace/instrumentation/common/instrumentor.py,sha256=mHiNA2Lcb3S0tiXjI78bHYZUpzCZziw98Y-n2x1E1Ck,11100
-monocle_apptrace/instrumentation/common/span_handler.py,sha256=vvKZkrXuFXqo0JDljLgv1nlrSWg8huFamqUoUl-e9Hw,7784
-monocle_apptrace/instrumentation/common/utils.py,sha256=PiqvYlBaxoxTlJ48iW5Uw3RJ9DERVxMxIL7c6tcsiJc,9463
-monocle_apptrace/instrumentation/common/wrapper.py,sha256=jPIiPIecTQXsnR6SVs7KtPfjq37MuBi5CeeZLPVz03w,3860
-monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=_j1JZ-K7Fo5UMuRypNui14n6Fc8zAMTS1zqslAfods4,2996
+monocle_apptrace/exporters/exporter_processor.py,sha256=-spCIJ_UfJ0fax_jE-ii3ODQBwtnHZgYIGVNd91Q718,6298
+monocle_apptrace/exporters/file_exporter.py,sha256=BSEYUb9Z_dascR9i_FL_HxnxnxjyxtR_5teoSjIpZQc,3198
+monocle_apptrace/exporters/monocle_exporters.py,sha256=TKULSQDZLIrf76NMhxYfsnG3vV11B1l2liI1wEWGaLo,2759
+monocle_apptrace/exporters/aws/s3_exporter.py,sha256=fvUUuukFM6hIliGqP61WXlVMFbxlIQtMgT3iwjUYDTA,8187
+monocle_apptrace/exporters/aws/s3_exporter_opendal.py,sha256=0aEUxdMgJaDUwqjw0DqlCMr8kjl01KgwUt3_RRCVFds,5917
+monocle_apptrace/exporters/azure/blob_exporter.py,sha256=75G8rcISQ0sZCECN2G67-DGFkJGGu2clNyrcoxEm9H8,7371
+monocle_apptrace/exporters/azure/blob_exporter_opendal.py,sha256=wQUtciyFMD28tpWTiP0-kBjUuxy4LuQSo04aMuHwtb4,7140
+monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=qj7paDHbWbYudH18xanUuxmhOHHlYEVj1kpzK7f2OTY,4601
+monocle_apptrace/instrumentation/__init__.py,sha256=oa412OuokRm9Vf3XlCJLqpZjz9ZcuxAKxnEBvOK7u2M,21
+monocle_apptrace/instrumentation/common/__init__.py,sha256=_YD94HPvDvHcrkt9Ll11BaHNzJ4W56GUJ7GPjp_diyA,223
+monocle_apptrace/instrumentation/common/constants.py,sha256=6H5oLxGUD0Gd4bvEGq-fKm-W-ULWQ0bMOQs4puz5--I,2676
+monocle_apptrace/instrumentation/common/instrumentor.py,sha256=v-ZriWJdHlSOWkwprlwDaxm6kOVKyqehZ3m_kbECm0k,15087
+monocle_apptrace/instrumentation/common/span_handler.py,sha256=WHvLc3TSqsrv62qJ_qclC57QT0bFoTCJ4hc-qe3SOYg,10229
+monocle_apptrace/instrumentation/common/utils.py,sha256=iGxvC8V-2uLbrhFG9u9NKOyHkbd1moIkg6ukujDT88Y,12023
+monocle_apptrace/instrumentation/common/wrapper.py,sha256=FNam-sz5gbTxa0Ym6-xyVhCA5HVAEObKDdQFubasIpU,4474
+monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=7k_rHOzbFRfeW40CMfa78wwyPVfSgcXiyDsgezjDcaA,3188
 monocle_apptrace/instrumentation/metamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/botocore/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/botocore/_helper.py,sha256=JIYtaN57OXKO9zPuxMZzDycJbgHgAQaQUkwuCI_SzF8,3744
 monocle_apptrace/instrumentation/metamodel/botocore/methods.py,sha256=LzmjbZjDWy7Uozc0chNjWG6PZhLngh_KJe5L6rw5rqI,452
 monocle_apptrace/instrumentation/metamodel/botocore/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py,sha256=JfTRmrxgU6e-b3dBbunWt5ObY_Ry_ZBYJBwKJB5UlJ8,2255
-monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py,sha256=_KJMFFhuetg3HpjewS3tmwS0K__P6uExKtqhXCBT5ws,1347
+monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py,sha256=Vfbx4g7P3_9iXXCySuqc2FOU_CTP-OZy7PHc7D2qOls,1419
 monocle_apptrace/instrumentation/metamodel/flask/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=AcQ5F6_IDmu9PXaeKKeiGIyq2I2YzA7wu1cvLzR-uyU,1175
 monocle_apptrace/instrumentation/metamodel/flask/methods.py,sha256=QkWHX4wKQf_GiJBHmiS9_JD2CiKMTCWMcig2dxAiKgU,340
 monocle_apptrace/instrumentation/metamodel/haystack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=VgTrKn7rZMcv4OVdVEBI76G-5B0Rux4guiI6Nsso14s,4833
-monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=4WwhZoPQBkV42TpBvn-rXu37xtaBRrw7_VZB3MGrfxE,1434
+monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=1XpEfU8-cczTiH2KbxGgSm-27V7xk1j5LxVciWfNuJo,1467
 monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=PkCaaar5hbZH7YGtWisq8dUJqBINsFGmtaUgt11UDa4,3019
+monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=bCAp8qpw2GGt1RRZcrucOGqP_Z9gkN8iCCQh6Mlf_Z0,3022
 monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py,sha256=nq3lsk2qFxXqwrAHsBt8zrh4ZVGAJABkPtylrjUCCqc,2357
 monocle_apptrace/instrumentation/metamodel/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langchain/_helper.py,sha256=g88Hz4n25ALJnjYFhdbdoIlSFUJUkN-8gho8ru7txEQ,4910
-monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=xEWO4uSiOnR221cvXESnVgAfC6JeExsP46ZkbK8_Yqs,3027
+monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=hlLR43KXwiwYshvgoBrlqMOemFifhpgeR7smTb4zkCc,3225
 monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=wjW9hb1Qwr_aqY0lPehdPftyHtuvHinGxVmy0TVj5xo,2705
+monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=2CNHloheb4LG7rPEUIF3E3M1cuc8CWVZf9J6l_hvK1E,2764
 monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py,sha256=r4UqTCT5vOfkbz9lwoTRoiMkUUJtPMwqOYbqo53A6K8,2039
 monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py,sha256=-XmAbhkgqwaunFZa-BP0zWZ3e-uD-ihSszbn5Cz75yc,2043
@@ -47,22 +47,22 @@ monocle_apptrace/instrumentation/metamodel/langgraph/entities/__init__.py,sha256
 monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py,sha256=OaPeQ8pkyEP5j6ad537MTPp0BdDI7nabxf60u66Dzbk,1659
 monocle_apptrace/instrumentation/metamodel/llamaindex/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py,sha256=5nqG-bSW3-ZEADZcwlHXIhhGZoTZu2a5Sc3Lo_AByeo,6199
-monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=r-o2zz9_ATfgKjUmIiaeSFQ774Vy4wGYueoVc-TqGMI,3061
+monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=3Lr7C3GPQMScLX7gQTrPxU7hs8TTIYFTXApAGyB2yjU,3137
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py,sha256=g7IEwFMLjYvxljX7iHoYSPJW6k-wC7Z3i_y2qlNEZcs,1338
-monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=585hJXbdN2kFOnABv12vlzFkCbDExZln5ISvQI71EHw,2623
+monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=Hich1AoEHnCUvh0MIISNOjbH9t71eex_IsY_4j3JN5U,2727
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py,sha256=QBF1nrqog5KHh925jiY2V-kejL6iVLKUowZmqUDoiJ4,1870
 monocle_apptrace/instrumentation/metamodel/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=nQSV8B000AfWjaIAKqkA5ZD5e-ozx3KIVTEBOYvoYxA,3066
-monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=Ahmi0wo6UUHcOXz-6U_6EPZ2bEKXnu5o8NUVrikz0w0,759
+monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=VDjpKRXXbOTma3clD54SYG1TaMXr-To0S3yotp7_9aY,3877
+monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=bQ0cW_9Ry5bKKsYGzatys-R6wBW3kpYha5QX328AWLM,1420
 monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=yu0a9DyBqGOGRQFcPgl1fOSCKu6Jc41Dxx9N0QuwVR0,2659
-monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=QICL56xZF0R84rCpf_Wj_1pMUOt6M8UzLRaICJnEQ7Y,755
-monocle_apptrace/instrumentation/metamodel/requests/__init__.py,sha256=A9M_SrBoVqjo2HENM9VtAdOzIme82lsoGt361bBKW84,101
+monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=Egpx7ROZvwH6E3hqDWXa1gCXiNijnH3LD0HqQWhfspg,2716
+monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=LU7aec302ZqPrs9MzFWU-JTnhK8OpYfgQKMmktlD6-8,1457
+monocle_apptrace/instrumentation/metamodel/requests/__init__.py,sha256=mg04UgoPzzcH-cPOahYUqN9T-TolZyOZipnBwDg5TP8,250
 monocle_apptrace/instrumentation/metamodel/requests/_helper.py,sha256=lKU7py-M0eweHA_LWatwdyWbSGSlQNhScGZ43Xko7us,1115
 monocle_apptrace/instrumentation/metamodel/requests/methods.py,sha256=OJtosy_07xy01o5Qv-53--aCLQLkr82NZtyi2t6ZDEM,326
-monocle_apptrace-0.3.0b5.dist-info/METADATA,sha256=Tr4V0KMOc7nfKDTNDDE_OKc559DGJPULoacZEiG7EZE,6314
-monocle_apptrace-0.3.0b5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-monocle_apptrace-0.3.0b5.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
-monocle_apptrace-0.3.0b5.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
-monocle_apptrace-0.3.0b5.dist-info/RECORD,,
+monocle_apptrace-0.3.0b7.dist-info/METADATA,sha256=sKSON_WUMObjmugvQx9wCLmoy_jj6yp5RwQgVHtQ6qE,6314
+monocle_apptrace-0.3.0b7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+monocle_apptrace-0.3.0b7.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
+monocle_apptrace-0.3.0b7.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
+monocle_apptrace-0.3.0b7.dist-info/RECORD,,