monocle-apptrace 0.1.1__py3-none-any.whl → 0.3.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of monocle-apptrace might be problematic.

Files changed (47)
  1. monocle_apptrace/botocore/__init__.py +9 -0
  2. monocle_apptrace/constants.py +18 -4
  3. monocle_apptrace/exporters/aws/s3_exporter.py +158 -0
  4. monocle_apptrace/exporters/azure/blob_exporter.py +125 -0
  5. monocle_apptrace/exporters/base_exporter.py +48 -0
  6. monocle_apptrace/exporters/exporter_processor.py +19 -0
  7. monocle_apptrace/exporters/monocle_exporters.py +27 -0
  8. monocle_apptrace/exporters/okahu/okahu_exporter.py +115 -0
  9. monocle_apptrace/haystack/__init__.py +4 -4
  10. monocle_apptrace/haystack/wrap_pipeline.py +3 -2
  11. monocle_apptrace/instrumentor.py +14 -17
  12. monocle_apptrace/langchain/__init__.py +6 -3
  13. monocle_apptrace/llamaindex/__init__.py +8 -7
  14. monocle_apptrace/message_processing.py +80 -0
  15. monocle_apptrace/metamodel/entities/README.md +33 -10
  16. monocle_apptrace/metamodel/entities/app_hosting_types.json +29 -0
  17. monocle_apptrace/metamodel/entities/entities.json +49 -0
  18. monocle_apptrace/metamodel/entities/inference_types.json +33 -0
  19. monocle_apptrace/metamodel/entities/model_types.json +41 -0
  20. monocle_apptrace/metamodel/entities/vector_store_types.json +25 -0
  21. monocle_apptrace/metamodel/entities/workflow_types.json +22 -0
  22. monocle_apptrace/metamodel/maps/attributes/inference/botocore_entities.json +27 -0
  23. monocle_apptrace/metamodel/maps/attributes/inference/haystack_entities.json +57 -0
  24. monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json +57 -0
  25. monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json +57 -0
  26. monocle_apptrace/metamodel/maps/attributes/retrieval/haystack_entities.json +31 -0
  27. monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json +31 -0
  28. monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json +31 -0
  29. monocle_apptrace/metamodel/maps/botocore_methods.json +13 -0
  30. monocle_apptrace/metamodel/maps/haystack_methods.json +26 -6
  31. monocle_apptrace/metamodel/maps/{lang_chain_methods.json → langchain_methods.json} +31 -8
  32. monocle_apptrace/metamodel/maps/{llama_index_methods.json → llamaindex_methods.json} +30 -8
  33. monocle_apptrace/metamodel/spans/span_example.json +1 -1
  34. monocle_apptrace/metamodel/spans/span_types.json +16 -0
  35. monocle_apptrace/utils.py +179 -20
  36. monocle_apptrace/wrap_common.py +350 -150
  37. monocle_apptrace/wrapper.py +5 -2
  38. {monocle_apptrace-0.1.1.dist-info → monocle_apptrace-0.3.0b1.dist-info}/METADATA +8 -3
  39. monocle_apptrace-0.3.0b1.dist-info/RECORD +48 -0
  40. {monocle_apptrace-0.1.1.dist-info → monocle_apptrace-0.3.0b1.dist-info}/WHEEL +1 -1
  41. monocle_apptrace/haystack/wrap_node.py +0 -27
  42. monocle_apptrace/haystack/wrap_openai.py +0 -44
  43. monocle_apptrace/metamodel/entities/entity_types.json +0 -157
  44. monocle_apptrace/metamodel/entities/entity_types.py +0 -51
  45. monocle_apptrace-0.1.1.dist-info/RECORD +0 -29
  46. {monocle_apptrace-0.1.1.dist-info → monocle_apptrace-0.3.0b1.dist-info}/licenses/LICENSE +0 -0
  47. {monocle_apptrace-0.1.1.dist-info → monocle_apptrace-0.3.0b1.dist-info}/licenses/NOTICE +0 -0
@@ -0,0 +1,31 @@
+ {
+ "type": "retrieval",
+ "attributes": [
+ [
+ {
+ "_comment": "vector store name and type",
+ "attribute": "name",
+ "accessor": "lambda arguments: type(arguments['instance']._vector_store).__name__"
+ },
+ {
+ "attribute": "type",
+ "accessor": "lambda arguments: 'vectorstore.'+type(arguments['instance']._vector_store).__name__"
+ },
+ {
+ "attribute": "deployment",
+ "accessor": "lambda arguments: get_vectorstore_deployment(arguments['instance']._vector_store)"
+ }
+ ],
+ [
+ {
+ "_comment": "embedding model name and type",
+ "attribute": "name",
+ "accessor": "lambda arguments: arguments['instance']._embed_model.model_name"
+ },
+ {
+ "attribute": "type",
+ "accessor": "lambda arguments: 'model.embedding.'+arguments['instance']._embed_model.model_name"
+ }
+ ]
+ ]
+ }
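
Each attribute entry above pairs a span attribute name with a string accessor that the instrumentation evaluates against the wrapped call's context. A minimal sketch of how one such entry could be applied, assuming a hypothetical arguments dict and stand-in classes (the real evaluation lives in wrap_common.py and may differ):

# Hypothetical illustration only: evaluating one accessor entry from the map above.
class FakeVectorStore:          # stand-in for a real vector store client
    pass

class FakeRetriever:            # stand-in for arguments['instance']
    _vector_store = FakeVectorStore()

entry = {
    "attribute": "name",
    "accessor": "lambda arguments: type(arguments['instance']._vector_store).__name__"
}

arguments = {"instance": FakeRetriever()}
accessor = eval(entry["accessor"])        # the map stores lambdas as strings
value = accessor(arguments)               # -> "FakeVectorStore"
print(f"entity.1.{entry['attribute']} = {value}")   # attribute naming as in span_example.json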

monocle_apptrace/metamodel/maps/botocore_methods.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "wrapper_methods": [
+ {
+ "package": "botocore.client",
+ "object": "ClientCreator",
+ "method": "create_client",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "task_wrapper",
+ "skip_span": true,
+ "output_processor": ["metamodel/maps/attributes/inference/botocore_entities.json"]
+ }
+ ]
+ }
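
This entry, like the haystack, langchain, and llamaindex maps that follow, tells the instrumentor which package/object/method to patch and which wrapper from wrap_common.py to apply; skip_span suppresses span creation for the patched call itself. A hedged sketch of the general mechanism using wrapt, with illustrative names (instrument_entry and the trivial wrapper are not the package's actual API):

# Illustrative sketch, not monocle_apptrace's implementation.
from wrapt import wrap_function_wrapper

entry = {
    "package": "botocore.client",
    "object": "ClientCreator",
    "method": "create_client"
}

def task_wrapper(wrapped, instance, args, kwargs):
    # A real wrapper would honor skip_span and run the configured
    # output_processor; this one simply delegates.
    return wrapped(*args, **kwargs)

def instrument_entry(entry, wrapper):
    # Monkey-patch the configured method with the wrapper callback.
    wrap_function_wrapper(entry["package"],
                          f"{entry['object']}.{entry['method']}",
                          wrapper)

# instrument_entry(entry, task_wrapper)   # requires botocore to be importable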

monocle_apptrace/metamodel/maps/haystack_methods.json CHANGED
@@ -1,25 +1,45 @@
  {
  "wrapper_methods" : [
+ {
+ "package": "haystack_integrations.components.retrievers.opensearch",
+ "object": "OpenSearchEmbeddingRetriever",
+ "method": "run",
+ "span_name": "haystack.retriever",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "task_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/haystack_entities.json"]
+ },
+ {
+ "package": "haystack.components.retrievers.in_memory",
+ "object": "InMemoryEmbeddingRetriever",
+ "method": "run",
+ "span_name": "haystack.retriever",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "task_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/haystack_entities.json"]
+ },
  {
  "package": "haystack.components.generators.openai",
  "object": "OpenAIGenerator",
  "method": "run",
- "wrapper_package": "haystack.wrap_openai",
- "wrapper_method": "wrap_openai"
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/haystack_entities.json"]
  },
  {
  "package": "haystack.components.generators.chat.openai",
  "object": "OpenAIChatGenerator",
  "method": "run",
- "wrapper_package": "haystack.wrap_openai",
- "wrapper_method": "wrap_openai"
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/haystack_entities.json"]
  },
  {
  "package": "haystack.core.pipeline.pipeline",
  "object": "Pipeline",
  "method": "run",
- "wrapper_package": "haystack.wrap_pipeline",
- "wrapper_method": "wrap"
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "task_wrapper"
  }
  ]
  }

monocle_apptrace/metamodel/maps/{lang_chain_methods.json → langchain_methods.json} CHANGED
@@ -1,4 +1,4 @@
- {
+ {
  "wrapper_methods" : [
  {
  "package": "langchain.prompts.base",
@@ -19,42 +19,65 @@
  "object": "BaseChatModel",
  "method": "invoke",
  "wrapper_package": "wrap_common",
- "wrapper_method": "llm_wrapper"
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
  },
  {
  "package": "langchain.chat_models.base",
  "object": "BaseChatModel",
  "method": "ainvoke",
  "wrapper_package": "wrap_common",
- "wrapper_method": "allm_wrapper"
+ "wrapper_method": "allm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
  },
  {
  "package": "langchain_core.language_models.llms",
  "object": "LLM",
  "method": "_generate",
  "wrapper_package": "wrap_common",
- "wrapper_method": "llm_wrapper"
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
  },
  {
  "package": "langchain_core.language_models.llms",
  "object": "LLM",
  "method": "_agenerate",
  "wrapper_package": "wrap_common",
- "wrapper_method": "llm_wrapper"
+ "wrapper_method": "allm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
+ },
+ {
+ "package": "langchain_core.language_models.llms",
+ "object": "BaseLLM",
+ "method": "invoke",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
+ },
+ {
+ "package": "langchain_core.language_models.llms",
+ "object": "BaseLLM",
+ "method": "ainvoke",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "allm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/langchain_entities.json"]
  },
  {
  "package": "langchain_core.retrievers",
  "object": "BaseRetriever",
  "method": "invoke",
  "wrapper_package": "wrap_common",
- "wrapper_method": "task_wrapper"
+ "wrapper_method": "task_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/langchain_entities.json"]
+
  },
  {
  "package": "langchain_core.retrievers",
  "object": "BaseRetriever",
  "method": "ainvoke",
  "wrapper_package": "wrap_common",
- "wrapper_method": "atask_wrapper"
+ "wrapper_method": "atask_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/langchain_entities.json"]
  },
  {
  "package": "langchain.schema",
@@ -103,4 +126,4 @@
  "wrapper_method": "atask_wrapper"
  }
  ]
- }
+ }

monocle_apptrace/metamodel/maps/{llama_index_methods.json → llamaindex_methods.json} CHANGED
@@ -6,7 +6,8 @@
  "method": "retrieve",
  "span_name": "llamaindex.retrieve",
  "wrapper_package": "wrap_common",
- "wrapper_method": "task_wrapper"
+ "wrapper_method": "task_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/llamaindex_entities.json"]
  },
  {
  "package": "llama_index.core.indices.base_retriever",
@@ -14,7 +15,8 @@
  "method": "aretrieve",
  "span_name": "llamaindex.retrieve",
  "wrapper_package": "wrap_common",
- "wrapper_method": "atask_wrapper"
+ "wrapper_method": "atask_wrapper",
+ "output_processor": ["metamodel/maps/attributes/retrieval/llamaindex_entities.json"]
  },
  {
  "package": "llama_index.core.base.base_query_engine",
@@ -38,7 +40,8 @@
  "method": "chat",
  "span_name": "llamaindex.llmchat",
  "wrapper_package": "wrap_common",
- "wrapper_method": "task_wrapper"
+ "wrapper_method": "task_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
  },
  {
  "package": "llama_index.core.llms.custom",
@@ -46,7 +49,8 @@
  "method": "achat",
  "span_name": "llamaindex.llmchat",
  "wrapper_package": "wrap_common",
- "wrapper_method": "atask_wrapper"
+ "wrapper_method": "atask_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
  },
  {
  "package": "llama_index.llms.openai.base",
@@ -55,8 +59,7 @@
  "span_name": "llamaindex.openai",
  "wrapper_package": "wrap_common",
  "wrapper_method": "llm_wrapper",
- "span_name_getter_package" : "llamaindex",
- "span_name_getter_mothod" : "get_llm_span_name_for_openai"
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
  },
  {
  "package": "llama_index.llms.openai.base",
@@ -64,7 +67,26 @@
  "method": "achat",
  "span_name": "llamaindex.openai",
  "wrapper_package": "wrap_common",
- "wrapper_method": "allm_wrapper"
+ "wrapper_method": "allm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
+ },
+ {
+ "package": "llama_index.llms.mistralai.base",
+ "object": "MistralAI",
+ "method": "chat",
+ "span_name": "llamaindex.mistralai",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "llm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
+ },
+ {
+ "package": "llama_index.llms.mistralai.base",
+ "object": "MistralAI",
+ "method": "achat",
+ "span_name": "llamaindex.mistralai",
+ "wrapper_package": "wrap_common",
+ "wrapper_method": "allm_wrapper",
+ "output_processor": ["metamodel/maps/attributes/inference/llamaindex_entities.json"]
  }
  ]
- }
+ }

monocle_apptrace/metamodel/spans/span_example.json CHANGED
@@ -13,7 +13,7 @@
  "status_code": "OK"
  },
  "attributes": {
- "span.type": "Retrieval",
+ "span.type": "retrieval",
  "entity.count": 2,
  "entity.1.name": "ChromaVectorStore",
  "entity.1.type": "vectorstore.chroma",

monocle_apptrace/metamodel/spans/span_types.json ADDED
@@ -0,0 +1,16 @@
+ {
+ "span_types" : [
+ {
+ "type": "inference",
+ "description": "Model inference span"
+ },
+ {
+ "type": "retrieval",
+ "description": "vector embedding retrieval"
+ },
+ {
+ "type": "workflow",
+ "description": "workflow orchetraction at top level"
+ }
+ ]
+ }
monocle_apptrace/utils.py CHANGED
@@ -2,8 +2,14 @@ import logging
  import json
  from importlib import import_module
  import os
- from opentelemetry.trace import Span
- from monocle_apptrace.constants import azure_service_map, aws_service_map
+ from opentelemetry.trace import NonRecordingSpan,Span
+ from opentelemetry.trace.propagation import _SPAN_KEY
+ from opentelemetry.context import (attach, detach,get_current)
+ from opentelemetry.context import attach, set_value, get_value
+ from monocle_apptrace.constants import service_name_map, service_type_map
+ from json.decoder import JSONDecodeError
+
+ logger = logging.getLogger(__name__)

  embedding_model_context = {}

@@ -21,12 +27,14 @@ def dont_throw(func):
  """
  # Obtain a logger specific to the function's module
  logger = logging.getLogger(func.__module__)
+
  # pylint: disable=inconsistent-return-statements
  def wrapper(*args, **kwargs):
  try:
  return func(*args, **kwargs)
  except Exception as ex:
  logger.warning("Failed to execute %s, error: %s", func.__name__, str(ex))
+
  return wrapper

  def with_tracer_wrapper(func):
@@ -34,8 +42,25 @@ def with_tracer_wrapper(func):

  def _with_tracer(tracer, to_wrap):
  def wrapper(wrapped, instance, args, kwargs):
- return func(tracer, to_wrap, wrapped, instance, args, kwargs)
-
+ token = None
+ try:
+ _parent_span_context = get_current()
+ if _parent_span_context is not None and _parent_span_context.get(_SPAN_KEY, None):
+ parent_span: Span = _parent_span_context.get(_SPAN_KEY, None)
+ is_invalid_span = isinstance(parent_span, NonRecordingSpan)
+ if is_invalid_span:
+ token = attach(context={})
+ except Exception as e:
+ logger.error("Exception in attaching parent context: %s", e)
+
+ val = func(tracer, to_wrap, wrapped, instance, args, kwargs)
+ # Detach the token if it was set
+ if token:
+ try:
+ detach(token=token)
+ except Exception as e:
+ logger.error("Exception in detaching parent context: %s", e)
+ return val
  return wrapper

  return _with_tracer
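
The reworked decorator now guards against a NonRecordingSpan parent (for example, an inert remote context) by attaching an empty context before calling the wrapped function and detaching it afterwards. A simplified usage sketch, importing the decorator from the module shown above and using a stand-in body for task_wrapper (the real one in wrap_common.py does much more):

# Simplified stand-in for a wrap_common-style wrapper; body is illustrative only.
from opentelemetry import trace
from monocle_apptrace.utils import with_tracer_wrapper

@with_tracer_wrapper
def task_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
    # to_wrap is one wrapper_methods entry from the *_methods.json maps.
    with tracer.start_as_current_span(to_wrap.get("span_name", "task")):
        return wrapped(*args, **kwargs)

# The instrumentor registers task_wrapper(tracer, to_wrap) as the wrapt
# callback for each configured method.
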
@@ -48,32 +73,81 @@ def resolve_from_alias(my_map, alias):
  return my_map[i]
  return None

- def load_wrapper_from_config(config_file_path: str, module_name: str = None):
- wrapper_methods = []
- with open(config_file_path, encoding='UTF-8') as config_file:
+ def load_output_processor(wrapper_method, attributes_config_base_path):
+ """Load the output processor from a file if the file path is provided and valid."""
+ logger = logging.getLogger()
+ output_processor_file_path = wrapper_method["output_processor"][0]
+ logger.info(f'Output processor file path is: {output_processor_file_path}')
+
+ if isinstance(output_processor_file_path, str) and output_processor_file_path: # Combined condition
+ if not attributes_config_base_path:
+ absolute_file_path = os.path.abspath(output_processor_file_path)
+ else:
+ absolute_file_path = os.path.join(attributes_config_base_path, output_processor_file_path)
+
+ logger.info(f'Absolute file path is: {absolute_file_path}')
+ try:
+ with open(absolute_file_path, encoding='UTF-8') as op_file:
+ wrapper_method["output_processor"] = json.load(op_file)
+ logger.info('Output processor loaded successfully.')
+ except FileNotFoundError:
+ logger.error(f"Error: File not found at {absolute_file_path}.")
+ except JSONDecodeError:
+ logger.error(f"Error: Invalid JSON content in the file {absolute_file_path}.")
+ except Exception as e:
+ logger.error(f"Error: An unexpected error occurred: {e}")
+ else:
+ logger.error("Invalid or missing output processor file path.")
+
+ def get_wrapper_methods_config(
+ wrapper_methods_config_path: str,
+ attributes_config_base_path: str = None
+ ):
+ parent_dir = os.path.dirname(os.path.join(os.path.dirname(__file__), '..'))
+ wrapper_methods_config = load_wrapper_methods_config_from_file(
+ wrapper_methods_config_path=os.path.join(parent_dir, wrapper_methods_config_path))
+ process_wrapper_method_config(
+ wrapper_methods_config=wrapper_methods_config,
+ attributes_config_base_path=attributes_config_base_path)
+ return wrapper_methods_config
+
+ def load_wrapper_methods_config_from_file(
+ wrapper_methods_config_path: str):
+ json_data = {}
+
+ with open(wrapper_methods_config_path, encoding='UTF-8') as config_file:
  json_data = json.load(config_file)
- wrapper_methods = json_data["wrapper_methods"]
- for wrapper_method in wrapper_methods:
+
+ return json_data["wrapper_methods"]
+
+ def process_wrapper_method_config(
+ wrapper_methods_config: str,
+ attributes_config_base_path: str = ""):
+ for wrapper_method in wrapper_methods_config:
+ if "wrapper_package" in wrapper_method and "wrapper_method" in wrapper_method:
  wrapper_method["wrapper"] = get_wrapper_method(
  wrapper_method["wrapper_package"], wrapper_method["wrapper_method"])
- if "span_name_getter_method" in wrapper_method :
+ if "span_name_getter_method" in wrapper_method:
  wrapper_method["span_name_getter"] = get_wrapper_method(
  wrapper_method["span_name_getter_package"],
  wrapper_method["span_name_getter_method"])
- return wrapper_methods
+ if "output_processor" in wrapper_method and wrapper_method["output_processor"]:
+ load_output_processor(wrapper_method, attributes_config_base_path)

  def get_wrapper_method(package_name: str, method_name: str):
  wrapper_module = import_module("monocle_apptrace." + package_name)
  return getattr(wrapper_module, method_name)

- def update_span_with_infra_name(span: Span, span_key: str):
- for key,val in azure_service_map.items():
- if key in os.environ:
- span.set_attribute(span_key, val)
- for key,val in aws_service_map.items():
- if key in os.environ:
- span.set_attribute(span_key, val)
-
+ def set_app_hosting_identifier_attribute(span, span_index):
+ return_value = 0
+ # Search env to indentify the infra service type, if found check env for service name if possible
+ for type_env, type_name in service_type_map.items():
+ if type_env in os.environ:
+ return_value = 1
+ span.set_attribute(f"entity.{span_index}.type", f"app_hosting.{type_name}")
+ entity_name_env = service_name_map.get(type_name, "unknown")
+ span.set_attribute(f"entity.{span_index}.name", os.environ.get(entity_name_env, "generic"))
+ return return_value

  def set_embedding_model(model_name: str):
  """
@@ -83,7 +157,6 @@ def set_embedding_model(model_name: str):
  """
  embedding_model_context['embedding_model'] = model_name

-
  def get_embedding_model() -> str:
  """
  Retrieves the embedding model from the global context.
@@ -91,3 +164,89 @@ def get_embedding_model() -> str:
  @return: The name of the embedding model, or 'unknown' if not set
  """
  return embedding_model_context.get('embedding_model', 'unknown')
+
+ def set_attribute(key: str, value: str):
+ """
+ Set a value in the global context for a given key.
+
+ Args:
+ key: The key for the context value to set.
+ value: The value to set for the given key.
+ """
+ attach(set_value(key, value))
+
+ def get_attribute(key: str) -> str:
+ """
+ Retrieve a value from the global context for a given key.
+
+ Args:
+ key: The key for the context value to retrieve.
+
+ Returns:
+ The value associated with the given key.
+ """
+ return get_value(key)
+
+ def flatten_dict(d, parent_key='', sep='_'):
+ items = []
+ for k, v in d.items():
+ new_key = f"{parent_key}{sep}{k}" if parent_key else k
+ if isinstance(v, dict):
+ items.extend(flatten_dict(v, new_key, sep=sep).items())
+ else:
+ items.append((new_key, v))
+ return dict(items)
+
+ def get_fully_qualified_class_name(instance):
+ if instance is None:
+ return None
+ module_name = instance.__class__.__module__
+ qualname = instance.__class__.__qualname__
+ return f"{module_name}.{qualname}"
+
+ # returns json path like key probe in a dictionary
+ def get_nested_value(data, keys):
+ for key in keys:
+ if isinstance(data, dict) and key in data:
+ data = data[key]
+ elif hasattr(data, key):
+ data = getattr(data, key)
+ else:
+ return None
+ return data
+
+ def get_workflow_name(span: Span) -> str:
+ try:
+ return get_value("workflow_name") or span.resource.attributes.get("service.name")
+ except Exception as e:
+ logger.exception(f"Error getting workflow name: {e}")
+ return None
+
+ def get_vectorstore_deployment(my_map):
+ if isinstance(my_map, dict):
+ if '_client_settings' in my_map:
+ client = my_map['_client_settings'].__dict__
+ host, port = get_keys_as_tuple(client, 'host', 'port')
+ if host:
+ return f"{host}:{port}" if port else host
+ keys_to_check = ['client', '_client']
+ host = get_host_from_map(my_map, keys_to_check)
+ if host:
+ return host
+ else:
+ if hasattr(my_map, 'client') and '_endpoint' in my_map.client.__dict__:
+ return my_map.client.__dict__['_endpoint']
+ host, port = get_keys_as_tuple(my_map.__dict__, 'host', 'port')
+ if host:
+ return f"{host}:{port}" if port else host
+ return None
+
+ def get_keys_as_tuple(dictionary, *keys):
+ return tuple(next((value for key, value in dictionary.items() if key.endswith(k) and value is not None), None) for k in keys)
+
+ def get_host_from_map(my_map, keys_to_check):
+ for key in keys_to_check:
+ seed_connections = get_nested_value(my_map, [key, 'transport', 'seed_connections'])
+ if seed_connections and 'host' in seed_connections[0].__dict__:
+ return seed_connections[0].__dict__['host']
+ return None
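
The remaining helpers are small, self-contained utilities. A quick illustration of what they return, assuming 0.3.0b1 is installed; the sample dictionary values are arbitrary:

# Demonstrating the new utility helpers added above (expected output in comments).
from monocle_apptrace.utils import (flatten_dict, get_nested_value,
                                    get_fully_qualified_class_name)

config = {"inference": {"provider": "openai", "model": "gpt-4"}}

print(flatten_dict(config))
# {'inference_provider': 'openai', 'inference_model': 'gpt-4'}

print(get_nested_value(config, ["inference", "model"]))
# gpt-4  (falls back to getattr() for objects, returns None when the path is missing)

print(get_fully_qualified_class_name("hello"))
# builtins.str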