monocle-apptrace 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of monocle-apptrace might be problematic. Click here for more details.
- monocle_apptrace/__init__.py +1 -0
- monocle_apptrace/__main__.py +19 -0
- monocle_apptrace/exporters/aws/s3_exporter.py +50 -27
- monocle_apptrace/exporters/aws/s3_exporter_opendal.py +137 -0
- monocle_apptrace/exporters/azure/blob_exporter.py +30 -12
- monocle_apptrace/exporters/azure/blob_exporter_opendal.py +162 -0
- monocle_apptrace/exporters/base_exporter.py +19 -18
- monocle_apptrace/exporters/exporter_processor.py +128 -3
- monocle_apptrace/exporters/file_exporter.py +16 -0
- monocle_apptrace/exporters/monocle_exporters.py +48 -20
- monocle_apptrace/exporters/okahu/okahu_exporter.py +8 -6
- monocle_apptrace/instrumentation/__init__.py +1 -0
- monocle_apptrace/instrumentation/common/__init__.py +2 -0
- monocle_apptrace/instrumentation/common/constants.py +70 -0
- monocle_apptrace/instrumentation/common/instrumentor.py +362 -0
- monocle_apptrace/instrumentation/common/span_handler.py +220 -0
- monocle_apptrace/instrumentation/common/utils.py +356 -0
- monocle_apptrace/instrumentation/common/wrapper.py +92 -0
- monocle_apptrace/instrumentation/common/wrapper_method.py +72 -0
- monocle_apptrace/instrumentation/metamodel/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/_helper.py +95 -0
- monocle_apptrace/instrumentation/metamodel/botocore/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py +65 -0
- monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +26 -0
- monocle_apptrace/instrumentation/metamodel/botocore/methods.py +16 -0
- monocle_apptrace/instrumentation/metamodel/flask/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/flask/_helper.py +29 -0
- monocle_apptrace/instrumentation/metamodel/flask/methods.py +13 -0
- monocle_apptrace/instrumentation/metamodel/haystack/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +127 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +76 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py +61 -0
- monocle_apptrace/instrumentation/metamodel/haystack/methods.py +43 -0
- monocle_apptrace/instrumentation/metamodel/langchain/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langchain/_helper.py +127 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +72 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py +58 -0
- monocle_apptrace/instrumentation/metamodel/langchain/methods.py +111 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py +48 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py +56 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/methods.py +14 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py +172 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py +47 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +73 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py +57 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +101 -0
- monocle_apptrace/instrumentation/metamodel/openai/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py +112 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +71 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +43 -0
- monocle_apptrace/instrumentation/metamodel/openai/methods.py +45 -0
- monocle_apptrace/instrumentation/metamodel/requests/__init__.py +4 -0
- monocle_apptrace/instrumentation/metamodel/requests/_helper.py +31 -0
- monocle_apptrace/instrumentation/metamodel/requests/methods.py +12 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/METADATA +19 -2
- monocle_apptrace-0.3.0.dist-info/RECORD +68 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/WHEEL +1 -1
- monocle_apptrace/constants.py +0 -22
- monocle_apptrace/haystack/__init__.py +0 -9
- monocle_apptrace/haystack/wrap_node.py +0 -27
- monocle_apptrace/haystack/wrap_openai.py +0 -44
- monocle_apptrace/haystack/wrap_pipeline.py +0 -63
- monocle_apptrace/instrumentor.py +0 -121
- monocle_apptrace/langchain/__init__.py +0 -9
- monocle_apptrace/llamaindex/__init__.py +0 -16
- monocle_apptrace/metamodel/README.md +0 -47
- monocle_apptrace/metamodel/entities/README.md +0 -77
- monocle_apptrace/metamodel/entities/app_hosting_types.json +0 -29
- monocle_apptrace/metamodel/entities/entities.json +0 -49
- monocle_apptrace/metamodel/entities/inference_types.json +0 -33
- monocle_apptrace/metamodel/entities/model_types.json +0 -41
- monocle_apptrace/metamodel/entities/vector_store_types.json +0 -25
- monocle_apptrace/metamodel/entities/workflow_types.json +0 -22
- monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json +0 -35
- monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json +0 -35
- monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json +0 -27
- monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json +0 -27
- monocle_apptrace/metamodel/maps/haystack_methods.json +0 -25
- monocle_apptrace/metamodel/maps/langchain_methods.json +0 -129
- monocle_apptrace/metamodel/maps/llamaindex_methods.json +0 -74
- monocle_apptrace/metamodel/spans/README.md +0 -121
- monocle_apptrace/metamodel/spans/span_example.json +0 -140
- monocle_apptrace/metamodel/spans/span_format.json +0 -55
- monocle_apptrace/metamodel/spans/span_types.json +0 -16
- monocle_apptrace/utils.py +0 -172
- monocle_apptrace/wrap_common.py +0 -417
- monocle_apptrace/wrapper.py +0 -26
- monocle_apptrace-0.2.0.dist-info/RECORD +0 -44
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/utils.py
DELETED
|
@@ -1,172 +0,0 @@
|
|
|
1
|
-
import json
import logging
import os
from functools import wraps
from importlib import import_module
from json.decoder import JSONDecodeError

from opentelemetry.context import attach, get_value, set_value
from opentelemetry.trace import Span

from monocle_apptrace.constants import aws_service_map, azure_service_map
|
|
9
|
-
logger = logging.getLogger(__name__)
|
|
10
|
-
|
|
11
|
-
embedding_model_context = {}
|
|
12
|
-
|
|
13
|
-
def set_span_attribute(span, name, value):
    """Set attribute *name* on *span*, skipping None and empty-string values."""
    if value is not None and value != "":
        span.set_attribute(name, value)
|
|
17
|
-
|
|
18
|
-
def dont_throw(func):
    """
    A decorator that wraps the passed in function and logs exceptions instead of throwing them.

    On failure the wrapper logs a warning and returns None.

    @param func: The function to wrap
    @return: The wrapper function
    """
    # Obtain a logger specific to the function's module so the log record
    # points at the failing code, not at this utility module.
    func_logger = logging.getLogger(func.__module__)

    # functools.wraps preserves __name__/__doc__ on the wrapper; the original
    # lost them, which broke introspection and made log output confusing.
    @wraps(func)
    # pylint: disable=inconsistent-return-statements
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as ex:
            func_logger.warning("Failed to execute %s, error: %s", func.__name__, str(ex))

    return wrapper
|
|
36
|
-
|
|
37
|
-
def with_tracer_wrapper(func):
    """Helper for providing tracer for wrapper functions."""

    def _with_tracer(tracer, to_wrap):
        # Close over tracer/to_wrap so the returned callable exposes the
        # (wrapped, instance, args, kwargs) signature expected by the
        # instrumentation machinery.
        def _invoke(wrapped, instance, args, kwargs):
            return func(tracer, to_wrap, wrapped, instance, args, kwargs)

        return _invoke

    return _with_tracer
|
|
47
|
-
|
|
48
|
-
def resolve_from_alias(my_map, alias):
    """Return the value for the first alias present in *my_map*.

    @param my_map: mapping to search
    @param alias: iterable of candidate keys, in priority order
    @return: value for the first key found, or None when no alias matches
    """
    for key in alias:
        # `in my_map` instead of `in my_map.keys()` — same semantics, idiomatic.
        if key in my_map:
            return my_map[key]
    return None
|
|
55
|
-
|
|
56
|
-
def load_output_processor(wrapper_method, attributes_config_base_path):
    """Load the output processor from a file if the file path is provided and valid.

    wrapper_method["output_processor"] is expected to hold the file path as its
    first element; on success that entry is replaced in place by the parsed
    JSON. All failures are logged and leave the entry untouched.

    @param wrapper_method: config dict with an "output_processor" entry
    @param attributes_config_base_path: optional base directory for relative paths
    """
    # Use a module-scoped logger instead of the root logger so records are
    # attributable; lazy %-args avoid formatting when the level is disabled.
    op_logger = logging.getLogger(__name__)
    output_processor_file_path = wrapper_method["output_processor"][0]
    op_logger.info("Output processor file path is: %s", output_processor_file_path)

    if isinstance(output_processor_file_path, str) and output_processor_file_path:  # Combined condition
        if not attributes_config_base_path:
            absolute_file_path = os.path.abspath(output_processor_file_path)
        else:
            absolute_file_path = os.path.join(attributes_config_base_path, output_processor_file_path)

        op_logger.info("Absolute file path is: %s", absolute_file_path)
        try:
            with open(absolute_file_path, encoding='UTF-8') as op_file:
                wrapper_method["output_processor"] = json.load(op_file)
            op_logger.info("Output processor loaded successfully.")
        except FileNotFoundError:
            op_logger.error("Error: File not found at %s.", absolute_file_path)
        except JSONDecodeError:
            op_logger.error("Error: Invalid JSON content in the file %s.", absolute_file_path)
        except Exception as e:
            op_logger.error("Error: An unexpected error occurred: %s", e)
    else:
        op_logger.error("Invalid or missing output processor file path.")
|
|
81
|
-
|
|
82
|
-
def get_wrapper_methods_config(
        wrapper_methods_config_path: str,
        attributes_config_base_path: str = None):
    """Load the wrapper-method config (path taken relative to this package's
    parent directory) and resolve each entry's callables/JSON in place."""
    parent_dir = os.path.dirname(os.path.join(os.path.dirname(__file__), '..'))
    config_entries = load_wrapper_methods_config_from_file(
        wrapper_methods_config_path=os.path.join(parent_dir, wrapper_methods_config_path))
    process_wrapper_method_config(
        wrapper_methods_config=config_entries,
        attributes_config_base_path=attributes_config_base_path)
    return config_entries
|
|
93
|
-
|
|
94
|
-
def load_wrapper_methods_config_from_file(
        wrapper_methods_config_path: str):
    """Read the JSON config file and return its "wrapper_methods" list."""
    with open(wrapper_methods_config_path, encoding='UTF-8') as config_file:
        return json.load(config_file)["wrapper_methods"]
|
|
102
|
-
|
|
103
|
-
def process_wrapper_method_config(
        wrapper_methods_config: list,
        attributes_config_base_path: str = ""):
    """Resolve the symbolic references inside each wrapper-method entry, in place.

    For every entry: "wrapper_package"/"wrapper_method" are resolved to a
    callable stored under "wrapper"; the optional span-name getter is resolved
    to "span_name_getter"; and an "output_processor" file path is loaded.
    """
    for wrapper_method in wrapper_methods_config:
        if "wrapper_package" in wrapper_method and "wrapper_method" in wrapper_method:
            wrapper_method["wrapper"] = get_wrapper_method(
                wrapper_method["wrapper_package"], wrapper_method["wrapper_method"])
        # NOTE(review): guards only on "span_name_getter_method" but also reads
        # "span_name_getter_package" — an entry with just the method key would
        # raise KeyError. Confirm the two keys always travel together.
        if "span_name_getter_method" in wrapper_method:
            wrapper_method["span_name_getter"] = get_wrapper_method(
                wrapper_method["span_name_getter_package"],
                wrapper_method["span_name_getter_method"])
        if "output_processor" in wrapper_method and wrapper_method["output_processor"]:
            load_output_processor(wrapper_method, attributes_config_base_path)
|
|
116
|
-
|
|
117
|
-
def get_wrapper_method(package_name: str, method_name: str):
    """Import monocle_apptrace.<package_name> and return its *method_name* attribute."""
    wrapper_module = import_module(f"monocle_apptrace.{package_name}")
    return getattr(wrapper_module, method_name)
|
|
120
|
-
|
|
121
|
-
def update_span_with_infra_name(span: Span, span_key: str):
    """Tag *span* with the hosting service name inferred from env-var markers.

    Azure markers are checked first, then AWS; a later match overwrites an
    earlier one, mirroring the original two-loop behavior.
    """
    for service_map in (azure_service_map, aws_service_map):
        for env_marker, service_name in service_map.items():
            if env_marker in os.environ:
                span.set_attribute(span_key, service_name)
|
|
128
|
-
|
|
129
|
-
def set_embedding_model(model_name: str):
    """
    Sets the embedding model in the global context.

    @param model_name: The name of the embedding model to set
    """
    # Stored in the module-level embedding_model_context dict so retriever
    # instrumentation can read it back later via get_embedding_model().
    embedding_model_context['embedding_model'] = model_name
|
|
136
|
-
|
|
137
|
-
def get_embedding_model() -> str:
    """
    Retrieves the embedding model from the global context.

    @return: The name of the embedding model, or 'unknown' if not set
    """
    return embedding_model_context.get('embedding_model', 'unknown')
|
|
144
|
-
|
|
145
|
-
def set_attribute(key: str, value: str):
    """
    Set a value in the global context for a given key.

    Args:
        key: The key for the context value to set.
        value: The value to set for the given key.
    """
    # attach() makes the updated OpenTelemetry context current; the detach
    # token it returns is deliberately discarded, so the value persists for
    # the remainder of the current execution context.
    attach(set_value(key, value))
|
|
154
|
-
|
|
155
|
-
def get_attribute(key: str) -> str:
    """
    Retrieve a value from the global context for a given key.

    Args:
        key: The key for the context value to retrieve.

    Returns:
        The value associated with the given key, or None if it was never set.
    """
    # Reads from the current OpenTelemetry context (see set_attribute).
    return get_value(key)
|
|
166
|
-
|
|
167
|
-
def get_workflow_name(span: Span) -> str:
    """Return the workflow name from the OpenTelemetry context, falling back
    to the span resource's service.name; returns None after logging on error."""
    try:
        # Fallback relies on span.resource being present — presumably SDK
        # spans only; NonRecording spans would hit the except path. TODO confirm.
        return get_value("workflow_name") or span.resource.attributes.get("service.name")
    except Exception as e:
        logger.exception(f"Error getting workflow name: {e}")
        return None
|
monocle_apptrace/wrap_common.py
DELETED
|
@@ -1,417 +0,0 @@
|
|
|
1
|
-
# pylint: disable=protected-access
|
|
2
|
-
import logging
|
|
3
|
-
import os
|
|
4
|
-
import inspect
|
|
5
|
-
from urllib.parse import urlparse
|
|
6
|
-
from opentelemetry.trace import Span, Tracer
|
|
7
|
-
from monocle_apptrace.utils import resolve_from_alias, update_span_with_infra_name, with_tracer_wrapper, get_embedding_model, get_attribute, get_workflow_name
|
|
8
|
-
from monocle_apptrace.utils import set_attribute
|
|
9
|
-
from opentelemetry.context import get_value, attach, set_value
|
|
10
|
-
logger = logging.getLogger(__name__)
|
|
11
|
-
WORKFLOW_TYPE_KEY = "workflow_type"
|
|
12
|
-
DATA_INPUT_KEY = "data.input"
|
|
13
|
-
DATA_OUTPUT_KEY = "data.output"
|
|
14
|
-
PROMPT_INPUT_KEY = "data.input"
|
|
15
|
-
PROMPT_OUTPUT_KEY = "data.output"
|
|
16
|
-
QUERY = "question"
|
|
17
|
-
RESPONSE = "response"
|
|
18
|
-
SESSION_PROPERTIES_KEY = "session"
|
|
19
|
-
INFRA_SERVICE_KEY = "infra_service_name"
|
|
20
|
-
|
|
21
|
-
TYPE = "type"
|
|
22
|
-
PROVIDER = "provider_name"
|
|
23
|
-
EMBEDDING_MODEL = "embedding_model"
|
|
24
|
-
VECTOR_STORE = 'vector_store'
|
|
25
|
-
META_DATA = 'metadata'
|
|
26
|
-
|
|
27
|
-
WORKFLOW_TYPE_MAP = {
|
|
28
|
-
"llama_index": "workflow.llamaindex",
|
|
29
|
-
"langchain": "workflow.langchain",
|
|
30
|
-
"haystack": "workflow.haystack"
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
def get_embedding_model_for_vectorstore(instance):
    """Best-effort lookup of the embedding model name behind a retriever.

    Handles Langchain-style retrievers (``.vectorstore``), a Sagemaker
    endpoint special case, and llama_index retrievers (``._embed_model``).
    Returns "Unknown Embedding Model" when nothing matches.
    """
    # Handle Langchain or other frameworks where vectorstore exists
    if hasattr(instance, 'vectorstore'):
        vectorstore_dict = instance.vectorstore.__dict__

        # Use inspect to check if the embedding function is from Sagemaker
        if 'embedding_func' in vectorstore_dict:
            embedding_func = vectorstore_dict['embedding_func']
            class_name = embedding_func.__class__.__name__
            file_location = inspect.getfile(embedding_func.__class__)

            # Check if the class is SagemakerEndpointEmbeddings; the file
            # location check guards against same-named third-party classes.
            if class_name == 'SagemakerEndpointEmbeddings' and 'langchain_community' in file_location:
                # Set embedding_model as endpoint_name if it's Sagemaker
                if hasattr(embedding_func, 'endpoint_name'):
                    return embedding_func.endpoint_name

        # Default to the regular embedding model if not Sagemaker.
        # NOTE(review): raises AttributeError if the store has no
        # .embeddings.model chain — presumably callers only reach here for
        # stores that do; confirm.
        return instance.vectorstore.embeddings.model

    # Handle llama_index where _embed_model is present
    if hasattr(instance, '_embed_model') and hasattr(instance._embed_model, 'model_name'):
        return instance._embed_model.model_name

    # Fallback if no specific model is found
    return "Unknown Embedding Model"
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
# Maps a retriever's defining package to a callable that extracts vector-store
# metadata (provider class name, embedding model, entity type) from a live
# retriever instance.
framework_vector_store_mapping = {
    'langchain_core.retrievers': lambda instance: {
        'provider': type(instance.vectorstore).__name__,
        'embedding_model': get_embedding_model_for_vectorstore(instance),
        'type': VECTOR_STORE,
    },
    'llama_index.core.indices.base_retriever': lambda instance: {
        'provider': type(instance._vector_store).__name__,
        'embedding_model': get_embedding_model_for_vectorstore(instance),
        'type': VECTOR_STORE,
    },
    'haystack.components.retrievers.in_memory': lambda instance: {
        # Haystack exposes the store on the component's dict; the embedding
        # model is read from the global context set earlier in the pipeline.
        'provider': instance.__dict__.get("document_store").__class__.__name__,
        'embedding_model': get_embedding_model(),
        'type': VECTOR_STORE,
    },
}
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
@with_tracer_wrapper
def task_wrapper(tracer: Tracer, to_wrap, wrapped, instance, args, kwargs):
    """Instruments and calls every function defined in TO_WRAP."""

    # Some Langchain objects are wrapped elsewhere, so we ignore them here.
    # BUG FIX: the original used `in ("AgentExecutor")`, which is a plain
    # string, so `in` did substring matching — a class named e.g. "Agent"
    # was silently skipped too. A one-element tuple restores exact matching.
    if instance.__class__.__name__ in ("AgentExecutor",):
        return wrapped(*args, **kwargs)

    # Span name: instance's own name > configured span_name > class fallback.
    if hasattr(instance, "name") and instance.name:
        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
    elif to_wrap.get("span_name"):
        name = to_wrap.get("span_name")
    else:
        name = f"langchain.task.{instance.__class__.__name__}"

    with tracer.start_as_current_span(name) as span:
        process_span(to_wrap, span, instance, args)
        pre_task_processing(to_wrap, instance, args, span)
        return_value = wrapped(*args, **kwargs)
        post_task_processing(to_wrap, span, return_value)

    return return_value
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
def process_span(to_wrap, span, instance, args):
    """Populate *span* with entity attributes driven by the entry's output_processor JSON.

    Root spans additionally get the workflow name/type as entity 1.
    """
    # Check if the output_processor is a valid JSON (in Python, that means it's a dictionary)
    span_index = 1
    if is_root_span(span):
        workflow_name = get_workflow_name(span)
        if workflow_name:
            span.set_attribute(f"entity.{span_index}.name", workflow_name)
        # workflow type
        package_name = to_wrap.get('package')
        for (package, workflow_type) in WORKFLOW_TYPE_MAP.items():
            if (package_name is not None and package in package_name):
                span.set_attribute(f"entity.{span_index}.type", workflow_type)
        span_index += 1
    if 'output_processor' in to_wrap:
        output_processor=to_wrap['output_processor']
        if isinstance(output_processor, dict) and len(output_processor) > 0:
            if 'type' in output_processor:
                span.set_attribute("span.type", output_processor['type'])
            else:
                logger.warning("type of span not found or incorrect written in entity json")
            count = 0
            if 'attributes' in output_processor:
                count = len(output_processor["attributes"])
                span.set_attribute("entity.count", count)
                span_index = 1
                for processors in output_processor["attributes"]:
                    for processor in processors:
                        attribute = processor.get('attribute')
                        accessor = processor.get('accessor')

                        if attribute and accessor:
                            attribute_name = f"entity.{span_index}.{attribute}"
                            try:
                                # SECURITY NOTE(review): eval() executes the
                                # accessor string from the output-processor
                                # JSON. Only ship trusted config files; a
                                # hostile accessor runs arbitrary code here.
                                result = eval(accessor)(instance, args)
                                # Only plain, non-empty strings are recorded.
                                if result and isinstance(result, str):
                                    span.set_attribute(attribute_name, result)
                            except Exception as e:
                                logger.error(f"Error processing accessor: {e}")
                        else:
                            logger.warning(f"{' and '.join([key for key in ['attribute', 'accessor'] if not processor.get(key)])} not found or incorrect in entity JSON")
                    span_index += 1
            else:
                logger.warning("attributes not found or incorrect written in entity json")
                span.set_attribute("span.count", count)

        else:
            logger.warning("empty or entities json is not in correct format")
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
def post_task_processing(to_wrap, span, return_value):
    """Record output events on *span* after the wrapped call returns.

    Best-effort: failures are logged and suppressed so tracing never breaks
    the instrumented application.
    """
    try:
        update_span_with_context_output(to_wrap=to_wrap, return_value=return_value, span=span)

        if is_root_span(span):
            update_span_with_prompt_output(to_wrap=to_wrap, wrapped_args=return_value, span=span)
    # Was a bare `except:`; Exception keeps the best-effort behavior while
    # letting SystemExit/KeyboardInterrupt propagate.
    except Exception:
        logger.exception("exception in post_task_processing")
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
def pre_task_processing(to_wrap, instance, args, span):
    """Record input events and infra metadata on *span* before the wrapped call.

    Best-effort: failures are logged and suppressed so tracing never breaks
    the instrumented application.
    """
    try:
        if is_root_span(span):
            update_span_with_prompt_input(to_wrap=to_wrap, wrapped_args=args, span=span)
            update_span_with_infra_name(span, INFRA_SERVICE_KEY)

        update_span_with_context_input(to_wrap=to_wrap, wrapped_args=args, span=span)
    # Was a bare `except:`; Exception keeps the best-effort behavior while
    # letting SystemExit/KeyboardInterrupt propagate.
    except Exception:
        logger.exception("exception in pre_task_processing")
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
@with_tracer_wrapper
async def atask_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
    """Instruments and calls every function defined in TO_WRAP (async variant)."""

    # Some Langchain objects are wrapped elsewhere, so we ignore them here.
    # BUG FIX: `in ("AgentExecutor")` was substring matching against a plain
    # string; the one-element tuple restores exact class-name matching.
    # BUG FIX: the early return now awaits the coroutine — previously the
    # un-awaited coroutine object leaked out of this async wrapper.
    if instance.__class__.__name__ in ("AgentExecutor",):
        return await wrapped(*args, **kwargs)

    # Span name: instance's own name > configured span_name > class fallback.
    if hasattr(instance, "name") and instance.name:
        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
    elif to_wrap.get("span_name"):
        name = to_wrap.get("span_name")
    else:
        name = f"langchain.task.{instance.__class__.__name__}"
    with tracer.start_as_current_span(name) as span:
        process_span(to_wrap, span, instance, args)
        pre_task_processing(to_wrap, instance, args, span)
        return_value = await wrapped(*args, **kwargs)
        post_task_processing(to_wrap, span, return_value)

    return return_value
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
@with_tracer_wrapper
async def allm_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
    """Traces an async LLM inference (or haystack retriever) call."""

    # Some Langchain objects are wrapped elsewhere, so we ignore them here.
    # BUG FIX: `in ("AgentExecutor")` was substring matching; one-element
    # tuple restores exact matching. BUG FIX: the early return is now awaited
    # so callers get the result, not a coroutine object.
    if instance.__class__.__name__ in ("AgentExecutor",):
        return await wrapped(*args, **kwargs)

    # Span name: configured getter > instance name > span_name > class fallback.
    if callable(to_wrap.get("span_name_getter")):
        name = to_wrap.get("span_name_getter")(instance)
    elif hasattr(instance, "name") and instance.name:
        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
    elif to_wrap.get("span_name"):
        name = to_wrap.get("span_name")
    else:
        name = f"langchain.task.{instance.__class__.__name__}"
    with tracer.start_as_current_span(name) as span:
        # Haystack retriever spans record the query captured earlier in the
        # pipeline (stored under DATA_INPUT_KEY in the global context).
        if 'haystack.components.retrievers' in to_wrap['package'] and 'haystack.retriever' in span.name:
            input_arg_text = get_attribute(DATA_INPUT_KEY)
            span.add_event(DATA_INPUT_KEY, {QUERY: input_arg_text})
        provider_name, inference_endpoint = get_provider_name(instance)
        instance_args = {"provider_name": provider_name, "inference_endpoint": inference_endpoint}

        process_span(to_wrap, span, instance, instance_args)

        return_value = await wrapped(*args, **kwargs)
        if 'haystack.components.retrievers' in to_wrap['package'] and 'haystack.retriever' in span.name:
            update_span_with_context_output(to_wrap=to_wrap, return_value=return_value, span=span)
        update_span_from_llm_response(response=return_value, span=span, instance=instance)

    return return_value
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
@with_tracer_wrapper
def llm_wrapper(tracer: Tracer, to_wrap, wrapped, instance, args, kwargs):
    """Traces a synchronous LLM inference (or haystack retriever) call."""

    # Some Langchain objects are wrapped elsewhere, so we ignore them here.
    # BUG FIX: `in ("AgentExecutor")` was substring matching against a plain
    # string; the one-element tuple restores exact class-name matching.
    if instance.__class__.__name__ in ("AgentExecutor",):
        return wrapped(*args, **kwargs)

    # Span name: configured getter > instance name > span_name > class fallback.
    if callable(to_wrap.get("span_name_getter")):
        name = to_wrap.get("span_name_getter")(instance)
    elif hasattr(instance, "name") and instance.name:
        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
    elif to_wrap.get("span_name"):
        name = to_wrap.get("span_name")
    else:
        name = f"langchain.task.{instance.__class__.__name__}"

    with tracer.start_as_current_span(name) as span:
        # Haystack retriever spans record the query captured earlier in the
        # pipeline (stored under DATA_INPUT_KEY in the global context).
        if 'haystack.components.retrievers' in to_wrap['package'] and 'haystack.retriever' in span.name:
            input_arg_text = get_attribute(DATA_INPUT_KEY)
            span.add_event(DATA_INPUT_KEY, {QUERY: input_arg_text})
        provider_name, inference_endpoint = get_provider_name(instance)
        instance_args = {"provider_name": provider_name, "inference_endpoint": inference_endpoint}

        process_span(to_wrap, span, instance, instance_args)

        return_value = wrapped(*args, **kwargs)
        if 'haystack.components.retrievers' in to_wrap['package'] and 'haystack.retriever' in span.name:
            update_span_with_context_output(to_wrap=to_wrap, return_value=return_value, span=span)
        update_span_from_llm_response(response=return_value, span=span, instance=instance)

    return return_value
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
def update_llm_endpoint(curr_span: Span, instance):
|
|
263
|
-
# Lambda to set attributes if values are not None
|
|
264
|
-
__set_span_attribute_if_not_none = lambda span, **kwargs: [
|
|
265
|
-
span.set_attribute(k, v) for k, v in kwargs.items() if v is not None
|
|
266
|
-
]
|
|
267
|
-
|
|
268
|
-
triton_llm_endpoint = os.environ.get("TRITON_LLM_ENDPOINT")
|
|
269
|
-
if triton_llm_endpoint is not None and len(triton_llm_endpoint) > 0:
|
|
270
|
-
curr_span.set_attribute("server_url", triton_llm_endpoint)
|
|
271
|
-
else:
|
|
272
|
-
# Get temperature if present
|
|
273
|
-
temp_val = instance.__dict__.get("temperature")
|
|
274
|
-
|
|
275
|
-
# Resolve values for model name, deployment, and inference endpoint
|
|
276
|
-
model_name = resolve_from_alias(instance.__dict__, ["model", "model_name"])
|
|
277
|
-
deployment_name = resolve_from_alias(instance.__dict__,
|
|
278
|
-
["engine", "azure_deployment", "deployment_name", "deployment_id",
|
|
279
|
-
"deployment"])
|
|
280
|
-
inference_ep = resolve_from_alias(instance.__dict__, ["azure_endpoint", "api_base"])
|
|
281
|
-
|
|
282
|
-
# Use the lambda to set attributes conditionally
|
|
283
|
-
__set_span_attribute_if_not_none(
|
|
284
|
-
curr_span,
|
|
285
|
-
temperature=temp_val,
|
|
286
|
-
model_name=model_name,
|
|
287
|
-
az_openai_deployment=deployment_name,
|
|
288
|
-
inference_endpoint=inference_ep
|
|
289
|
-
)
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
def get_provider_name(instance):
    """Best-effort extraction of (provider hostname, inference endpoint) from a client.

    Probes the OpenAI-style nested client (``instance.client._client.base_url``)
    first, then a bare ``api_base`` attribute. Never raises; unknown values
    come back as "".
    """
    provider_url = ""
    inference_endpoint = ""
    try:
        if isinstance(instance.client._client.base_url.host, str):
            provider_url = instance.client._client.base_url.host
        if isinstance(instance.client._client.base_url, str):
            inference_endpoint = instance.client._client.base_url
        else:
            # httpx.URL and similar objects stringify to the full endpoint.
            inference_endpoint = str(instance.client._client.base_url)
    except Exception:  # attribute probing; was a bare except
        pass

    try:
        if isinstance(instance.api_base, str):
            provider_url = instance.api_base
    except Exception:
        pass

    # BUG FIX: the original read parsed_provider_url outside the try block,
    # raising NameError whenever provider_url was empty (or urlparse failed).
    hostname = ""
    try:
        if len(provider_url) > 0:
            hostname = urlparse(provider_url).hostname or ""
    except Exception:
        pass
    return hostname or provider_url, inference_endpoint
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
def is_root_span(curr_span: Span) -> bool:
    """Return True when *curr_span* has no parent, i.e. it starts the trace."""
    return curr_span.parent is None
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
def get_input_from_args(chain_args):
    """Return the first positional argument when it is a string, else ""."""
    if chain_args and isinstance(chain_args[0], str):
        return chain_args[0]
    return ""
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
def update_span_from_llm_response(response, span: Span, instance):
    """Attach a metadata event (temperature + token counts) to *span*.

    Supports two response shapes: langchain (``response_metadata`` dict) and
    llamaindex (``raw`` payload, dict or object).
    """
    # extract token usage from langchain openai
    if (response is not None and hasattr(response, "response_metadata")):
        response_metadata = response.response_metadata
        token_usage = response_metadata.get("token_usage")
        meta_dict = {}
        if token_usage is not None:
            temperature = instance.__dict__.get("temperature", None)
            meta_dict.update({"temperature": temperature})
            meta_dict.update({"completion_tokens": token_usage.get("completion_tokens")})
            meta_dict.update({"prompt_tokens": token_usage.get("prompt_tokens")})
            meta_dict.update({"total_tokens": token_usage.get("total_tokens")})
            span.add_event(META_DATA, meta_dict)
    # extract token usage from llamaindex openai
    if (response is not None and hasattr(response, "raw")):
        try:
            meta_dict = {}
            if response.raw is not None:
                # raw may be a plain dict or an SDK object; read "usage" either way.
                token_usage = response.raw.get("usage") if isinstance(response.raw, dict) else getattr(response.raw,
                                                                                                       "usage", None)
                if token_usage is not None:
                    temperature = instance.__dict__.get("temperature", None)
                    meta_dict.update({"temperature": temperature})
                    # Zero token counts are skipped here (falsy check) —
                    # NOTE(review): presumably intentional; confirm.
                    if getattr(token_usage, "completion_tokens", None):
                        meta_dict.update({"completion_tokens": getattr(token_usage, "completion_tokens")})
                    if getattr(token_usage, "prompt_tokens", None):
                        meta_dict.update({"prompt_tokens": getattr(token_usage, "prompt_tokens")})
                    if getattr(token_usage, "total_tokens", None):
                        meta_dict.update({"total_tokens": getattr(token_usage, "total_tokens")})
                    span.add_event(META_DATA, meta_dict)
        except AttributeError:
            token_usage = None
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
def update_workflow_type(to_wrap, span: Span):
|
|
364
|
-
package_name = to_wrap.get('package')
|
|
365
|
-
|
|
366
|
-
for (package, workflow_type) in WORKFLOW_TYPE_MAP.items():
|
|
367
|
-
if (package_name is not None and package in package_name):
|
|
368
|
-
span.set_attribute(WORKFLOW_TYPE_KEY, workflow_type)
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
def update_span_with_context_input(to_wrap, wrapped_args, span: Span):
|
|
372
|
-
package_name: str = to_wrap.get('package')
|
|
373
|
-
input_arg_text = ""
|
|
374
|
-
if "langchain_core.retrievers" in package_name and len(wrapped_args) > 0:
|
|
375
|
-
input_arg_text += wrapped_args[0]
|
|
376
|
-
if "llama_index.core.indices.base_retriever" in package_name and len(wrapped_args) > 0:
|
|
377
|
-
input_arg_text += wrapped_args[0].query_str
|
|
378
|
-
if "haystack.components.retrievers.in_memory" in package_name:
|
|
379
|
-
input_arg_text += get_attribute(DATA_INPUT_KEY)
|
|
380
|
-
if input_arg_text:
|
|
381
|
-
span.add_event(DATA_INPUT_KEY, {QUERY: input_arg_text})
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
def update_span_with_context_output(to_wrap, return_value, span: Span):
|
|
385
|
-
package_name: str = to_wrap.get('package')
|
|
386
|
-
output_arg_text = ""
|
|
387
|
-
if "langchain_core.retrievers" in package_name:
|
|
388
|
-
output_arg_text += " ".join([doc.page_content for doc in return_value if hasattr(doc, 'page_content')])
|
|
389
|
-
if len(output_arg_text) > 100:
|
|
390
|
-
output_arg_text = output_arg_text[:100] + "..."
|
|
391
|
-
if "llama_index.core.indices.base_retriever" in package_name and len(return_value) > 0:
|
|
392
|
-
output_arg_text += return_value[0].text
|
|
393
|
-
if "haystack.components.retrievers.in_memory" in package_name:
|
|
394
|
-
output_arg_text += " ".join([doc.content for doc in return_value['documents']])
|
|
395
|
-
if len(output_arg_text) > 100:
|
|
396
|
-
output_arg_text = output_arg_text[:100] + "..."
|
|
397
|
-
if output_arg_text:
|
|
398
|
-
span.add_event(DATA_OUTPUT_KEY, {RESPONSE: output_arg_text})
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
def update_span_with_prompt_input(to_wrap, wrapped_args, span: Span):
|
|
402
|
-
input_arg_text = wrapped_args[0]
|
|
403
|
-
|
|
404
|
-
if isinstance(input_arg_text, dict):
|
|
405
|
-
span.add_event(PROMPT_INPUT_KEY, input_arg_text)
|
|
406
|
-
else:
|
|
407
|
-
span.add_event(PROMPT_INPUT_KEY, {QUERY: input_arg_text})
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
def update_span_with_prompt_output(to_wrap, wrapped_args, span: Span):
|
|
411
|
-
package_name: str = to_wrap.get('package')
|
|
412
|
-
if isinstance(wrapped_args, str):
|
|
413
|
-
span.add_event(PROMPT_OUTPUT_KEY, {RESPONSE: wrapped_args})
|
|
414
|
-
if isinstance(wrapped_args, dict):
|
|
415
|
-
span.add_event(PROMPT_OUTPUT_KEY, wrapped_args)
|
|
416
|
-
if "llama_index.core.base.base_query_engine" in package_name:
|
|
417
|
-
span.add_event(PROMPT_OUTPUT_KEY, {RESPONSE: wrapped_args.response})
|
monocle_apptrace/wrapper.py
DELETED
|
@@ -1,26 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
from monocle_apptrace.haystack import HAYSTACK_METHODS
|
|
3
|
-
from monocle_apptrace.langchain import LANGCHAIN_METHODS
|
|
4
|
-
from monocle_apptrace.llamaindex import LLAMAINDEX_METHODS
|
|
5
|
-
from monocle_apptrace.wrap_common import task_wrapper
|
|
6
|
-
|
|
7
|
-
# pylint: disable=too-few-public-methods
|
|
8
|
-
class WrapperMethod:
|
|
9
|
-
def __init__(
|
|
10
|
-
self,
|
|
11
|
-
package: str,
|
|
12
|
-
object_name: str,
|
|
13
|
-
method: str,
|
|
14
|
-
span_name: str = None,
|
|
15
|
-
output_processor : list[str] = None,
|
|
16
|
-
wrapper = task_wrapper
|
|
17
|
-
):
|
|
18
|
-
self.package = package
|
|
19
|
-
self.object = object_name
|
|
20
|
-
self.method = method
|
|
21
|
-
self.span_name = span_name
|
|
22
|
-
self.output_processor=output_processor
|
|
23
|
-
|
|
24
|
-
self.wrapper = wrapper
|
|
25
|
-
|
|
26
|
-
INBUILT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS
|
|
@@ -1,44 +0,0 @@
|
|
|
1
|
-
monocle_apptrace/README.md,sha256=T5NFC01bF8VR0oVnAX_n0bhsEtttwqfTxDNAe5Y_ivE,3765
|
|
2
|
-
monocle_apptrace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
3
|
-
monocle_apptrace/constants.py,sha256=wjObbmMTFL201x-bf3EOXevYygwkFH_1ng5dDrpE3z0,810
|
|
4
|
-
monocle_apptrace/instrumentor.py,sha256=FMQ8yVNYGRBTmyUjC2578G8RzSRpHY5HtVN5WK9ndVE,5291
|
|
5
|
-
monocle_apptrace/utils.py,sha256=l-affXaMX6U_wG1rWRE2GPB6dDGtha7kL0MQK4PEptI,6395
|
|
6
|
-
monocle_apptrace/wrap_common.py,sha256=ux2Ob7g1FYDtt1gF4g8XKQhl6Ed5X_iR4CP4GHX4upM,18178
|
|
7
|
-
monocle_apptrace/wrapper.py,sha256=8bsMaCCCbaEJN8hW5YoYkn2XLJ7vFI1HJk6QHihbkto,830
|
|
8
|
-
monocle_apptrace/exporters/base_exporter.py,sha256=1UvywEiKZ0nilcXiy-mZue012yVvYXgwib1x1qQvpBc,1647
|
|
9
|
-
monocle_apptrace/exporters/exporter_processor.py,sha256=BTcBgMuFLHCdCgVvc9TKIo9y8g1BvShI0L4vX6Q-cmk,393
|
|
10
|
-
monocle_apptrace/exporters/file_exporter.py,sha256=gN9pJ_X5pcstVVsyivgHsjWhr443eRa6Y6Hx1rGLQAM,2280
|
|
11
|
-
monocle_apptrace/exporters/monocle_exporters.py,sha256=mvVoGr4itvUBuakH17zNEuXYpHtqRhq2CBH5JXhleoM,1418
|
|
12
|
-
monocle_apptrace/exporters/aws/s3_exporter.py,sha256=S_w2OBkM8eEYK6bBDbfcRV4LdojU1QygOBqjdhcjAl4,6730
|
|
13
|
-
monocle_apptrace/exporters/azure/blob_exporter.py,sha256=dtL8MPISyP1eKM0gGpSoqTUmFuGEfycqF9OCjFF6H6E,5716
|
|
14
|
-
monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=p2rjStwo0OMEdHWQt_QvREpUWXbDm5jGx3qXeYai4_M,4407
|
|
15
|
-
monocle_apptrace/haystack/__init__.py,sha256=zMvF6Dh1-vVIAQD__HC1ubT5bs-EupUghg7HNhDv7_c,448
|
|
16
|
-
monocle_apptrace/haystack/wrap_node.py,sha256=IK07Wn3Lk1Os9URsyrmB1HXOH2FNdzK9fNLlR8TZdYc,908
|
|
17
|
-
monocle_apptrace/haystack/wrap_openai.py,sha256=Yp916DhOl0WI6virRi3L43snfsQm7PhI28wlDsg19v8,1536
|
|
18
|
-
monocle_apptrace/haystack/wrap_pipeline.py,sha256=yZAw7Hdv7FXe6rrM7gA2y5SjaZYQZCAi0q-R-uqUEvk,2254
|
|
19
|
-
monocle_apptrace/langchain/__init__.py,sha256=3yhbdw1h9I1nVEfnOOPKz9yD5NqwdLSZsxtXbMplRkw,400
|
|
20
|
-
monocle_apptrace/llamaindex/__init__.py,sha256=TgV1ZM8Cz113pZ2aAgpYwsCJyHA2aHOvNoW1QMBp0mM,709
|
|
21
|
-
monocle_apptrace/metamodel/README.md,sha256=KYuuYqgA9PNbOjG0zYj2nAdvNEpyNN_Bk9M2tNdnZ_s,4598
|
|
22
|
-
monocle_apptrace/metamodel/entities/README.md,sha256=dY7Q8QT_Ju-2ul65J-k0x6beDLvRirlbGufZN1Q0tpk,2068
|
|
23
|
-
monocle_apptrace/metamodel/entities/app_hosting_types.json,sha256=MWeHUe77n4HvO1hm2PX8iWjbRONBuNy2I84vd-lymYk,568
|
|
24
|
-
monocle_apptrace/metamodel/entities/entities.json,sha256=6eDoDm_6aPUgH_ROjI2H9Tk3J5Lj55VkX3Li-TJmaxg,1285
|
|
25
|
-
monocle_apptrace/metamodel/entities/inference_types.json,sha256=Gkq7qNw5Qn91tuq-rBxHJ4BSzT-etCshNjT5_G4Bg8I,651
|
|
26
|
-
monocle_apptrace/metamodel/entities/model_types.json,sha256=Oz9DzuX_ptpLEVTc1Epv4_Y80TrYxR7cyc9kM0Zk780,785
|
|
27
|
-
monocle_apptrace/metamodel/entities/vector_store_types.json,sha256=EA0KayHMOG7wMIkec54en03_3yT1qpdGh5TiDrAoh4g,462
|
|
28
|
-
monocle_apptrace/metamodel/entities/workflow_types.json,sha256=eD0W3_FocKrzrrW0bks0hIJb9Kj7w8c1dpXzxmGLOwk,386
|
|
29
|
-
monocle_apptrace/metamodel/maps/haystack_methods.json,sha256=JmngkaKICAzOyrWNTWEOLYFrp99l5wcERYKE_SQRNxE,698
|
|
30
|
-
monocle_apptrace/metamodel/maps/langchain_methods.json,sha256=lfU8nd6td5qzTI01glM063Vg0UoZM9HGPirwN4OZcKc,4285
|
|
31
|
-
monocle_apptrace/metamodel/maps/llamaindex_methods.json,sha256=pZ4d1DS4D6Wgpz9EBOrEuJUFN58jtu75E8ELyGRBHyM,2636
|
|
32
|
-
monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json,sha256=E_K3eHv_e9Va2AcjNLHcXeAgePB1uolTMdRnkxxtNqU,1152
|
|
33
|
-
monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json,sha256=LuAa9ZKl3mO2VUiXMVGZNEfSHOZ0n5qsVQLqWRHczVM,1122
|
|
34
|
-
monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json,sha256=vSe2kbeJ7izmj5TBdQpX5pJ3hbjdY4SkcXvWPiam7oo,718
|
|
35
|
-
monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json,sha256=BtWeSXUPzKR9-vuhZUUhn2Gz9Y4SXBZoFC81K1q0XwQ,712
|
|
36
|
-
monocle_apptrace/metamodel/spans/README.md,sha256=_uMkLLaWitQ_rPh7oQbW5Oe7uGSv2h_QA6YwxHRJi74,5433
|
|
37
|
-
monocle_apptrace/metamodel/spans/span_example.json,sha256=7ZAUssL1UYXlbKHre8PfeNlIhHnM28e2CH9EyHDTOt8,4402
|
|
38
|
-
monocle_apptrace/metamodel/spans/span_format.json,sha256=GhfioGgMhG7St0DeYA1fgNtMkbr9wiQ1L2hovekRQ24,1512
|
|
39
|
-
monocle_apptrace/metamodel/spans/span_types.json,sha256=jwVyPbYMhEf2Ea6Egmb3m1Za28ap9dgZIpJDhdE1BlY,361
|
|
40
|
-
monocle_apptrace-0.2.0.dist-info/METADATA,sha256=zaIOrJwa-RLX1bWeBEfTCfeBGC2cuNeXKwPHAYYNRT8,5364
|
|
41
|
-
monocle_apptrace-0.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
|
42
|
-
monocle_apptrace-0.2.0.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
|
|
43
|
-
monocle_apptrace-0.2.0.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
|
|
44
|
-
monocle_apptrace-0.2.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|