monocle-apptrace 0.0.1__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of monocle-apptrace might be problematic.
- monocle_apptrace/README.md +52 -28
- monocle_apptrace/__init__.py +0 -2
- monocle_apptrace/constants.py +22 -0
- monocle_apptrace/exporters/file_exporter.py +63 -0
- monocle_apptrace/haystack/__init__.py +5 -24
- monocle_apptrace/haystack/wrap_node.py +1 -1
- monocle_apptrace/haystack/wrap_openai.py +1 -9
- monocle_apptrace/haystack/wrap_pipeline.py +22 -9
- monocle_apptrace/instrumentor.py +29 -32
- monocle_apptrace/langchain/__init__.py +5 -94
- monocle_apptrace/llamaindex/__init__.py +7 -63
- monocle_apptrace/metamodel/README.md +47 -0
- monocle_apptrace/metamodel/entities/README.md +54 -0
- monocle_apptrace/metamodel/entities/entity_types.json +157 -0
- monocle_apptrace/metamodel/entities/entity_types.py +51 -0
- monocle_apptrace/metamodel/maps/haystack_methods.json +25 -0
- monocle_apptrace/metamodel/maps/lang_chain_methods.json +106 -0
- monocle_apptrace/metamodel/maps/llama_index_methods.json +70 -0
- monocle_apptrace/metamodel/spans/README.md +121 -0
- monocle_apptrace/metamodel/spans/span_example.json +140 -0
- monocle_apptrace/metamodel/spans/span_format.json +55 -0
- monocle_apptrace/utils.py +56 -16
- monocle_apptrace/wrap_common.py +143 -46
- monocle_apptrace/wrapper.py +3 -3
- monocle_apptrace-0.1.1.dist-info/METADATA +111 -0
- monocle_apptrace-0.1.1.dist-info/RECORD +29 -0
- monocle_apptrace-0.0.1.dist-info/METADATA +0 -76
- monocle_apptrace-0.0.1.dist-info/RECORD +0 -17
- {monocle_apptrace-0.0.1.dist-info → monocle_apptrace-0.1.1.dist-info}/WHEEL +0 -0
- {monocle_apptrace-0.0.1.dist-info → monocle_apptrace-0.1.1.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.0.1.dist-info → monocle_apptrace-0.1.1.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/README.md
CHANGED
````diff
@@ -1,37 +1,59 @@
-#Monocle User Guide
-
 ## Monocle Concepts
+
 ### Traces
-Traces are the full view of a single end-to-end application
-
+Traces are the full view of a single end-to-end application execution.
+
+Examples of traces include one response to end user’s question by a chatbot app. Traces consists of various metadata about the application run including status, start time, duration, input/outputs etc. It also includes a list of individual steps aka “spans with details about that step.It’s typically the workflow code components of an application that generate the traces for application runs.
+
+Traces are collections of spans.
+
 ### Spans
-Spans are the individual steps executed by the application to perform a GenAI related task
-
+Spans are the individual steps executed by the application to perform a GenAI related task.
+
+Examples of spans include app retrieving vectors from DB, app querying LLM for inference etc. The span includes the type of operation, start time, duration and metadata relevant to that step eg Model name, parameters and model endpoint/server for an inference request.
 
-##
-
+## Contribute to Monocle
+
+Monocle includes:
+- Methods for instrumentation of app code
+- Base code for wrapping methods of interest in included in current folder
+- Framework specific code is organized in a folder with the framework name
+- Metamodel for how attributes and events for GenAI components are represented in OpenTelemety format
+- See [metamodel](./metamodel/README.md) for supported GenAI entities, how functions operating on those entities map to spans and format of spans
+- Exporters to send trace data to various locations. See [exporters](./exporters)
+
+See [Monocle committer guide](/Monocle_committer_guide.md).
+
+## Get Monocle
+
+Option 1 - Download released packages from Pypi
 ```
-
-
+python3 -m pip install pipenv
+pip install monocle-apptrace
 ```
-
+
+Option 2 - Build and install locally from source
 ```
-
-
+pip install .
+pip install -e ".[dev]"
 
-
-
+python3 -m pip install pipenv
+pipenv install build
 ```
 
-## Examples
-
+## Examples of app instrumentation with Monocle
+
+### apps written using LLM orchestration frameworks
+
 ```python
-from monocle_apptrace.instrumentor import setup_monocle_telemetry
-from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
 from langchain.chains import LLMChain
 from langchain_openai import OpenAI
 from langchain.prompts import PromptTemplate
 
+# Import the monocle_apptrace instrumentation method
+from monocle_apptrace.instrumentor import setup_monocle_telemetry
+from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
+
 # Call the setup Monocle telemetry method
 setup_monocle_telemetry(workflow_name = "simple_math_app",
     span_processors=[BatchSpanProcessor(ConsoleSpanExporter())])
@@ -42,19 +64,19 @@ prompt = PromptTemplate.from_template("1 + {number} = ")
 chain = LLMChain(llm=llm, prompt=prompt)
 chain.invoke({"number":2})
 
-#
-
-chain.invoke({"number":2}, {"callbacks":[handler]})
-
+# Trace is generated when invoke() method is called
+
 ```
 
-###
+### apps with custom methods
 
 ```python
+
+# Import the monocle_apptrace instrumentation method
 from monocle_apptrace.wrapper import WrapperMethod,task_wrapper,atask_wrapper
 from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
 
-#
+# Extend the default wrapped methods list as follows
 app_name = "simple_math_app"
 setup_monocle_telemetry(
     workflow_name=app_name,
@@ -62,16 +84,18 @@ setup_monocle_telemetry(
     wrapper_methods=[
         WrapperMethod(
             package="langchain.schema.runnable",
-
+            object_name="RunnableParallel",
             method="invoke",
             span_name="langchain.workflow",
             wrapper=task_wrapper),
         WrapperMethod(
             package="langchain.schema.runnable",
-
+            object_name="RunnableParallel",
             method="ainvoke",
             span_name="langchain.workflow",
             wrapper=atask_wrapper)
     ])
 
-
+# Trace is generated when the invoke() method is called in langchain.schema.runnable package
+
+```
````
monocle_apptrace/constants.py
CHANGED
```diff
@@ -0,0 +1,22 @@
+# Azure environment constants
+AZURE_ML_ENDPOINT_ENV_NAME = "AZUREML_ENTRY_SCRIPT"
+AZURE_FUNCTION_WORKER_ENV_NAME = "FUNCTIONS_WORKER_RUNTIME"
+AZURE_APP_SERVICE_ENV_NAME = "WEBSITE_SITE_NAME"
+AWS_LAMBDA_ENV_NAME = "AWS_LAMBDA_RUNTIME_API"
+
+# Azure naming reference can be found here
+# https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
+AZURE_FUNCTION_NAME = "azure.func"
+AZURE_APP_SERVICE_NAME = "azure.asp"
+AZURE_ML_SERVICE_NAME = "azure.mlw"
+AWS_LAMBDA_SERVICE_NAME = "aws.lambda"
+
+azure_service_map = {
+    AZURE_ML_ENDPOINT_ENV_NAME: AZURE_ML_SERVICE_NAME,
+    AZURE_APP_SERVICE_ENV_NAME: AZURE_APP_SERVICE_NAME,
+    AZURE_FUNCTION_WORKER_ENV_NAME: AZURE_FUNCTION_NAME
+}
+
+aws_service_map = {
+    AWS_LAMBDA_ENV_NAME: AWS_LAMBDA_SERVICE_NAME
+}
```
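The new constants map hosting-environment marker variables to short service labels. A minimal sketch of how such a map could be consulted to infer where the app runs; the `detect_service` helper below is hypothetical, not part of the package:

```python
import os

from monocle_apptrace.constants import aws_service_map, azure_service_map

def detect_service() -> str:
    # Hypothetical helper: return the label of the first managed service
    # whose marker environment variable is set, e.g. "azure.func".
    for env_name, service_name in {**azure_service_map, **aws_service_map}.items():
        if os.environ.get(env_name):
            return service_name
    return "generic"  # not running on a recognized managed service
```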
monocle_apptrace/exporters/file_exporter.py
CHANGED
```diff
@@ -0,0 +1,63 @@
+#pylint: disable=consider-using-with
+
+from os import linesep, path
+from io import TextIOWrapper
+from datetime import datetime
+from typing import Optional, Callable, Sequence
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+from opentelemetry.sdk.resources import SERVICE_NAME
+
+DEFAULT_FILE_PREFIX:str = "monocle_trace_"
+DEFAULT_TIME_FORMAT:str = "%Y-%m-%d_%H.%M.%S"
+
+class FileSpanExporter(SpanExporter):
+    current_trace_id: int = None
+    current_file_path: str = None
+
+    def __init__(
+        self,
+        service_name: Optional[str] = None,
+        out_path:str = ".",
+        file_prefix = DEFAULT_FILE_PREFIX,
+        time_format = DEFAULT_TIME_FORMAT,
+        formatter: Callable[
+            [ReadableSpan], str
+        ] = lambda span: span.to_json()
+        + linesep,
+    ):
+        self.out_handle:TextIOWrapper = None
+        self.formatter = formatter
+        self.service_name = service_name
+        self.output_path = out_path
+        self.file_prefix = file_prefix
+        self.time_format = time_format
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        for span in spans:
+            if span.context.trace_id != self.current_trace_id:
+                self.rotate_file(span.resource.attributes[SERVICE_NAME],
+                                 span.context.trace_id)
+            self.out_handle.write(self.formatter(span))
+        self.out_handle.flush()
+        return SpanExportResult.SUCCESS
+
+    def rotate_file(self, trace_name:str, trace_id:int) -> None:
+        self.reset_handle()
+        self.current_file_path = path.join(self.output_path,
+            self.file_prefix + trace_name + "_" + hex(trace_id) + "_"
+            + datetime.now().strftime(self.time_format) + ".json")
+        self.out_handle = open(self.current_file_path, "w", encoding='UTF-8')
+        self.current_trace_id = trace_id
+
+    def force_flush(self, timeout_millis: int = 30000) -> bool:
+        self.out_handle.flush()
+        return True
+
+    def reset_handle(self) -> None:
+        if self.out_handle is not None:
+            self.out_handle.close()
+            self.out_handle = None
+
+    def shutdown(self) -> None:
+        self.reset_handle()
```
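Per the instrumentor.py diff further down, 0.1.1 falls back to this exporter when no span processors are passed to `setup_monocle_telemetry`. Wiring it up explicitly would look roughly like the sketch below; the `out_path` value is illustrative:

```python
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from monocle_apptrace.exporters.file_exporter import FileSpanExporter
from monocle_apptrace.instrumentor import setup_monocle_telemetry

# Each trace lands in its own monocle_trace_<workflow>_<trace_id>_<timestamp>.json
# file under ./traces; the exporter rotates files whenever the trace id changes.
setup_monocle_telemetry(
    workflow_name="simple_math_app",
    span_processors=[BatchSpanProcessor(FileSpanExporter(out_path="./traces"))])
```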
monocle_apptrace/haystack/__init__.py
CHANGED
```diff
@@ -1,28 +1,9 @@
 
-
+import os
 import logging
-from monocle_apptrace.haystack.wrap_openai import wrap_openai
-from monocle_apptrace.haystack.wrap_pipeline import wrap as wrap_pipeline
+from monocle_apptrace.utils import load_wrapper_from_config
 
 logger = logging.getLogger(__name__)
-
-HAYSTACK_METHODS = [
-    {
-        "package": "haystack.components.generators.openai",
-        "object": "OpenAIGenerator",
-        "method": "run",
-        "wrapper": wrap_openai,
-    },
-    {
-        "package": "haystack.components.generators.chat.openai",
-        "object": "OpenAIChatGenerator",
-        "method": "run",
-        "wrapper": wrap_openai,
-    },
-    {
-        "package": "haystack.core.pipeline.pipeline",
-        "object": "Pipeline",
-        "method": "run",
-        "wrapper": wrap_pipeline,
-    },
-]
+parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+HAYSTACK_METHODS = load_wrapper_from_config(
+    os.path.join(parent_dir, 'metamodel', 'maps', 'haystack_methods.json'))
```
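`load_wrapper_from_config` itself lives in the updated utils.py, which this diff only summarizes (+56 -16). From its call sites it evidently loads a JSON method map and resolves wrapper names into callables. A speculative sketch of that behavior, with the top-level key and the string form of `"wrapper"` assumed rather than taken from the source:

```python
import json
from importlib import import_module

def load_wrapper_from_config(config_file_path: str) -> list:
    # Speculative re-implementation for illustration only; the real function
    # is in monocle_apptrace/utils.py and its details are not shown here.
    with open(config_file_path, encoding="utf-8") as config_file:
        methods = json.load(config_file)["wrapper_methods"]  # assumed key
    for method in methods:
        # Assumed form: "wrapper": "monocle_apptrace.wrap_common.task_wrapper"
        module_name, _, function_name = method["wrapper"].rpartition(".")
        method["wrapper"] = getattr(import_module(module_name), function_name)
    return methods
```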
monocle_apptrace/haystack/wrap_node.py
CHANGED
```diff
@@ -21,7 +21,7 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     workflow_name = span.resource.attributes.get("service.name")
     span.set_attribute("workflow_name",workflow_name)
     span.set_attribute("workflow_type", WORKFLOW_TYPE_MAP["haystack"])
-
+
     response = wrapped(*args, **kwargs)
 
     return response
```
monocle_apptrace/haystack/wrap_openai.py
CHANGED
```diff
@@ -1,5 +1,3 @@
-
-
 import logging
 from opentelemetry import context as context_api
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
@@ -17,9 +15,7 @@ def _set_input_attributes(span, kwargs, instance, args):
 
     if 'model' in instance.__dict__:
         model_name = instance.__dict__.get("model")
-        set_span_attribute(span, "
-
-        return
+        set_span_attribute(span, "model_name", model_name)
 
 @dont_throw
 def _set_response_attributes(span, response):
@@ -39,14 +35,10 @@ def wrap_openai(tracer, to_wrap, wrapped, instance, args, kwargs):
     with tracer.start_as_current_span("haystack.openai") as span:
         if span.is_recording():
             _set_input_attributes(span, kwargs, instance, args)
-
-
-
         response = wrapped(*args, **kwargs)
 
         if response:
             if span.is_recording():
                 _set_response_attributes(span, response)
-
 
         return response
```
monocle_apptrace/haystack/wrap_pipeline.py
CHANGED
```diff
@@ -1,5 +1,3 @@
-
-
 import logging
 from opentelemetry import context as context_api
 from opentelemetry.context import attach, set_value
@@ -7,6 +5,7 @@ from opentelemetry.instrumentation.utils import (
     _SUPPRESS_INSTRUMENTATION_KEY,
 )
 from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper
+from monocle_apptrace.utils import set_embedding_model
 
 logger = logging.getLogger(__name__)
 
@@ -18,13 +17,15 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     name = "haystack_pipeline"
     attach(set_value("workflow_name", name))
     inputs = set()
-
+    workflow_input = get_workflow_input(args, inputs)
+    embedding_model = get_embedding_model(instance)
+    set_embedding_model(embedding_model)
+
 
     with tracer.start_as_current_span(f"{name}.workflow") as span:
-        span.set_attribute(PROMPT_INPUT_KEY,
+        span.set_attribute(PROMPT_INPUT_KEY, workflow_input)
         workflow_name = span.resource.attributes.get("service.name")
         set_workflow_attributes(span, workflow_name)
-
         response = wrapped(*args, **kwargs)
         set_workflow_output(span, response)
         return response
@@ -37,13 +38,25 @@ def get_workflow_input(args, inputs):
     for value in args[0].values():
         for text in value.values():
             inputs.add(text)
-
-
+
+    workflow_input: str = ""
 
     for input_str in inputs:
-
-        return
+        workflow_input = workflow_input + input_str
+    return workflow_input
 
 def set_workflow_attributes(span, workflow_name):
     span.set_attribute("workflow_name",workflow_name)
     span.set_attribute("workflow_type", WORKFLOW_TYPE_MAP["haystack"])
+
+def get_embedding_model(instance):
+    try:
+        if hasattr(instance, 'get_component'):
+            text_embedder = instance.get_component('text_embedder')
+            if text_embedder and hasattr(text_embedder, 'model'):
+                # Set the embedding model attribute
+                return text_embedder.model
+    except:
+        pass
+
+    return None
```
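To make the reworked `get_workflow_input` concrete: Haystack's `Pipeline.run` takes a dict of per-component inputs, and the function flattens every inner value into a single prompt string. A rough illustration; the pipeline input below is invented:

```python
# Invented example shaped like the first positional argument of Pipeline.run().
args = ({"text_embedder": {"text": "What is Monocle?"},
         "prompt_builder": {"question": "What is Monocle?"}},)

inputs = set()
for value in args[0].values():   # each component's input dict
    for text in value.values():
        inputs.add(text)         # the set de-duplicates the repeated question

workflow_input = "".join(inputs)  # -> "What is Monocle?"
```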
monocle_apptrace/instrumentor.py
CHANGED
```diff
@@ -10,25 +10,26 @@ from opentelemetry.sdk.trace import TracerProvider, Span
 from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanProcessor
 from opentelemetry.sdk.resources import SERVICE_NAME, Resource
 from opentelemetry import trace
-from monocle_apptrace.wrap_common import CONTEXT_PROPERTIES_KEY
-from monocle_apptrace.wrapper import INBUILT_METHODS_LIST, WrapperMethod
 from opentelemetry.context import get_value, attach, set_value
+from monocle_apptrace.wrap_common import SESSION_PROPERTIES_KEY
+from monocle_apptrace.wrapper import INBUILT_METHODS_LIST, WrapperMethod
+from monocle_apptrace.exporters.file_exporter import FileSpanExporter
 
 
 logger = logging.getLogger(__name__)
 
-_instruments = (
+_instruments = ()
 
 class MonocleInstrumentor(BaseInstrumentor):
-
+
     workflow_name: str = ""
     user_wrapper_methods: list[WrapperMethod] = []
     instrumented_method_list: list[object] = []
-
+
     def __init__(
         self,
-        user_wrapper_methods: list[WrapperMethod] =
-        self.user_wrapper_methods = user_wrapper_methods
+        user_wrapper_methods: list[WrapperMethod] = None) -> None:
+        self.user_wrapper_methods = user_wrapper_methods or []
         super().__init__()
 
     def instrumentation_dependencies(self) -> Collection[str]:
@@ -63,11 +64,11 @@ class MonocleInstrumentor(BaseInstrumentor):
             self.instrumented_method_list.append(wrapped_method)
         except Exception as ex:
             if wrapped_method in user_method_list:
-                logger.error(f"""_instrument wrap Exception: {str(ex)}
+                logger.error(f"""_instrument wrap Exception: {str(ex)}
                     for package: {wrap_package},
                     object:{wrap_object},
                     method:{wrap_method}""")
-
+
 
     def _uninstrument(self, **kwargs):
         for wrapped_method in self.instrumented_method_list:
@@ -80,48 +81,44 @@ class MonocleInstrumentor(BaseInstrumentor):
                 wrap_method,
             )
         except Exception as ex:
-            logger.error(f"""_instrument unwrap Exception: {str(ex)}
+            logger.error(f"""_instrument unwrap Exception: {str(ex)}
                 for package: {wrap_package},
                 object:{wrap_object},
                 method:{wrap_method}""")
-
 
 def setup_monocle_telemetry(
         workflow_name: str,
-        span_processors: List[SpanProcessor] =
-        wrapper_methods: List[WrapperMethod] =
+        span_processors: List[SpanProcessor] = None,
+        wrapper_methods: List[WrapperMethod] = None):
     resource = Resource(attributes={
         SERVICE_NAME: workflow_name
     })
-
-
-
-
+    span_processors = span_processors or [BatchSpanProcessor(FileSpanExporter())]
+    trace_provider = TracerProvider(resource=resource)
+    tracer_provider_default = trace.get_tracer_provider()
+    provider_type = type(tracer_provider_default).__name__
+    is_proxy_provider = "Proxy" in provider_type
     for processor in span_processors:
         processor.on_start = on_processor_start
-        if not
-
+        if not is_proxy_provider:
+            tracer_provider_default.add_span_processor(processor)
         else :
-
-            if
-            trace.set_tracer_provider(
-    instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods)
-    instrumentor.app_name = workflow_name
+            trace_provider.add_span_processor(processor)
+    if is_proxy_provider :
+        trace.set_tracer_provider(trace_provider)
+    instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods or [])
+    # instrumentor.app_name = workflow_name
     if not instrumentor.is_instrumented_by_opentelemetry:
         instrumentor.instrument()
 
 
 def on_processor_start(span: Span, parent_context):
-    context_properties = get_value(
+    context_properties = get_value(SESSION_PROPERTIES_KEY)
     if context_properties is not None:
         for key, value in context_properties.items():
             span.set_attribute(
-                f"{
-            )
+                f"{SESSION_PROPERTIES_KEY}.{key}", value
+            )
 
 def set_context_properties(properties: dict) -> None:
-    attach(set_value(
-
-
-
-
+    attach(set_value(SESSION_PROPERTIES_KEY, properties))
```
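The rename from `CONTEXT_PROPERTIES_KEY` to `SESSION_PROPERTIES_KEY` drives the session tagging above: anything passed to `set_context_properties` is copied by `on_processor_start` onto every span started afterwards, as attributes prefixed with the key's value (which this diff does not show). A small sketch; the property names are illustrative:

```python
from monocle_apptrace.instrumentor import (set_context_properties,
                                           setup_monocle_telemetry)

setup_monocle_telemetry(workflow_name="simple_math_app")

# Illustrative session properties; each later span carries them as
# attributes prefixed with the SESSION_PROPERTIES_KEY value.
set_context_properties({"session_id": "abc-123", "user_tier": "beta"})
```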
monocle_apptrace/langchain/__init__.py
CHANGED
```diff
@@ -1,95 +1,6 @@
+import os
+from monocle_apptrace.utils import load_wrapper_from_config
 
-
-
-
-LANGCHAIN_METHODS = [
-    {
-        "package": "langchain.prompts.base",
-        "object": "BasePromptTemplate",
-        "method": "invoke",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "langchain.prompts.base",
-        "object": "BasePromptTemplate",
-        "method": "ainvoke",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "langchain.chat_models.base",
-        "object": "BaseChatModel",
-        "method": "invoke",
-        "wrapper": llm_wrapper,
-    },
-    {
-        "package": "langchain.chat_models.base",
-        "object": "BaseChatModel",
-        "method": "ainvoke",
-        "wrapper": allm_wrapper,
-    },
-    {
-        "package": "langchain_core.language_models.llms",
-        "object": "LLM",
-        "method": "_generate",
-        "wrapper": llm_wrapper,
-    },
-    {
-        "package": "langchain_core.language_models.llms",
-        "object": "LLM",
-        "method": "_agenerate",
-        "wrapper": llm_wrapper,
-    },
-    {
-        "package": "langchain_core.retrievers",
-        "object": "BaseRetriever",
-        "method": "invoke",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "langchain_core.retrievers",
-        "object": "BaseRetriever",
-        "method": "ainvoke",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "langchain.schema",
-        "object": "BaseOutputParser",
-        "method": "invoke",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "langchain.schema",
-        "object": "BaseOutputParser",
-        "method": "ainvoke",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "langchain.schema.runnable",
-        "object": "RunnableSequence",
-        "method": "invoke",
-        "span_name": "langchain.workflow",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "langchain.schema.runnable",
-        "object": "RunnableSequence",
-        "method": "ainvoke",
-        "span_name": "langchain.workflow",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "langchain.schema.runnable",
-        "object": "RunnableParallel",
-        "method": "invoke",
-        "span_name": "langchain.workflow",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "langchain.schema.runnable",
-        "object": "RunnableParallel",
-        "method": "ainvoke",
-        "span_name": "langchain.workflow",
-        "wrapper": atask_wrapper,
-    },
-
-]
+parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+LANGCHAIN_METHODS = load_wrapper_from_config(
+    os.path.join(parent_dir, 'metamodel', 'maps', 'lang_chain_methods.json'))
```
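The removed Python list now lives in metamodel/maps/lang_chain_methods.json. That file's exact schema is not shown in this diff; a plausible entry mirroring one removed dict, with the wrapper named as a string for `load_wrapper_from_config` to resolve, might look like:

```json
{
  "wrapper_methods": [
    {
      "package": "langchain.schema.runnable",
      "object": "RunnableSequence",
      "method": "invoke",
      "span_name": "langchain.workflow",
      "wrapper": "monocle_apptrace.wrap_common.task_wrapper"
    }
  ]
}
```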
monocle_apptrace/llamaindex/__init__.py
CHANGED
```diff
@@ -1,71 +1,15 @@
 
-
-
+#pylint: disable=protected-access
+import os
+from monocle_apptrace.utils import load_wrapper_from_config
 
 def get_llm_span_name_for_openai(instance):
-    if (hasattr(instance, "_is_azure_client")
+    if (hasattr(instance, "_is_azure_client")
             and callable(getattr(instance, "_is_azure_client"))
             and instance._is_azure_client()):
         return "llamaindex.azure_openai"
     return "llamaindex.openai"
 
-
-LLAMAINDEX_METHODS = [
-    {
-        "object": "BaseRetriever",
-        "method": "retrieve",
-        "span_name": "llamaindex.retrieve",
-        "wrapper": task_wrapper
-    },
-    {
-        "package": "llama_index.core.indices.base_retriever",
-        "object": "BaseRetriever",
-        "method": "aretrieve",
-        "span_name": "llamaindex.retrieve",
-        "wrapper": atask_wrapper
-    },
-    {
-        "package": "llama_index.core.base.base_query_engine",
-        "object": "BaseQueryEngine",
-        "method": "query",
-        "span_name": "llamaindex.query",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "llama_index.core.base.base_query_engine",
-        "object": "BaseQueryEngine",
-        "method": "aquery",
-        "span_name": "llamaindex.query",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "llama_index.core.llms.custom",
-        "object": "CustomLLM",
-        "method": "chat",
-        "span_name": "llamaindex.llmchat",
-        "wrapper": task_wrapper,
-    },
-    {
-        "package": "llama_index.core.llms.custom",
-        "object": "CustomLLM",
-        "method": "achat",
-        "span_name": "llamaindex.llmchat",
-        "wrapper": atask_wrapper,
-    },
-    {
-        "package": "llama_index.llms.openai.base",
-        "object": "OpenAI",
-        "method": "chat",
-        "span_name": "llamaindex.openai",
-        "span_name_getter" : get_llm_span_name_for_openai,
-        "wrapper": llm_wrapper,
-    },
-    {
-        "package": "llama_index.llms.openai.base",
-        "object": "OpenAI",
-        "method": "achat",
-        "span_name": "llamaindex.openai",
-        "wrapper": allm_wrapper,
-    }
-]
-
+parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+LLAMAINDEX_METHODS = load_wrapper_from_config(
+    os.path.join(parent_dir, 'metamodel', 'maps', 'llama_index_methods.json'))
```
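`get_llm_span_name_for_openai` stays behind in Python because a static span name cannot distinguish Azure OpenAI from plain OpenAI clients at configuration time. A quick demonstration with an invented stand-in object:

```python
from monocle_apptrace.llamaindex import get_llm_span_name_for_openai

class FakeAzureClient:
    """Invented stand-in mimicking the llama-index OpenAI client surface."""
    def _is_azure_client(self) -> bool:
        return True

print(get_llm_span_name_for_openai(FakeAzureClient()))  # llamaindex.azure_openai
print(get_llm_span_name_for_openai(object()))           # llamaindex.openai
```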