monocle-apptrace 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release.
This version of monocle-apptrace might be problematic.
- monocle_apptrace/__init__.py +1 -0
- monocle_apptrace/__main__.py +19 -0
- monocle_apptrace/exporters/aws/s3_exporter.py +50 -27
- monocle_apptrace/exporters/aws/s3_exporter_opendal.py +137 -0
- monocle_apptrace/exporters/azure/blob_exporter.py +30 -12
- monocle_apptrace/exporters/azure/blob_exporter_opendal.py +162 -0
- monocle_apptrace/exporters/base_exporter.py +19 -18
- monocle_apptrace/exporters/exporter_processor.py +128 -3
- monocle_apptrace/exporters/file_exporter.py +16 -0
- monocle_apptrace/exporters/monocle_exporters.py +48 -20
- monocle_apptrace/exporters/okahu/okahu_exporter.py +8 -6
- monocle_apptrace/instrumentation/__init__.py +1 -0
- monocle_apptrace/instrumentation/common/__init__.py +2 -0
- monocle_apptrace/instrumentation/common/constants.py +70 -0
- monocle_apptrace/instrumentation/common/instrumentor.py +362 -0
- monocle_apptrace/instrumentation/common/span_handler.py +220 -0
- monocle_apptrace/instrumentation/common/utils.py +356 -0
- monocle_apptrace/instrumentation/common/wrapper.py +92 -0
- monocle_apptrace/instrumentation/common/wrapper_method.py +72 -0
- monocle_apptrace/instrumentation/metamodel/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/_helper.py +95 -0
- monocle_apptrace/instrumentation/metamodel/botocore/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py +65 -0
- monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +26 -0
- monocle_apptrace/instrumentation/metamodel/botocore/methods.py +16 -0
- monocle_apptrace/instrumentation/metamodel/flask/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/flask/_helper.py +29 -0
- monocle_apptrace/instrumentation/metamodel/flask/methods.py +13 -0
- monocle_apptrace/instrumentation/metamodel/haystack/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +127 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +76 -0
- monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py +61 -0
- monocle_apptrace/instrumentation/metamodel/haystack/methods.py +43 -0
- monocle_apptrace/instrumentation/metamodel/langchain/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langchain/_helper.py +127 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +72 -0
- monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py +58 -0
- monocle_apptrace/instrumentation/metamodel/langchain/methods.py +111 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py +48 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py +56 -0
- monocle_apptrace/instrumentation/metamodel/langgraph/methods.py +14 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py +172 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py +47 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +73 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py +57 -0
- monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +101 -0
- monocle_apptrace/instrumentation/metamodel/openai/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py +112 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +71 -0
- monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +43 -0
- monocle_apptrace/instrumentation/metamodel/openai/methods.py +45 -0
- monocle_apptrace/instrumentation/metamodel/requests/__init__.py +4 -0
- monocle_apptrace/instrumentation/metamodel/requests/_helper.py +31 -0
- monocle_apptrace/instrumentation/metamodel/requests/methods.py +12 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/METADATA +19 -2
- monocle_apptrace-0.3.0.dist-info/RECORD +68 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/WHEEL +1 -1
- monocle_apptrace/constants.py +0 -22
- monocle_apptrace/haystack/__init__.py +0 -9
- monocle_apptrace/haystack/wrap_node.py +0 -27
- monocle_apptrace/haystack/wrap_openai.py +0 -44
- monocle_apptrace/haystack/wrap_pipeline.py +0 -63
- monocle_apptrace/instrumentor.py +0 -121
- monocle_apptrace/langchain/__init__.py +0 -9
- monocle_apptrace/llamaindex/__init__.py +0 -16
- monocle_apptrace/metamodel/README.md +0 -47
- monocle_apptrace/metamodel/entities/README.md +0 -77
- monocle_apptrace/metamodel/entities/app_hosting_types.json +0 -29
- monocle_apptrace/metamodel/entities/entities.json +0 -49
- monocle_apptrace/metamodel/entities/inference_types.json +0 -33
- monocle_apptrace/metamodel/entities/model_types.json +0 -41
- monocle_apptrace/metamodel/entities/vector_store_types.json +0 -25
- monocle_apptrace/metamodel/entities/workflow_types.json +0 -22
- monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json +0 -35
- monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json +0 -35
- monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json +0 -27
- monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json +0 -27
- monocle_apptrace/metamodel/maps/haystack_methods.json +0 -25
- monocle_apptrace/metamodel/maps/langchain_methods.json +0 -129
- monocle_apptrace/metamodel/maps/llamaindex_methods.json +0 -74
- monocle_apptrace/metamodel/spans/README.md +0 -121
- monocle_apptrace/metamodel/spans/span_example.json +0 -140
- monocle_apptrace/metamodel/spans/span_format.json +0 -55
- monocle_apptrace/metamodel/spans/span_types.json +0 -16
- monocle_apptrace/utils.py +0 -172
- monocle_apptrace/wrap_common.py +0 -417
- monocle_apptrace/wrapper.py +0 -26
- monocle_apptrace-0.2.0.dist-info/RECORD +0 -44
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.2.0.dist-info → monocle_apptrace-0.3.0.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/exporters/exporter_processor.py:

@@ -1,11 +1,18 @@
 from abc import ABC, abstractmethod
 import logging
+import os
+import queue
+import threading
+import time
 from typing import Callable
+import requests
+from monocle_apptrace.instrumentation.common.constants import AWS_LAMBDA_ENV_NAME

 logger = logging.getLogger(__name__)
+LAMBDA_EXTENSION_NAME = "AsyncProcessorMonocle"

 class ExportTaskProcessor(ABC):
-
+
     @abstractmethod
     def start(self):
         return
@@ -15,5 +22,123 @@ class ExportTaskProcessor(ABC):
         return

     @abstractmethod
-    def queue_task(self, async_task: Callable[[Callable, any], any] = None, args: any = None):
-        return
+    def queue_task(self, async_task: Callable[[Callable, any], any] = None, args: any = None, is_root_span: bool = False):
+        return
+
+class LambdaExportTaskProcessor(ExportTaskProcessor):
+
+    def __init__(
+            self,
+            span_check_interval_seconds: int = 1,
+            max_time_allowed_seconds: int = 30):
+        # An internal queue used by the handler to notify the extension that it can
+        # start processing the async task.
+        self.async_tasks_queue = queue.Queue()
+        self.span_check_interval = span_check_interval_seconds
+        self.max_time_allowed = max_time_allowed_seconds
+
+    def start(self):
+        try:
+            self._start_async_processor()
+        except Exception as e:
+            logger.error(f"LambdaExportTaskProcessor| Failed to start. {e}")
+
+    def stop(self):
+        return
+
+    def queue_task(self, async_task=None, args=None, is_root_span=False):
+        self.async_tasks_queue.put((async_task, args, is_root_span))
+
+    def set_sagemaker_model(self, endpoint_name: str, span: dict[str, dict[str, str]]):
+        try:
+            try:
+                import boto3
+            except ImportError:
+                logger.error("LambdaExportTaskProcessor| Failed to import boto3")
+                return
+
+            client = boto3.client('sagemaker')
+            response = client.describe_endpoint(
+                EndpointName=endpoint_name
+            )
+            endpoint_config_name = response["EndpointConfigName"]
+            endpoint_config_response = client.describe_endpoint_config(
+                EndpointConfigName=endpoint_config_name
+            )
+            model_name = endpoint_config_response["ProductionVariants"][0]["ModelName"]
+            model_name_response = client.describe_model(ModelName = model_name)
+            model_name_id = ""
+            try:
+                model_name_id = model_name_response["PrimaryContainer"]["Environment"]["HF_MODEL_ID"]
+            except:
+                pass
+            span["attributes"]["model_name"] = model_name_id
+        except Exception as e:
+            logger.error(f"LambdaExportTaskProcessor| Failed to get sagemaker model. {e}")
+
+    def update_spans(self, export_args):
+        try:
+            if 'batch' in export_args:
+                for span in export_args["batch"]:
+                    try:
+                        if len(span["attributes"]["sagemaker_endpoint_name"]) > 0 :
+                            self.set_sagemaker_model(endpoint_name=span["attributes"]["sagemaker_endpoint_name"], span=span)
+                    except:
+                        pass
+        except Exception as e:
+            logger.error(f"LambdaExportTaskProcessor| Failed to update spans. {e}")
+
+    def _start_async_processor(self):
+        # Register internal extension
+        logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Registering with Lambda service...")
+        response = requests.post(
+            url=f"http://{os.environ['AWS_LAMBDA_RUNTIME_API']}/2020-01-01/extension/register",
+            json={'events': ['INVOKE']},
+            headers={'Lambda-Extension-Name': LAMBDA_EXTENSION_NAME}
+        )
+        ext_id = response.headers['Lambda-Extension-Identifier']
+        logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Registered with ID: {ext_id}")
+
+        def process_tasks():
+            while True:
+                # Call /next to get notified when there is a new invocation and let
+                # Lambda know that we are done processing the previous task.
+
+                logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Waiting for invocation...")
+                response = requests.get(
+                    url=f"http://{os.environ['AWS_LAMBDA_RUNTIME_API']}/2020-01-01/extension/event/next",
+                    headers={'Lambda-Extension-Identifier': ext_id},
+                    timeout=None
+                )
+                root_span_found = False
+                # all values in seconds
+                total_time_elapsed = 0
+                while root_span_found is False and total_time_elapsed < self.max_time_allowed:
+                    logger.debug(response.json())
+                    # Get next task from internal queue
+                    logger.info(f"[{LAMBDA_EXTENSION_NAME}] Async thread running, waiting for task from handler")
+                    while self.async_tasks_queue.empty() is False :
+                        logger.info(f"[{LAMBDA_EXTENSION_NAME}] Processing task from handler")
+                        async_task, arg, is_root_span = self.async_tasks_queue.get()
+                        root_span_found = is_root_span
+                        # self.update_spans(export_args=arg)
+
+                        if async_task is None:
+                            # No task to run this invocation
+                            logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Received null task. Ignoring.")
+                        else:
+                            # Invoke task
+                            logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Received async task from handler. Starting task.")
+                            async_task(arg)
+                    total_time_elapsed+=self.span_check_interval
+                    logger.info(f"[{LAMBDA_EXTENSION_NAME}] Waiting for root span. total_time_elapsed: {total_time_elapsed}, root_span_found: {root_span_found}.")
+                    time.sleep(self.span_check_interval)
+
+                logger.debug(f"[{LAMBDA_EXTENSION_NAME}] Finished processing task. total_time_elapsed: {total_time_elapsed}, root_span_found: {root_span_found}.")
+
+        # Start processing extension events in a separate thread
+        threading.Thread(target=process_tasks, daemon=True, name=LAMBDA_EXTENSION_NAME).start()
+
+
+def is_aws_lambda_environment():
+    return AWS_LAMBDA_ENV_NAME in os.environ
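The new LambdaExportTaskProcessor defers span export to a daemon thread that registers itself as an internal Lambda extension; exporters hand it work through queue_task together with the export callable, its arguments, and an is_root_span flag. As a rough sketch of that contract only (InlineTaskProcessor is an invented name, not part of the package), a minimal synchronous implementation of the ExportTaskProcessor interface could look like this:

from monocle_apptrace.exporters.exporter_processor import ExportTaskProcessor

class InlineTaskProcessor(ExportTaskProcessor):
    # Toy processor: runs each queued export task immediately on the caller's
    # thread instead of deferring it to a Lambda extension thread.

    def start(self):
        return

    def stop(self):
        return

    def queue_task(self, async_task=None, args=None, is_root_span=False):
        # The Lambda variant enqueues (async_task, args, is_root_span) and lets a
        # background thread drain the queue between invocations.
        if async_task is not None:
            async_task(args)

An exporter wired with a processor like this behaves much like one with no processor at all, which is why the package only creates a LambdaExportTaskProcessor when is_aws_lambda_environment() is true.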
monocle_apptrace/exporters/file_exporter.py:

@@ -7,6 +7,7 @@ from typing import Optional, Callable, Sequence
 from opentelemetry.sdk.trace import ReadableSpan
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 from opentelemetry.sdk.resources import SERVICE_NAME
+from monocle_apptrace.exporters.exporter_processor import ExportTaskProcessor

 DEFAULT_FILE_PREFIX:str = "monocle_trace_"
 DEFAULT_TIME_FORMAT:str = "%Y-%m-%d_%H.%M.%S"
@@ -25,6 +26,7 @@ class FileSpanExporter(SpanExporter):
             [ReadableSpan], str
         ] = lambda span: span.to_json()
         + linesep,
+        task_processor: Optional[ExportTaskProcessor] = None
     ):
         self.out_handle:TextIOWrapper = None
         self.formatter = formatter
@@ -32,8 +34,20 @@
         self.output_path = out_path
         self.file_prefix = file_prefix
         self.time_format = time_format
+        self.task_processor = task_processor
+        if self.task_processor is not None:
+            self.task_processor.start()

     def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        if self.task_processor is not None and callable(getattr(self.task_processor, 'queue_task', None)):
+            # Check if any span is a root span (no parent)
+            is_root_span = any(not span.parent for span in spans)
+            self.task_processor.queue_task(self._process_spans, spans, is_root_span)
+            return SpanExportResult.SUCCESS
+        else:
+            return self._process_spans(spans)
+
+    def _process_spans(self, spans: Sequence[ReadableSpan], is_root_span: bool = False) -> SpanExportResult:
         for span in spans:
             if span.context.trace_id != self.current_trace_id:
                 self.rotate_file(span.resource.attributes[SERVICE_NAME],
@@ -60,4 +74,6 @@
             self.out_handle = None

     def shutdown(self) -> None:
+        if hasattr(self, 'task_processor') and self.task_processor is not None:
+            self.task_processor.stop()
         self.reset_handle()
monocle_apptrace/exporters/monocle_exporters.py:

@@ -1,27 +1,55 @@
-from typing import Dict, Any
-import os
+from typing import Dict, Any, List
+import os
+import logging
 from importlib import import_module
 from opentelemetry.sdk.trace.export import SpanExporter, ConsoleSpanExporter
+from monocle_apptrace.exporters.exporter_processor import LambdaExportTaskProcessor, is_aws_lambda_environment
 from monocle_apptrace.exporters.file_exporter import FileSpanExporter

-
+logger = logging.getLogger(__name__)
+
+monocle_exporters: Dict[str, Any] = {
     "s3": {"module": "monocle_apptrace.exporters.aws.s3_exporter", "class": "S3SpanExporter"},
-    "blob": {"module":"monocle_apptrace.exporters.azure.blob_exporter", "class": "AzureBlobSpanExporter"},
-    "okahu": {"module":"monocle_apptrace.exporters.okahu.okahu_exporter", "class": "OkahuSpanExporter"},
-    "file": {"module":"monocle_apptrace.exporters.file_exporter", "class": "FileSpanExporter"}
+    "blob": {"module": "monocle_apptrace.exporters.azure.blob_exporter", "class": "AzureBlobSpanExporter"},
+    "okahu": {"module": "monocle_apptrace.exporters.okahu.okahu_exporter", "class": "OkahuSpanExporter"},
+    "file": {"module": "monocle_apptrace.exporters.file_exporter", "class": "FileSpanExporter"},
+    "memory": {"module": "opentelemetry.sdk.trace.export.in_memory_span_exporter", "class": "InMemorySpanExporter"},
+    "console": {"module": "opentelemetry.sdk.trace.export", "class": "ConsoleSpanExporter"}
 }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+def get_monocle_exporter() -> List[SpanExporter]:
+    # Retrieve the MONOCLE_EXPORTER environment variable and split it into a list
+    exporter_names = os.environ.get("MONOCLE_EXPORTER", "file").split(",")
+    exporters = []
+
+    # Create task processor for AWS Lambda environment
+    task_processor = LambdaExportTaskProcessor() if is_aws_lambda_environment() else None
+
+    for exporter_name in exporter_names:
+        exporter_name = exporter_name.strip()
+        try:
+            exporter_class_path = monocle_exporters[exporter_name]
+        except KeyError:
+            logger.debug(f"Unsupported Monocle span exporter '{exporter_name}', skipping.")
+            continue
+        try:
+            exporter_module = import_module(exporter_class_path["module"])
+            exporter_class = getattr(exporter_module, exporter_class_path["class"])
+            # Pass task_processor to all exporters when in AWS Lambda environment
+            if task_processor is not None and exporter_module.__name__.startswith("monocle_apptrace"):
+                exporters.append(exporter_class(task_processor=task_processor))
+            else:
+                exporters.append(exporter_class())
+        except Exception as ex:
+            logger.debug(
+                f"Unable to initialize Monocle span exporter '{exporter_name}', error: {ex}. Using ConsoleSpanExporter as a fallback.")
+            exporters.append(ConsoleSpanExporter())
+            continue
+
+    # If no exporters were created, default to FileSpanExporter
+    if not exporters:
+        logger.debug("No valid Monocle span exporters configured. Defaulting to FileSpanExporter.")
+        exporters.append(FileSpanExporter())
+
+    return exporters
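With this change get_monocle_exporter() returns a list of SpanExporter instances, and MONOCLE_EXPORTER may name several exporters separated by commas; unknown names are skipped, failed initializations fall back to ConsoleSpanExporter, and an empty result falls back to FileSpanExporter. A small usage sketch (the environment value shown is just an example):

import os
from monocle_apptrace.exporters.monocle_exporters import get_monocle_exporter

# Example only: request the file and console exporters for this process.
os.environ["MONOCLE_EXPORTER"] = "file,console"

for exporter in get_monocle_exporter():
    print(type(exporter).__name__)  # expect FileSpanExporter, ConsoleSpanExporter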
monocle_apptrace/exporters/okahu/okahu_exporter.py:

@@ -48,7 +48,7 @@ class OkahuSpanExporter(SpanExporter):

         if self._closed:
             logger.warning("Exporter already shutdown, ignoring batch")
-            return SpanExportResult.
+            return SpanExportResult.FAILURE
         if len(spans) == 0:
             return

@@ -69,7 +69,10 @@ class OkahuSpanExporter(SpanExporter):
             obj["context"]["span_id"] = remove_0x_from_start(obj["context"]["span_id"])
             span_list["batch"].append(obj)

-
+        # Calculate is_root_span by checking if any span has no parent
+        is_root_span = any(not span.parent for span in spans)
+
+        def send_spans_to_okahu(span_list_local=None, is_root=False):
             try:
                 result = self.session.post(
                     url=self.endpoint,
@@ -83,18 +86,17 @@ class OkahuSpanExporter(SpanExporter):
                        result.text,
                    )
                    return SpanExportResult.FAILURE
-                logger.
+                logger.debug("spans successfully exported to okahu. Is root span: %s", is_root)
                 return SpanExportResult.SUCCESS
             except ReadTimeout as e:
                 logger.warning("Trace export timed out: %s", str(e))
                 return SpanExportResult.FAILURE

         # if async task function is present, then push the request to asnc task
-
         if self.task_processor is not None and callable(self.task_processor.queue_task):
-            self.task_processor.queue_task(send_spans_to_okahu, span_list)
+            self.task_processor.queue_task(send_spans_to_okahu, span_list, is_root_span)
             return SpanExportResult.SUCCESS
-        return send_spans_to_okahu(span_list)
+        return send_spans_to_okahu(span_list, is_root_span)

     def shutdown(self) -> None:
         if self._closed:
monocle_apptrace/instrumentation/__init__.py:

@@ -0,0 +1 @@
+from .common import *
monocle_apptrace/instrumentation/common/constants.py:

@@ -0,0 +1,70 @@
+# Azure environment constants
+AZURE_ML_ENDPOINT_ENV_NAME = "AZUREML_ENTRY_SCRIPT"
+AZURE_FUNCTION_WORKER_ENV_NAME = "FUNCTIONS_WORKER_RUNTIME"
+AZURE_APP_SERVICE_ENV_NAME = "WEBSITE_SITE_NAME"
+AWS_LAMBDA_ENV_NAME = "AWS_LAMBDA_RUNTIME_API"
+GITHUB_CODESPACE_ENV_NAME = "CODESPACES"
+
+AWS_LAMBDA_FUNCTION_IDENTIFIER_ENV_NAME = "AWS_LAMBDA_FUNCTION_NAME"
+AZURE_FUNCTION_IDENTIFIER_ENV_NAME = "WEBSITE_SITE_NAME"
+AZURE_APP_SERVICE_IDENTIFIER_ENV_NAME = "WEBSITE_DEPLOYMENT_ID"
+GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
+
+
+# Azure naming reference can be found here
+# https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
+AZURE_FUNCTION_NAME = "azure.func"
+AZURE_APP_SERVICE_NAME = "azure.asp"
+AZURE_ML_SERVICE_NAME = "azure.mlw"
+AWS_LAMBDA_SERVICE_NAME = "aws.lambda"
+GITHUB_CODESPACE_SERVICE_NAME = "github_codespace"
+
+# Env variables to identify infra service type
+service_type_map = {
+    AZURE_ML_ENDPOINT_ENV_NAME: AZURE_ML_SERVICE_NAME,
+    AZURE_APP_SERVICE_ENV_NAME: AZURE_APP_SERVICE_NAME,
+    AZURE_FUNCTION_WORKER_ENV_NAME: AZURE_FUNCTION_NAME,
+    AWS_LAMBDA_ENV_NAME: AWS_LAMBDA_SERVICE_NAME,
+    GITHUB_CODESPACE_ENV_NAME: GITHUB_CODESPACE_SERVICE_NAME
+}
+
+# Env variables to identify infra service name
+service_name_map = {
+    AZURE_APP_SERVICE_NAME: AZURE_APP_SERVICE_IDENTIFIER_ENV_NAME,
+    AZURE_FUNCTION_NAME: AZURE_FUNCTION_IDENTIFIER_ENV_NAME,
+    AZURE_ML_SERVICE_NAME: AZURE_ML_ENDPOINT_ENV_NAME,
+    AWS_LAMBDA_SERVICE_NAME: AWS_LAMBDA_FUNCTION_IDENTIFIER_ENV_NAME,
+    GITHUB_CODESPACE_SERVICE_NAME: GITHUB_CODESPACE_IDENTIFIER_ENV_NAME
+}
+
+
+llm_type_map = {
+    "sagemakerendpoint": "aws_sagemaker",
+    "azureopenai": "azure_openai",
+    "openai": "openai",
+    "chatopenai": "openai",
+    "azurechatopenai": "azure_openai",
+    "bedrock": "aws_bedrock",
+    "sagemakerllm": "aws_sagemaker",
+    "chatbedrock": "aws_bedrock",
+    "openaigenerator": "openai",
+}
+
+MONOCLE_INSTRUMENTOR = "monocle_apptrace"
+DATA_INPUT_KEY = "data.input"
+DATA_OUTPUT_KEY = "data.output"
+PROMPT_INPUT_KEY = "data.input"
+PROMPT_OUTPUT_KEY = "data.output"
+QUERY = "input"
+RESPONSE = "response"
+SESSION_PROPERTIES_KEY = "session"
+INFRA_SERVICE_KEY = "infra_service_name"
+META_DATA = 'metadata'
+MONOCLE_SCOPE_NAME_PREFIX = "monocle.scope."
+SCOPE_METHOD_LIST = 'MONOCLE_SCOPE_METHODS'
+SCOPE_METHOD_FILE = 'monocle_scopes.json'
+SCOPE_CONFIG_PATH = 'MONOCLE_SCOPE_CONFIG_PATH'
+TRACE_PROPOGATION_URLS = "MONOCLE_TRACE_PROPAGATATION_URLS"
+WORKFLOW_TYPE_KEY = "monocle.workflow_type"
+WORKFLOW_TYPE_GENERIC = "workflow.generic"
+MONOCLE_SDK_VERSION = "monocle_apptrace.version"
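The new constants module centralizes the environment probes Monocle uses to detect where the application is running: service_type_map maps a detection environment variable to an infra service type, and service_name_map maps that type to the variable holding the service's identifier. The helper below is hypothetical (the actual lookup lives elsewhere in the package, e.g. the common utils module), but it sketches how the two maps are meant to compose:

import os
from monocle_apptrace.instrumentation.common.constants import service_type_map, service_name_map

def detect_infra_service():
    # Hypothetical helper, not part of the diff: walk the detection env vars and,
    # for the first one present, resolve the service type and its identifier.
    for env_var, service_type in service_type_map.items():
        if env_var in os.environ:
            name_env_var = service_name_map.get(service_type)
            service_name = os.environ.get(name_env_var, "") if name_env_var else ""
            return service_type, service_name
    return "generic", ""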