monocle-apptrace 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (41)
  1. monocle_apptrace/README.md +50 -26
  2. monocle_apptrace/exporters/aws/s3_exporter.py +158 -0
  3. monocle_apptrace/exporters/azure/blob_exporter.py +128 -0
  4. monocle_apptrace/exporters/base_exporter.py +47 -0
  5. monocle_apptrace/exporters/exporter_processor.py +19 -0
  6. monocle_apptrace/exporters/monocle_exporters.py +27 -0
  7. monocle_apptrace/exporters/okahu/okahu_exporter.py +115 -0
  8. monocle_apptrace/haystack/__init__.py +4 -4
  9. monocle_apptrace/haystack/wrap_pipeline.py +18 -1
  10. monocle_apptrace/instrumentor.py +15 -18
  11. monocle_apptrace/langchain/__init__.py +6 -3
  12. monocle_apptrace/llamaindex/__init__.py +8 -7
  13. monocle_apptrace/metamodel/README.md +47 -0
  14. monocle_apptrace/metamodel/entities/README.md +77 -0
  15. monocle_apptrace/metamodel/entities/app_hosting_types.json +29 -0
  16. monocle_apptrace/metamodel/entities/entities.json +49 -0
  17. monocle_apptrace/metamodel/entities/inference_types.json +33 -0
  18. monocle_apptrace/metamodel/entities/model_types.json +41 -0
  19. monocle_apptrace/metamodel/entities/vector_store_types.json +25 -0
  20. monocle_apptrace/metamodel/entities/workflow_types.json +22 -0
  21. monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json +35 -0
  22. monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json +35 -0
  23. monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json +27 -0
  24. monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json +27 -0
  25. monocle_apptrace/{wrapper_config/lang_chain_methods.json → metamodel/maps/langchain_methods.json} +31 -8
  26. monocle_apptrace/{wrapper_config/llama_index_methods.json → metamodel/maps/llamaindex_methods.json} +12 -8
  27. monocle_apptrace/metamodel/spans/README.md +121 -0
  28. monocle_apptrace/metamodel/spans/span_example.json +140 -0
  29. monocle_apptrace/metamodel/spans/span_format.json +55 -0
  30. monocle_apptrace/metamodel/spans/span_types.json +16 -0
  31. monocle_apptrace/utils.py +108 -9
  32. monocle_apptrace/wrap_common.py +247 -98
  33. monocle_apptrace/wrapper.py +3 -1
  34. monocle_apptrace-0.2.0.dist-info/METADATA +115 -0
  35. monocle_apptrace-0.2.0.dist-info/RECORD +44 -0
  36. monocle_apptrace-0.1.0.dist-info/METADATA +0 -77
  37. monocle_apptrace-0.1.0.dist-info/RECORD +0 -22
  38. /monocle_apptrace/{wrapper_config → metamodel/maps}/haystack_methods.json +0 -0
  39. {monocle_apptrace-0.1.0.dist-info → monocle_apptrace-0.2.0.dist-info}/WHEEL +0 -0
  40. {monocle_apptrace-0.1.0.dist-info → monocle_apptrace-0.2.0.dist-info}/licenses/LICENSE +0 -0
  41. {monocle_apptrace-0.1.0.dist-info → monocle_apptrace-0.2.0.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/README.md
@@ -1,37 +1,59 @@
-#Monocle User Guide
-
 ## Monocle Concepts
+
 ### Traces
-Traces are the full view of a single end-to-end application KPI eg Chatbot application to provide a response to end user’s question. Traces consists of various metadata about the application run including status, start time, duration, input/outputs etc. It also includes a list of individual steps aka “spans with details about that step.
-It’s typically the workflow code components of an application that generate the traces for application runs.
+Traces are the full view of a single end-to-end application execution.
+
+Examples of traces include one response to an end user’s question by a chatbot app. Traces consist of various metadata about the application run, including status, start time, duration, input/outputs etc. A trace also includes a list of individual steps, aka “spans”, with details about each step. It’s typically the workflow code components of an application that generate the traces for application runs.
+
+Traces are collections of spans.
+
 ### Spans
-Spans are the individual steps executed by the application to perform a GenAI related task” eg app retrieving vectors from DB, app querying LLM for inference etc. The span includes the type of operation, start time, duration and metadata relevant to that step eg Model name, parameters and model endpoint/server for an inference request.
-It’s typically the workflow code components of an application that generate the traces for application runs.
+Spans are the individual steps executed by the application to perform a GenAI related task.
+
+Examples of spans include the app retrieving vectors from a DB, the app querying an LLM for inference, etc. The span includes the type of operation, start time, duration and metadata relevant to that step, e.g. model name, parameters and model endpoint/server for an inference request.
 
-## Setup Monocle
-- You can download Monocle library releases from Pypi
+## Contribute to Monocle
+
+Monocle includes:
+- Methods for instrumentation of app code
+  - Base code for wrapping methods of interest is included in the current folder
+  - Framework-specific code is organized in a folder with the framework name
+- A metamodel for how attributes and events for GenAI components are represented in OpenTelemetry format
+  - See [metamodel](./metamodel/README.md) for supported GenAI entities, how functions operating on those entities map to spans, and the format of spans
+- Exporters to send trace data to various locations. See [exporters](./exporters)
+
+See the [Monocle committer guide](/Monocle_committer_guide.md).
+
+## Get Monocle
+
+Option 1 - Download released packages from PyPI
 ```
-> python3 -m pip install pipenv
-> pip install monocle-observability
+python3 -m pip install pipenv
+pip install monocle-apptrace
 ```
-- You can locally build and install Monocle library from source
+
+Option 2 - Build and install locally from source
 ```
-> pip install .
-> pip install -e ".[dev]"
+pip install .
+pip install -e ".[dev]"
 
-> python3 -m pip install pipenv
-> pipenv install build
+python3 -m pip install pipenv
+pipenv install build
 ```
 
-## Examples
-### Enable Monocle tracing in your application
+## Examples of app instrumentation with Monocle
+
+### Apps written using LLM orchestration frameworks
+
 ```python
-from monocle_apptrace.instrumentor import setup_monocle_telemetry
-from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
 from langchain.chains import LLMChain
 from langchain_openai import OpenAI
 from langchain.prompts import PromptTemplate
 
+# Import the monocle_apptrace instrumentation method
+from monocle_apptrace.instrumentor import setup_monocle_telemetry
+from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
+
 # Call the setup Monocle telemetry method
 setup_monocle_telemetry(workflow_name = "simple_math_app",
     span_processors=[BatchSpanProcessor(ConsoleSpanExporter())])
@@ -42,19 +64,19 @@ prompt = PromptTemplate.from_template("1 + {number} = ")
 chain = LLMChain(llm=llm, prompt=prompt)
 chain.invoke({"number":2})
 
-# Request callbacks: Finally, let's use the request `callbacks` to achieve the same result
-chain = LLMChain(llm=llm, prompt=prompt)
-chain.invoke({"number":2}, {"callbacks":[handler]})
-
+# Trace is generated when the invoke() method is called
+
 ```
 
-### Monitoring custom methods with Monocle
+### Apps with custom methods
 
 ```python
+
+# Import the monocle_apptrace instrumentation method
 from monocle_apptrace.wrapper import WrapperMethod, task_wrapper, atask_wrapper
 from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
 
-# extend the default wrapped methods list as follows
+# Extend the default wrapped methods list as follows
 app_name = "simple_math_app"
 setup_monocle_telemetry(
     workflow_name=app_name,
@@ -74,4 +96,6 @@ setup_monocle_telemetry(
         wrapper=atask_wrapper)
 ])
 
-```
+# Trace is generated when the invoke() method is called in the langchain.schema.runnable package
+
+```
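
Putting the two halves of the first example together, a minimal runnable version looks like the sketch below. It assumes an OpenAI API key in the environment; the `llm = OpenAI()` line belongs to the part of the README this diff elides.

```python
# Minimal sketch assembled from the README snippets above; assumes
# OPENAI_API_KEY is set and langchain/langchain-openai are installed.
from langchain.chains import LLMChain
from langchain_openai import OpenAI
from langchain.prompts import PromptTemplate

from monocle_apptrace.instrumentor import setup_monocle_telemetry
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

# Instrument once at startup; spans are printed to the console
setup_monocle_telemetry(workflow_name="simple_math_app",
                        span_processors=[BatchSpanProcessor(ConsoleSpanExporter())])

llm = OpenAI()  # assumed from the elided portion of the README
prompt = PromptTemplate.from_template("1 + {number} = ")
chain = LLMChain(llm=llm, prompt=prompt)
chain.invoke({"number": 2})  # the trace is emitted on invoke()
```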
monocle_apptrace/exporters/aws/s3_exporter.py (new file)
@@ -0,0 +1,158 @@
+import os
+import time
+import random
+import datetime
+import logging
+import asyncio
+import boto3
+from botocore.exceptions import ClientError
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+from monocle_apptrace.exporters.base_exporter import SpanExporterBase
+from typing import Sequence
+import json
+
+logger = logging.getLogger(__name__)
+
+class S3SpanExporter(SpanExporterBase):
+    def __init__(self, bucket_name=None, region_name="us-east-1"):
+        super().__init__()
+        # Use environment variables if credentials are not provided
+        DEFAULT_FILE_PREFIX = "monocle_trace__"
+        DEFAULT_TIME_FORMAT = "%Y-%m-%d__%H.%M.%S"
+        self.max_batch_size = 500
+        self.export_interval = 1
+        self.s3_client = boto3.client(
+            's3',
+            aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
+            aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
+            region_name=region_name,
+        )
+        self.bucket_name = bucket_name or os.getenv('MONOCLE_S3_BUCKET_NAME', 'default-bucket')
+        self.file_prefix = DEFAULT_FILE_PREFIX
+        self.time_format = DEFAULT_TIME_FORMAT
+        self.export_queue = []
+        self.last_export_time = time.time()
+
+        # Check if bucket exists or create it
+        if not self.__bucket_exists(self.bucket_name):
+            try:
+                if region_name == "us-east-1":
+                    self.s3_client.create_bucket(Bucket=self.bucket_name)
+                else:
+                    self.s3_client.create_bucket(
+                        Bucket=self.bucket_name,
+                        CreateBucketConfiguration={'LocationConstraint': region_name}
+                    )
+                logger.info(f"Bucket {self.bucket_name} created successfully.")
+            except ClientError as e:
+                logger.error(f"Error creating bucket {self.bucket_name}: {e}")
+                raise e
+
+    def __bucket_exists(self, bucket_name):
+        try:
+            # Check if the bucket exists by calling head_bucket
+            self.s3_client.head_bucket(Bucket=bucket_name)
+            return True
+        except ClientError as e:
+            error_code = e.response['Error']['Code']
+            if error_code == '404':
+                # Bucket not found
+                logger.error(f"Bucket {bucket_name} does not exist (404).")
+                return False
+            elif error_code == '403':
+                # Permission denied
+                logger.error(f"Access to bucket {bucket_name} is forbidden (403).")
+                raise PermissionError(f"Access to bucket {bucket_name} is forbidden.")
+            elif error_code == '400':
+                # Bad request or malformed input
+                logger.error(f"Bad request for bucket {bucket_name} (400).")
+                raise ValueError(f"Bad request for bucket {bucket_name}.")
+            else:
+                # Other client errors
+                logger.error(f"Unexpected error when accessing bucket {bucket_name}: {e}")
+                raise e
+        except TypeError as e:
+            # Handle TypeError separately
+            logger.error(f"Type error while checking bucket existence: {e}")
+            raise e
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        """Synchronous export method that internally handles async logic."""
+        try:
+            # Run the asynchronous export logic in an event loop
+            asyncio.run(self.__export_async(spans))
+            return SpanExportResult.SUCCESS
+        except Exception as e:
+            logger.error(f"Error exporting spans: {e}")
+            return SpanExportResult.FAILURE
+
+    async def __export_async(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        try:
+            # Add spans to the export queue
+            for span in spans:
+                self.export_queue.append(span)
+                # If the queue reaches max_batch_size, export the spans
+                if len(self.export_queue) >= self.max_batch_size:
+                    await self.__export_spans()
+
+            # Check if it's time to force a flush
+            current_time = time.time()
+            if current_time - self.last_export_time >= self.export_interval:
+                await self.__export_spans()  # Export spans if the time interval has passed
+                self.last_export_time = current_time  # Reset the last export time
+
+            return SpanExportResult.SUCCESS
+        except Exception as e:
+            logger.error(f"Error exporting spans: {e}")
+            return SpanExportResult.FAILURE
+
+    def __serialize_spans(self, spans: Sequence[ReadableSpan]) -> str:
+        try:
+            # Serialize spans to NDJSON (one JSON object per line)
+            valid_json_list = []
+            for span in spans:
+                try:
+                    valid_json_list.append(span.to_json(indent=0).replace("\n", ""))
+                except json.JSONDecodeError as e:
+                    logger.warning(f"Invalid JSON format in span data: {span.context.span_id}. Error: {e}")
+                    continue
+            ndjson_data = "\n".join(valid_json_list) + "\n"
+            return ndjson_data
+        except Exception as e:
+            logger.warning(f"Error serializing spans: {e}")
+
+    async def __export_spans(self):
+        if len(self.export_queue) == 0:
+            return
+
+        # Take a batch of spans from the queue
+        batch_to_export = self.export_queue[:self.max_batch_size]
+        serialized_data = self.__serialize_spans(batch_to_export)
+        self.export_queue = self.export_queue[self.max_batch_size:]
+        try:
+            if asyncio.get_event_loop().is_running():
+                task = asyncio.create_task(self._retry_with_backoff(self.__upload_to_s3, serialized_data))
+                await task
+            else:
+                await self._retry_with_backoff(self.__upload_to_s3, serialized_data)
+        except Exception as e:
+            logger.error(f"Failed to upload span batch: {e}")
+
+    def __upload_to_s3(self, span_data_batch: str):
+        current_time = datetime.datetime.now().strftime(self.time_format)
+        file_name = f"{self.file_prefix}{current_time}.ndjson"
+        self.s3_client.put_object(
+            Bucket=self.bucket_name,
+            Key=file_name,
+            Body=span_data_batch
+        )
+        logger.info(f"Span batch uploaded to AWS S3 as {file_name}.")
+
+    async def force_flush(self, timeout_millis: int = 30000) -> bool:
+        await self.__export_spans()  # Export any remaining spans in the queue
+        return True
+
+    def shutdown(self) -> None:
+        logger.info("S3SpanExporter has been shut down.")
monocle_apptrace/exporters/azure/blob_exporter.py (new file)
@@ -0,0 +1,128 @@
+import os
+import time
+import random
+import datetime
+import logging
+import asyncio
+from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
+from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError, ServiceRequestError
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+from typing import Sequence
+from monocle_apptrace.exporters.base_exporter import SpanExporterBase
+import json
+
+logger = logging.getLogger(__name__)
+
+class AzureBlobSpanExporter(SpanExporterBase):
+    def __init__(self, connection_string=None, container_name=None):
+        super().__init__()
+        DEFAULT_FILE_PREFIX = "monocle_trace_"
+        DEFAULT_TIME_FORMAT = "%Y-%m-%d_%H.%M.%S"
+        self.max_batch_size = 500
+        self.export_interval = 1
+        # Use default values if none are provided
+        if not connection_string:
+            connection_string = os.getenv('MONOCLE_BLOB_CONNECTION_STRING')
+        if not connection_string:
+            raise ValueError("Azure Storage connection string is not provided or set in environment variables.")
+
+        if not container_name:
+            container_name = os.getenv('MONOCLE_BLOB_CONTAINER_NAME', 'default-container')
+
+        self.blob_service_client = BlobServiceClient.from_connection_string(connection_string)
+        self.container_name = container_name
+        self.file_prefix = DEFAULT_FILE_PREFIX
+        self.time_format = DEFAULT_TIME_FORMAT
+
+        # Check if container exists or create it
+        if not self.__container_exists(container_name):
+            try:
+                self.blob_service_client.create_container(container_name)
+                logger.info(f"Container {container_name} created successfully.")
+            except Exception as e:
+                logger.error(f"Error creating container {container_name}: {e}")
+                raise e
+
+    def __container_exists(self, container_name):
+        try:
+            container_client = self.blob_service_client.get_container_client(container_name)
+            container_client.get_container_properties()
+            return True
+        except ResourceNotFoundError:
+            logger.error(f"Container {container_name} not found (404).")
+            return False
+        except ClientAuthenticationError:
+            logger.error(f"Access to container {container_name} is forbidden (403).")
+            raise PermissionError(f"Access to container {container_name} is forbidden.")
+        except Exception as e:
+            logger.error(f"Unexpected error when checking if container {container_name} exists: {e}")
+            raise e
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        """Synchronous export method that internally handles async logic."""
+        try:
+            # Run the asynchronous export logic in an event loop
+            asyncio.run(self._export_async(spans))
+            return SpanExportResult.SUCCESS
+        except Exception as e:
+            logger.error(f"Error exporting spans: {e}")
+            return SpanExportResult.FAILURE
+
+    async def _export_async(self, spans: Sequence[ReadableSpan]):
+        """The actual async export logic runs here."""
+        # Add spans to the export queue
+        for span in spans:
+            self.export_queue.append(span)
+            if len(self.export_queue) >= self.max_batch_size:
+                await self.__export_spans()
+
+        # Force a flush if the interval has passed
+        current_time = time.time()
+        if current_time - self.last_export_time >= self.export_interval:
+            await self.__export_spans()
+            self.last_export_time = current_time
+
+    def __serialize_spans(self, spans: Sequence[ReadableSpan]) -> str:
+        try:
+            valid_json_list = []
+            for span in spans:
+                try:
+                    valid_json_list.append(span.to_json(indent=0).replace("\n", ""))
+                except json.JSONDecodeError as e:
+                    logger.warning(f"Invalid JSON format in span data: {span.context.span_id}. Error: {e}")
+                    continue
+
+            ndjson_data = "\n".join(valid_json_list) + "\n"
+            return ndjson_data
+        except Exception as e:
+            logger.warning(f"Error serializing spans: {e}")
+
+    async def __export_spans(self):
+        if len(self.export_queue) == 0:
+            return
+
+        batch_to_export = self.export_queue[:self.max_batch_size]
+        serialized_data = self.__serialize_spans(batch_to_export)
+        self.export_queue = self.export_queue[self.max_batch_size:]
+        try:
+            if asyncio.get_event_loop().is_running():
+                task = asyncio.create_task(self._retry_with_backoff(self.__upload_to_blob, serialized_data))
+                await task
+            else:
+                await self._retry_with_backoff(self.__upload_to_blob, serialized_data)
+        except Exception as e:
+            logger.error(f"Failed to upload span batch: {e}")
+
+    def __upload_to_blob(self, span_data_batch: str):
+        current_time = datetime.datetime.now().strftime(self.time_format)
+        file_name = f"{self.file_prefix}{current_time}.ndjson"
+        blob_client = self.blob_service_client.get_blob_client(container=self.container_name, blob=file_name)
+        blob_client.upload_blob(span_data_batch, overwrite=True)
+        logger.info(f"Span batch uploaded to Azure Blob Storage as {file_name}.")
+
+    async def force_flush(self, timeout_millis: int = 30000) -> bool:
+        await self.__export_spans()
+        return True
+
+    def shutdown(self) -> None:
+        logger.info("AzureBlobSpanExporter has been shut down.")
monocle_apptrace/exporters/base_exporter.py (new file)
@@ -0,0 +1,47 @@
+import time
+import random
+import logging
+from abc import ABC, abstractmethod
+from azure.core.exceptions import ServiceRequestError, ClientAuthenticationError
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+from typing import Sequence
+import asyncio
+
+logger = logging.getLogger(__name__)
+
+class SpanExporterBase(ABC):
+    def __init__(self):
+        self.backoff_factor = 2
+        self.max_retries = 10
+        self.export_queue = []
+        self.last_export_time = time.time()
+
+    @abstractmethod
+    async def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        pass
+
+    @abstractmethod
+    async def force_flush(self, timeout_millis: int = 30000) -> bool:
+        pass
+
+    def shutdown(self) -> None:
+        pass
+
+    async def _retry_with_backoff(self, func, *args, **kwargs):
+        """Handle retries with exponential backoff."""
+        attempt = 0
+        while attempt < self.max_retries:
+            try:
+                return func(*args, **kwargs)
+            except ServiceRequestError as e:
+                # Sleep time grows exponentially with the attempt count, plus jitter
+                sleep_time = self.backoff_factor * (2 ** attempt) + random.uniform(0, 1)
+                logger.warning(f"Network connectivity error: {e}. Retrying in {sleep_time:.1f} seconds...")
+                await asyncio.sleep(sleep_time)
+                attempt += 1
+            except ClientAuthenticationError as e:
+                logger.error(f"Failed to authenticate: {str(e)}")
+                break
+
+        logger.error("Max retries exceeded.")
+        raise ServiceRequestError(message="Max retries exceeded.")
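
To illustrate the contract, a concrete exporter only needs to implement export and force_flush; the minimal sketch below (LogSpanExporter is illustrative, not part of the package) funnels its writes through _retry_with_backoff so transient ServiceRequestError failures are retried with backoff:

```python
# Illustrative SpanExporterBase subclass; not shipped with the package.
import logging
from typing import Sequence
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExportResult
from monocle_apptrace.exporters.base_exporter import SpanExporterBase

class LogSpanExporter(SpanExporterBase):
    async def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        # Each write attempt is retried with exponential backoff on network errors
        await self._retry_with_backoff(self.__write, spans)
        return SpanExportResult.SUCCESS

    async def force_flush(self, timeout_millis: int = 30000) -> bool:
        return True

    def __write(self, spans):
        for span in spans:
            logging.getLogger(__name__).info(span.to_json(indent=0))
```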
monocle_apptrace/exporters/exporter_processor.py (new file)
@@ -0,0 +1,19 @@
+from abc import ABC, abstractmethod
+import logging
+from typing import Callable
+
+logger = logging.getLogger(__name__)
+
+class ExportTaskProcessor(ABC):
+
+    @abstractmethod
+    def start(self):
+        return
+
+    @abstractmethod
+    def stop(self):
+        return
+
+    @abstractmethod
+    def queue_task(self, async_task: Callable[[Callable, any], any] = None, args: any = None):
+        return
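
A concrete processor might run queued export callables on a background thread so export() returns without blocking; the following is an illustrative sketch under that assumption, not an implementation shipped with the package:

```python
# Illustrative ExportTaskProcessor built on a worker thread; not part of the package.
import queue
import threading
from typing import Callable
from monocle_apptrace.exporters.exporter_processor import ExportTaskProcessor

class ThreadedTaskProcessor(ExportTaskProcessor):
    def __init__(self):
        self._queue = queue.Queue()
        self._worker = None

    def start(self):
        self._worker = threading.Thread(target=self._run, daemon=True)
        self._worker.start()

    def stop(self):
        self._queue.put(None)  # sentinel ends the worker loop

    def queue_task(self, async_task: Callable = None, args=None):
        self._queue.put((async_task, args))

    def _run(self):
        while True:
            item = self._queue.get()
            if item is None:
                break
            task, args = item
            task(args)  # e.g. send_spans_to_okahu(span_list)
```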
monocle_apptrace/exporters/monocle_exporters.py (new file)
@@ -0,0 +1,27 @@
+from typing import Dict, Any
+import os, warnings
+from importlib import import_module
+from opentelemetry.sdk.trace.export import SpanExporter, ConsoleSpanExporter
+from monocle_apptrace.exporters.file_exporter import FileSpanExporter
+
+monocle_exporters: Dict[str, Any] = {
+    "s3": {"module": "monocle_apptrace.exporters.aws.s3_exporter", "class": "S3SpanExporter"},
+    "blob": {"module": "monocle_apptrace.exporters.azure.blob_exporter", "class": "AzureBlobSpanExporter"},
+    "okahu": {"module": "monocle_apptrace.exporters.okahu.okahu_exporter", "class": "OkahuSpanExporter"},
+    "file": {"module": "monocle_apptrace.exporters.file_exporter", "class": "FileSpanExporter"}
+}
+
+def get_monocle_exporter() -> SpanExporter:
+    exporter_name = os.environ.get("MONOCLE_EXPORTER", "file")
+    try:
+        exporter_class_path = monocle_exporters[exporter_name]
+    except Exception as ex:
+        warnings.warn(f"Unsupported Monocle span exporter setting {exporter_name}, using default FileSpanExporter.")
+        return FileSpanExporter()
+    try:
+        exporter_module = import_module(exporter_class_path.get("module"))
+        exporter_class = getattr(exporter_module, exporter_class_path.get("class"))
+        return exporter_class()
+    except Exception as ex:
+        warnings.warn(f"Unable to set Monocle span exporter to {exporter_name}, error {ex}. Using ConsoleSpanExporter")
+        return ConsoleSpanExporter()
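
Exporter selection is driven by the MONOCLE_EXPORTER environment variable; a short sketch, assuming the chosen backend's credentials are already configured:

```python
import os
from monocle_apptrace.exporters.monocle_exporters import get_monocle_exporter

os.environ["MONOCLE_EXPORTER"] = "s3"  # one of: s3, blob, okahu, file
exporter = get_monocle_exporter()      # falls back to File/Console exporter on errors
```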
monocle_apptrace/exporters/okahu/okahu_exporter.py (new file)
@@ -0,0 +1,115 @@
+import json
+import logging
+import os
+from typing import Callable, Optional, Sequence
+import requests
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult, ConsoleSpanExporter
+from requests.exceptions import ReadTimeout
+
+from monocle_apptrace.exporters.exporter_processor import ExportTaskProcessor
+
+REQUESTS_SUCCESS_STATUS_CODES = (200, 202)
+OKAHU_PROD_INGEST_ENDPOINT = "https://ingest.okahu.co/api/v1/trace/ingest"
+
+logger = logging.getLogger(__name__)
+
+
+class OkahuSpanExporter(SpanExporter):
+    def __init__(
+        self,
+        endpoint: Optional[str] = None,
+        timeout: Optional[int] = None,
+        session: Optional[requests.Session] = None,
+        task_processor: ExportTaskProcessor = None
+    ):
+        """Okahu exporter."""
+        okahu_endpoint: str = os.environ.get("OKAHU_INGESTION_ENDPOINT", OKAHU_PROD_INGEST_ENDPOINT)
+        self.endpoint = endpoint or okahu_endpoint
+        api_key: str = os.environ.get("OKAHU_API_KEY")
+        self._closed = False
+        if not api_key:
+            raise ValueError("OKAHU_API_KEY not set.")
+        self.timeout = timeout or 15
+        self.session = session or requests.Session()
+        self.session.headers.update(
+            {"Content-Type": "application/json", "x-api-key": api_key}
+        )
+
+        self.task_processor = task_processor or None
+        if task_processor is not None:
+            task_processor.start()
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        # After the call to shutdown, subsequent calls to export are
+        # not allowed and should return a failure result
+        if not hasattr(self, 'session'):
+            return self.exporter.export(spans)
+
+        if self._closed:
+            logger.warning("Exporter already shutdown, ignoring batch")
+            return SpanExportResult.FAILURE
+        if len(spans) == 0:
+            return
+
+        span_list = {
+            "batch": []
+        }
+
+        # Append all the span objects to the batch
+        for span in spans:
+            # Create an object from the serialized span
+            obj = json.loads(span.to_json())
+            if obj["parent_id"] is None:
+                obj["parent_id"] = "None"
+            else:
+                obj["parent_id"] = remove_0x_from_start(obj["parent_id"])
+            if obj["context"] is not None:
+                obj["context"]["trace_id"] = remove_0x_from_start(obj["context"]["trace_id"])
+                obj["context"]["span_id"] = remove_0x_from_start(obj["context"]["span_id"])
+            span_list["batch"].append(obj)
+
+        def send_spans_to_okahu(span_list_local=None):
+            try:
+                result = self.session.post(
+                    url=self.endpoint,
+                    data=json.dumps(span_list_local),
+                    timeout=self.timeout,
+                )
+                if result.status_code not in REQUESTS_SUCCESS_STATUS_CODES:
+                    logger.error(
+                        "Traces cannot be uploaded; status code: %s, message %s",
+                        result.status_code,
+                        result.text,
+                    )
+                    return SpanExportResult.FAILURE
+                logger.warning("spans successfully exported to okahu")
+                return SpanExportResult.SUCCESS
+            except ReadTimeout as e:
+                logger.warning("Trace export timed out: %s", str(e))
+                return SpanExportResult.FAILURE
+
+        # If an async task processor is present, push the request to the async task queue
+        if self.task_processor is not None and callable(self.task_processor.queue_task):
+            self.task_processor.queue_task(send_spans_to_okahu, span_list)
+            return SpanExportResult.SUCCESS
+        return send_spans_to_okahu(span_list)
+
+    def shutdown(self) -> None:
+        if self._closed:
+            logger.warning("Exporter already shutdown, ignoring call")
+            return
+        if hasattr(self, 'session'):
+            self.session.close()
+        self._closed = True
+
+    def force_flush(self, timeout_millis: int = 30000) -> bool:
+        return True
+
+
+# Only removes the first occurrence of 0x from the string
+def remove_0x_from_start(my_str: str):
+    if my_str.startswith("0x"):
+        return my_str.replace("0x", "", 1)
+    return my_str
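
A usage sketch for the Okahu exporter; OKAHU_API_KEY must be set before construction or the constructor raises ValueError (see above), and the key value here is a placeholder:

```python
import os
from monocle_apptrace.exporters.okahu.okahu_exporter import OkahuSpanExporter
from monocle_apptrace.instrumentor import setup_monocle_telemetry
from opentelemetry.sdk.trace.export import BatchSpanProcessor

os.environ.setdefault("OKAHU_API_KEY", "<your-api-key>")  # placeholder
setup_monocle_telemetry(workflow_name="my_app",
                        span_processors=[BatchSpanProcessor(OkahuSpanExporter())])
```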
monocle_apptrace/haystack/__init__.py
@@ -1,9 +1,9 @@
-
 import os
 import logging
-from monocle_apptrace.utils import load_wrapper_from_config
+from monocle_apptrace.utils import get_wrapper_methods_config
 
 logger = logging.getLogger(__name__)
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
-HAYSTACK_METHODS = load_wrapper_from_config(
-    os.path.join(parent_dir, 'wrapper_config', 'haystack_methods.json'))
+HAYSTACK_METHODS = get_wrapper_methods_config(
+    wrapper_methods_config_path=os.path.join(parent_dir, 'metamodel', 'maps', 'haystack_methods.json'),
+    attributes_config_base_path=os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
monocle_apptrace/haystack/wrap_pipeline.py
@@ -4,7 +4,8 @@ from opentelemetry.context import attach, set_value
 from opentelemetry.instrumentation.utils import (
     _SUPPRESS_INSTRUMENTATION_KEY,
 )
-from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper
+from monocle_apptrace.wrap_common import PROMPT_INPUT_KEY, PROMPT_OUTPUT_KEY, WORKFLOW_TYPE_MAP, with_tracer_wrapper, DATA_INPUT_KEY
+from monocle_apptrace.utils import set_embedding_model, set_attribute
 
 logger = logging.getLogger(__name__)
 
@@ -17,6 +18,10 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     attach(set_value("workflow_name", name))
     inputs = set()
     workflow_input = get_workflow_input(args, inputs)
+    embedding_model = get_embedding_model(instance)
+    set_embedding_model(embedding_model)
+    set_attribute(DATA_INPUT_KEY, workflow_input)
+
 
     with tracer.start_as_current_span(f"{name}.workflow") as span:
         span.set_attribute(PROMPT_INPUT_KEY, workflow_input)
@@ -44,3 +49,15 @@ def get_workflow_input(args, inputs):
 def set_workflow_attributes(span, workflow_name):
     span.set_attribute("workflow_name", workflow_name)
     span.set_attribute("workflow_type", WORKFLOW_TYPE_MAP["haystack"])
+
+def get_embedding_model(instance):
+    try:
+        if hasattr(instance, 'get_component'):
+            text_embedder = instance.get_component('text_embedder')
+            if text_embedder and hasattr(text_embedder, 'model'):
+                # Return the embedding model attribute
+                return text_embedder.model
+    except:
+        pass
+
+    return None
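
For context, get_embedding_model looks up a pipeline component named text_embedder via instance.get_component(). With a Haystack 2.x pipeline like the sketch below (component name and embedder class are illustrative assumptions), it would return the embedder's model string:

```python
# Illustrative Haystack 2.x pipeline; the component name "text_embedder" is
# what get_embedding_model() above queries via instance.get_component().
from haystack import Pipeline
from haystack.components.embedders import SentenceTransformersTextEmbedder

pipeline = Pipeline()
pipeline.add_component(
    "text_embedder",
    SentenceTransformersTextEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"),
)

# get_embedding_model(pipeline) would return
# "sentence-transformers/all-MiniLM-L6-v2"
```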