monocle-apptrace 0.1.1__tar.gz → 0.3.0b1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of monocle-apptrace might be problematic. Click here for more details.
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/.gitignore +2 -1
- monocle_apptrace-0.3.0b1/CHANGELOG.md +49 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/Monocle_User_Guide.md +31 -4
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/PKG-INFO +8 -3
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/pyproject.toml +13 -2
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/botocore/__init__.py +9 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/constants.py +36 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/aws/s3_exporter.py +158 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/azure/blob_exporter.py +125 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/base_exporter.py +48 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/exporter_processor.py +19 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/monocle_exporters.py +27 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/exporters/okahu/okahu_exporter.py +115 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/haystack/__init__.py +9 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/haystack/wrap_pipeline.py +3 -2
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/instrumentor.py +14 -17
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/langchain/__init__.py +9 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/llamaindex/__init__.py +16 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/message_processing.py +80 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/metamodel/entities/README.md +33 -10
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/app_hosting_types.json +29 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/entities.json +49 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/inference_types.json +33 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/model_types.json +41 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/vector_store_types.json +25 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/entities/workflow_types.json +22 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/inference/botocore_entities.json +27 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/inference/haystack_entities.json +57 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/inference/langchain_entities.json +57 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/inference/llamaindex_entities.json +57 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/retrieval/haystack_entities.json +31 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/retrieval/langchain_entities.json +31 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/attributes/retrieval/llamaindex_entities.json +31 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/botocore_methods.json +13 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/haystack_methods.json +45 -0
- monocle_apptrace-0.1.1/src/monocle_apptrace/metamodel/maps/lang_chain_methods.json → monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/langchain_methods.json +31 -8
- monocle_apptrace-0.1.1/src/monocle_apptrace/metamodel/maps/llama_index_methods.json → monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/maps/llamaindex_methods.json +30 -8
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/metamodel/spans/span_example.json +1 -1
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/metamodel/spans/span_types.json +16 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/utils.py +252 -0
- monocle_apptrace-0.3.0b1/src/monocle_apptrace/wrap_common.py +511 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/wrapper.py +5 -2
- monocle_apptrace-0.1.1/CHANGELOG.md +0 -17
- monocle_apptrace-0.1.1/Pipfile.lock +0 -340
- monocle_apptrace-0.1.1/src/monocle_apptrace/constants.py +0 -22
- monocle_apptrace-0.1.1/src/monocle_apptrace/haystack/__init__.py +0 -9
- monocle_apptrace-0.1.1/src/monocle_apptrace/haystack/wrap_node.py +0 -27
- monocle_apptrace-0.1.1/src/monocle_apptrace/haystack/wrap_openai.py +0 -44
- monocle_apptrace-0.1.1/src/monocle_apptrace/langchain/__init__.py +0 -6
- monocle_apptrace-0.1.1/src/monocle_apptrace/llamaindex/__init__.py +0 -15
- monocle_apptrace-0.1.1/src/monocle_apptrace/metamodel/entities/entity_types.json +0 -157
- monocle_apptrace-0.1.1/src/monocle_apptrace/metamodel/entities/entity_types.py +0 -51
- monocle_apptrace-0.1.1/src/monocle_apptrace/metamodel/maps/haystack_methods.json +0 -25
- monocle_apptrace-0.1.1/src/monocle_apptrace/utils.py +0 -93
- monocle_apptrace-0.1.1/src/monocle_apptrace/wrap_common.py +0 -311
- monocle_apptrace-0.1.1/storage/default__vector_store.json +0 -1
- monocle_apptrace-0.1.1/storage/docstore.json +0 -1
- monocle_apptrace-0.1.1/storage/graph_store.json +0 -1
- monocle_apptrace-0.1.1/storage/image__vector_store.json +0 -1
- monocle_apptrace-0.1.1/storage/index_store.json +0 -1
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/CODEOWNERS.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/CODE_OF_CONDUCT.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/CONTRIBUTING.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/COPYRIGHT.template +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/LICENSE +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/MAINTAINER.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/Monocle_committer_guide.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/Monocle_contributor_guide.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/NOTICE +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/README.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/SECURITY.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/README.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/__init__.py +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/exporters/file_exporter.py +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/metamodel/README.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/metamodel/spans/README.md +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/src/monocle_apptrace/metamodel/spans/span_format.json +0 -0
- {monocle_apptrace-0.1.1 → monocle_apptrace-0.3.0b1}/tox.ini +0 -0
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
## Version 0.3.0b1 (2024-12-10)
|
|
2
|
+
|
|
3
|
+
- Add dev dependency for Mistral AI integration ([#81](https://github.com/monocle2ai/monocle/pull/81))
|
|
4
|
+
- Add VectorStore deployment URL capture support ([#80](https://github.com/monocle2ai/monocle/pull/80))
|
|
5
|
+
- Clean up cloud exporter implementation ([#79](https://github.com/monocle2ai/monocle/pull/79))
|
|
6
|
+
- Capture inference span input/output events attributes ([#77](https://github.com/monocle2ai/monocle/pull/77))
|
|
7
|
+
- Add release automation workflows ([#76](https://github.com/monocle2ai/monocle/pull/76))
|
|
8
|
+
- Fix gaps in Monocle SDK implementation ([#72](https://github.com/monocle2ai/monocle/pull/72))
|
|
9
|
+
- Add kwargs and return value handling in Accessor ([#71](https://github.com/monocle2ai/monocle/pull/71))
|
|
10
|
+
- Update workflow name formatting ([#69](https://github.com/monocle2ai/monocle/pull/69))
|
|
11
|
+
- Implement Haystack metamodel support ([#68](https://github.com/monocle2ai/monocle/pull/68))
|
|
12
|
+
|
|
13
|
+
## Version 0.2.0 (2024-12-05)
|
|
14
|
+
|
|
15
|
+
## 0.2.0 (Oct 22, 2024)
|
|
16
|
+
|
|
17
|
+
- Ndjson format for S3 and Blob exporters ([#61](https://github.com/monocle2ai/monocle/pull/61))
|
|
18
|
+
- Set monocle exporter from env setting ([#60](https://github.com/monocle2ai/monocle/pull/60))
|
|
19
|
+
- Update workflow name and type with new format ([#59](https://github.com/monocle2ai/monocle/pull/59))
|
|
20
|
+
- Updated async and custom output processor testcase for metamodel([#58](https://github.com/monocle2ai/monocle/pull/58))
|
|
21
|
+
- Build okahu exporter and added test cases for okahu exporter ([#56](https://github.com/monocle2ai/monocle/pull/56))
|
|
22
|
+
- Handle exception in span wrappers([#52](https://github.com/monocle2ai/monocle/pull/52))
|
|
23
|
+
- Metamodel entity changes ([#51](https://github.com/monocle2ai/monocle/pull/51)), ([#54](https://github.com/monocle2ai/monocle/pull/54))
|
|
24
|
+
- Error handling for llm_endpoint and tags ([#50](https://github.com/monocle2ai/monocle/pull/50))
|
|
25
|
+
- Context_output for vector store retriever ([#48](https://github.com/monocle2ai/monocle/pull/48))
|
|
26
|
+
- Direct exporter - AWS S3 ([#42](https://github.com/monocle2ai/monocle/pull/42))
|
|
27
|
+
- Direct Exporter - Blob store ([#41](https://github.com/monocle2ai/monocle/pull/41))
|
|
28
|
+
- Initial metamodel definition ([#39](https://github.com/monocle2ai/monocle/pull/39))
|
|
29
|
+
- Improvement in vectorstore traces ([#38](https://github.com/monocle2ai/monocle/pull/38))
|
|
30
|
+
- Update key for session context field in attributes ([#34](https://github.com/monocle2ai/monocle/pull/34))
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
## 0.1.0 (Aug 27, 2024)
|
|
34
|
+
|
|
35
|
+
- Fixed LlamaIndex tracing bugs ([#32](https://github.com/monocle2ai/monocle/pull/32))
|
|
36
|
+
- Added support to add AWS cloud infra attributes ([#29](https://github.com/monocle2ai/monocle/pull/29))
|
|
37
|
+
- Added support to add Azure cloud infra attributes ([#23](https://github.com/monocle2ai/monocle/pull/23))
|
|
38
|
+
- Added support for adding provider name in LLM span in traces ([#22](https://github.com/monocle2ai/monocle/pull/22))
|
|
39
|
+
- Added a default file span exporter ([#21](https://github.com/monocle2ai/monocle/pull/21))
|
|
40
|
+
- Moved input and output context and prompts from attributes to events ([#15](https://github.com/monocle2ai/monocle/pull/15))
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
## 0.0.1 (Jul 17, 2024)
|
|
48
|
+
|
|
49
|
+
- First monocle release
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
#Monocle User Guide
|
|
1
|
+
# Monocle User Guide
|
|
2
2
|
|
|
3
3
|
## Monocle Concepts
|
|
4
4
|
### Traces
|
|
@@ -13,21 +13,42 @@ It’s typically the workflow code components of an application that generate th
|
|
|
13
13
|
```
|
|
14
14
|
> pip install monocle_apptrace
|
|
15
15
|
```
|
|
16
|
+
|
|
17
|
+
- For Azure support (to upload traces to Azure), install with the azure extra:
|
|
18
|
+
```
|
|
19
|
+
> pip install monocle_apptrace[azure]
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
- For AWS support (to upload traces to AWS), install with the aws extra:
|
|
23
|
+
```
|
|
24
|
+
> pip install monocle_apptrace[aws]
|
|
25
|
+
```
|
|
26
|
+
|
|
16
27
|
- You can locally build and install Monocle library from source
|
|
17
28
|
```
|
|
18
|
-
> pip install .
|
|
29
|
+
> pip install .
|
|
19
30
|
```
|
|
20
31
|
- Install the optional test dependencies listed against dev in pyproject.toml in editable mode
|
|
21
32
|
```
|
|
22
|
-
> pip install -e ".[dev]"
|
|
33
|
+
> pip install -e ".[dev]"
|
|
23
34
|
```
|
|
24
35
|
|
|
36
|
+
|
|
25
37
|
## Using Monocle with your application to generate traces
|
|
26
38
|
### Enable Monocle tracing
|
|
27
39
|
You need to import monocle package and invoke the API ``setup_monocle_telemetry(workflow=<workflow-name>)`` to enable the tracing. The 'workflow-name' is what you define to identify the given application workflow, for example "customer-chatbot". Monocle trace will include this name in every trace. The trace output will include a list of spans in the traces. You can print the output on the console or send it to an HTTP endpoint.
|
|
28
40
|
|
|
29
41
|
### Using Monocle's out of box support of genAI technology components
|
|
30
42
|
Monocle community has done the hard work of figuring out what to trace and how to extract relevant details from multiple genAI technology components. For example, if you have a python app coded using LlamaIndex and using models hosted in OpenAI, Monocle can seamlessly trace your app. All you need to do is enable Monocle tracing.
|
|
43
|
+
|
|
44
|
+
### Using Monocle's Support for Adding Custom Attributes
|
|
45
|
+
Monocle provides users with the ability to add custom attributes to various spans, such as inference and retrieval spans, by utilizing the output processor within its metamodel. This feature allows for dynamic attribute assignment through lambda functions, which operate on an arguments dictionary.
|
|
46
|
+
The arguments dictionary contains key-value pairs that can be used to compute custom attributes. The dictionary includes the following components:
|
|
47
|
+
```python
|
|
48
|
+
arguments = {"instance":instance, "args":args, "kwargs":kwargs, "output":return_value}
|
|
49
|
+
```
|
|
50
|
+
By leveraging this dictionary, users can define custom attributes for spans, enabling the integration of additional context and information into the tracing process. The lambda functions used in the attributes field can access and process these values to enrich the span with relevant custom data.
|
|
51
|
+
|
|
31
52
|
#### Example - Enable Monocle tracing in your application
|
|
32
53
|
```python
|
|
33
54
|
from monocle_apptrace.instrumentor import setup_monocle_telemetry
|
|
@@ -48,7 +69,7 @@ chain.invoke({"number":2})
|
|
|
48
69
|
# Request callbacks: Finally, let's use the request `callbacks` to achieve the same result
|
|
49
70
|
chain = LLMChain(llm=llm, prompt=prompt)
|
|
50
71
|
chain.invoke({"number":2}, {"callbacks":[handler]})
|
|
51
|
-
|
|
72
|
+
|
|
52
73
|
```
|
|
53
74
|
|
|
54
75
|
### Accessing monocle trace
|
|
@@ -63,6 +84,12 @@ setup_monocle_telemetry(workflow_name = "simple_math_app",
|
|
|
63
84
|
```
|
|
64
85
|
To print the trace on the console, use ```ConsoleSpanExporter()``` instead of ```FileSpanExporter()```
|
|
65
86
|
|
|
87
|
+
For Azure:
|
|
88
|
+
Install the Azure support as shown in the setup section, then use ```AzureBlobSpanExporter()``` to upload the traces to Azure.
|
|
89
|
+
|
|
90
|
+
For AWS:
|
|
91
|
+
Install the AWS support as shown in the setup section, then use ```S3SpanExporter()``` to upload the traces to an S3 bucket.
|
|
92
|
+
|
|
66
93
|
### Leveraging Monocle's extensibility to handle customization
|
|
67
94
|
When the out of box features from app frameworks are not sufficient, the app developers have to add custom code. For example, if you are extending a LLM class in LlamaIndex to use a model hosted in NVIDIA Triton. This new class is not known to Monocle. You can specify this new class method as part of Monocle enabling API and it will be able to trace it.
|
|
68
95
|
|
|
@@ -1,11 +1,9 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: monocle_apptrace
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.3.0b1
|
|
4
4
|
Summary: package with monocle genAI tracing
|
|
5
5
|
Project-URL: Homepage, https://github.com/monocle2ai/monocle
|
|
6
6
|
Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
|
|
7
|
-
License-File: LICENSE
|
|
8
|
-
License-File: NOTICE
|
|
9
7
|
Classifier: License :: OSI Approved :: MIT License
|
|
10
8
|
Classifier: Operating System :: OS Independent
|
|
11
9
|
Classifier: Programming Language :: Python :: 3
|
|
@@ -15,17 +13,24 @@ Requires-Dist: opentelemetry-instrumentation
|
|
|
15
13
|
Requires-Dist: opentelemetry-sdk>=1.21.0
|
|
16
14
|
Requires-Dist: requests
|
|
17
15
|
Requires-Dist: wrapt>=1.14.0
|
|
16
|
+
Provides-Extra: aws
|
|
17
|
+
Requires-Dist: boto3==1.35.19; extra == 'aws'
|
|
18
|
+
Provides-Extra: azure
|
|
19
|
+
Requires-Dist: azure-storage-blob==12.22.0; extra == 'azure'
|
|
18
20
|
Provides-Extra: dev
|
|
19
21
|
Requires-Dist: datasets==2.20.0; extra == 'dev'
|
|
20
22
|
Requires-Dist: faiss-cpu==1.8.0; extra == 'dev'
|
|
21
23
|
Requires-Dist: instructorembedding==1.0.1; extra == 'dev'
|
|
22
24
|
Requires-Dist: langchain-chroma==0.1.1; extra == 'dev'
|
|
23
25
|
Requires-Dist: langchain-community==0.2.5; extra == 'dev'
|
|
26
|
+
Requires-Dist: langchain-mistralai==0.1.13; extra == 'dev'
|
|
24
27
|
Requires-Dist: langchain-openai==0.1.8; extra == 'dev'
|
|
25
28
|
Requires-Dist: langchain==0.2.5; extra == 'dev'
|
|
26
29
|
Requires-Dist: llama-index-embeddings-huggingface==0.2.0; extra == 'dev'
|
|
30
|
+
Requires-Dist: llama-index-llms-mistralai==0.1.20; extra == 'dev'
|
|
27
31
|
Requires-Dist: llama-index-vector-stores-chroma==0.1.9; extra == 'dev'
|
|
28
32
|
Requires-Dist: llama-index==0.10.30; extra == 'dev'
|
|
33
|
+
Requires-Dist: mistral-haystack==0.0.2; extra == 'dev'
|
|
29
34
|
Requires-Dist: numpy==1.26.4; extra == 'dev'
|
|
30
35
|
Requires-Dist: parameterized==0.9.0; extra == 'dev'
|
|
31
36
|
Requires-Dist: pytest==8.0.0; extra == 'dev'
|
|
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "monocle_apptrace"
|
|
7
|
-
version = "0.
|
|
7
|
+
version = "0.3.0b1"
|
|
8
8
|
authors = []
|
|
9
9
|
description = "package with monocle genAI tracing"
|
|
10
10
|
readme = "README.md"
|
|
@@ -43,7 +43,18 @@ dev = [
|
|
|
43
43
|
'llama-index==0.10.30',
|
|
44
44
|
'llama-index-embeddings-huggingface==0.2.0',
|
|
45
45
|
'llama-index-vector-stores-chroma==0.1.9',
|
|
46
|
-
'parameterized==0.9.0'
|
|
46
|
+
'parameterized==0.9.0',
|
|
47
|
+
'llama-index-llms-mistralai==0.1.20',
|
|
48
|
+
'langchain-mistralai==0.1.13',
|
|
49
|
+
'mistral-haystack==0.0.2'
|
|
50
|
+
]
|
|
51
|
+
|
|
52
|
+
azure = [
|
|
53
|
+
'azure-storage-blob==12.22.0',
|
|
54
|
+
]
|
|
55
|
+
|
|
56
|
+
aws = [
|
|
57
|
+
'boto3==1.35.19',
|
|
47
58
|
]
|
|
48
59
|
|
|
49
60
|
[project.urls]
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import os
from monocle_apptrace.utils import get_wrapper_methods_config

# Root of the monocle_apptrace package; the metamodel JSON maps live under it.
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
# Wrapper-method configuration for botocore instrumentation, loaded from the
# packaged metamodel map (metamodel/maps/botocore_methods.json).
BOTOCORE_METHODS = get_wrapper_methods_config(
    wrapper_methods_config_path=os.path.join(parent_dir, 'metamodel', 'maps', 'botocore_methods.json'),
    attributes_config_base_path=os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
|
8
|
+
|
|
9
|
+
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# Environment variables whose presence indicates which cloud runtime the
# instrumented application is hosted on.
AZURE_ML_ENDPOINT_ENV_NAME = "AZUREML_ENTRY_SCRIPT"
AZURE_FUNCTION_WORKER_ENV_NAME = "FUNCTIONS_WORKER_RUNTIME"
AZURE_APP_SERVICE_ENV_NAME = "WEBSITE_SITE_NAME"
AWS_LAMBDA_ENV_NAME = "AWS_LAMBDA_RUNTIME_API"
GITHUB_CODESPACE_ENV_NAME = "CODESPACES"

# Environment variables that carry the identifier (name) of the specific
# hosting service instance, looked up once the service type is known.
AWS_LAMBDA_FUNCTION_IDENTIFIER_ENV_NAME = "AWS_LAMBDA_FUNCTION_NAME"
AZURE_FUNCTION_IDENTIFIER_ENV_NAME = "WEBSITE_SITE_NAME"
AZURE_APP_SERVICE_IDENTIFIER_ENV_NAME = "WEBSITE_DEPLOYMENT_ID"
GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
# Short service-type labels. Azure naming reference can be found here:
# https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
AZURE_FUNCTION_NAME = "azure.func"
AZURE_APP_SERVICE_NAME = "azure.asp"
AZURE_ML_SERVICE_NAME = "azure.mlw"
AWS_LAMBDA_SERVICE_NAME = "aws.lambda"
GITHUB_CODESPACE_SERVICE_NAME = "github_codespace"

# Env variables to identify infra service type:
# maps a detection env var (present in the runtime) -> service-type label.
service_type_map = {
    AZURE_ML_ENDPOINT_ENV_NAME: AZURE_ML_SERVICE_NAME,
    AZURE_APP_SERVICE_ENV_NAME: AZURE_APP_SERVICE_NAME,
    AZURE_FUNCTION_WORKER_ENV_NAME: AZURE_FUNCTION_NAME,
    AWS_LAMBDA_ENV_NAME: AWS_LAMBDA_SERVICE_NAME,
    GITHUB_CODESPACE_ENV_NAME: GITHUB_CODESPACE_SERVICE_NAME
}

# Env variables to identify infra service name:
# maps a service-type label -> env var that holds that service's instance name.
service_name_map = {
    AZURE_APP_SERVICE_NAME: AZURE_APP_SERVICE_IDENTIFIER_ENV_NAME,
    AZURE_FUNCTION_NAME: AZURE_FUNCTION_IDENTIFIER_ENV_NAME,
    AZURE_ML_SERVICE_NAME: AZURE_ML_ENDPOINT_ENV_NAME,
    AWS_LAMBDA_SERVICE_NAME: AWS_LAMBDA_FUNCTION_IDENTIFIER_ENV_NAME,
    GITHUB_CODESPACE_SERVICE_NAME: GITHUB_CODESPACE_IDENTIFIER_ENV_NAME
}
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
import random
|
|
4
|
+
import datetime
|
|
5
|
+
import logging
|
|
6
|
+
import asyncio
|
|
7
|
+
import boto3
|
|
8
|
+
from botocore.exceptions import ClientError
|
|
9
|
+
from botocore.exceptions import (
|
|
10
|
+
BotoCoreError,
|
|
11
|
+
ConnectionClosedError,
|
|
12
|
+
ConnectTimeoutError,
|
|
13
|
+
EndpointConnectionError,
|
|
14
|
+
ReadTimeoutError,
|
|
15
|
+
)
|
|
16
|
+
from opentelemetry.sdk.trace import ReadableSpan
|
|
17
|
+
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
|
|
18
|
+
from monocle_apptrace.exporters.base_exporter import SpanExporterBase
|
|
19
|
+
from typing import Sequence
|
|
20
|
+
import json
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
class S3SpanExporter(SpanExporterBase):
    """Export OpenTelemetry spans to an AWS S3 bucket as ndjson objects.

    Spans are queued and uploaded in batches: a batch is flushed when the
    queue reaches ``max_batch_size`` or when ``export_interval`` seconds have
    elapsed since the last flush. Each batch becomes one S3 object named
    ``<file_prefix><timestamp>.ndjson``.
    """

    def __init__(self, bucket_name=None, region_name=None):
        """Create the exporter and ensure the target bucket exists.

        Args:
            bucket_name: Target bucket; falls back to the
                MONOCLE_S3_BUCKET_NAME env var, then 'default-bucket'.
            region_name: AWS region for the client and for bucket creation.

        Raises:
            ClientError: if bucket creation fails.
            PermissionError / ValueError: propagated from the existence check.
        """
        super().__init__()
        # Object-name defaults, e.g. monocle_trace_2024-12-10_12.30.00.ndjson
        DEFAULT_FILE_PREFIX = "monocle_trace_"
        DEFAULT_TIME_FORMAT = "%Y-%m-%d_%H.%M.%S"
        self.max_batch_size = 500
        self.export_interval = 1  # seconds between time-based flushes
        # Use environment variables if credentials are not provided.
        self.s3_client = boto3.client(
            's3',
            aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
            aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
            region_name=region_name,
        )
        self.bucket_name = bucket_name or os.getenv('MONOCLE_S3_BUCKET_NAME', 'default-bucket')
        self.file_prefix = DEFAULT_FILE_PREFIX
        self.time_format = DEFAULT_TIME_FORMAT
        self.export_queue = []
        self.last_export_time = time.time()

        # Check if bucket exists or create it.
        if not self.__bucket_exists(self.bucket_name):
            try:
                if region_name and region_name != 'us-east-1':
                    self.s3_client.create_bucket(
                        Bucket=self.bucket_name,
                        CreateBucketConfiguration={'LocationConstraint': region_name}
                    )
                else:
                    # us-east-1 (or an unspecified region) must NOT pass a
                    # LocationConstraint; AWS rejects it with
                    # InvalidLocationConstraint.
                    self.s3_client.create_bucket(Bucket=self.bucket_name)
                logger.info(f"Bucket {self.bucket_name} created successfully.")
            except ClientError as e:
                logger.error(f"Error creating bucket {self.bucket_name}: {e}")
                raise e

    def __bucket_exists(self, bucket_name):
        """Return True if the bucket exists and is accessible.

        Raises PermissionError on 403, ValueError on 400, and re-raises any
        other client error; returns False only on a clean 404.
        """
        try:
            # Check if the bucket exists by calling head_bucket.
            self.s3_client.head_bucket(Bucket=bucket_name)
            return True
        except ClientError as e:
            error_code = e.response['Error']['Code']
            if error_code == '404':
                # Bucket not found
                logger.error(f"Bucket {bucket_name} does not exist (404).")
                return False
            elif error_code == '403':
                # Permission denied
                logger.error(f"Access to bucket {bucket_name} is forbidden (403).")
                raise PermissionError(f"Access to bucket {bucket_name} is forbidden.")
            elif error_code == '400':
                # Bad request or malformed input
                logger.error(f"Bad request for bucket {bucket_name} (400).")
                raise ValueError(f"Bad request for bucket {bucket_name}.")
            else:
                # Other client errors
                logger.error(f"Unexpected error when accessing bucket {bucket_name}: {e}")
                raise e
        except TypeError as e:
            # Handle TypeError separately (e.g. a malformed bucket name).
            logger.error(f"Type error while checking bucket existence: {e}")
            raise e

    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Synchronous export method that internally handles async logic."""
        try:
            # Run the asynchronous export logic in an event loop.
            # NOTE(review): asyncio.run fails if called from a running event
            # loop — presumably export() is only invoked from sync contexts.
            asyncio.run(self.__export_async(spans))
            return SpanExportResult.SUCCESS
        except Exception as e:
            logger.error(f"Error exporting spans: {e}")
            return SpanExportResult.FAILURE

    async def __export_async(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Queue spans and flush by batch size or elapsed interval."""
        try:
            # Add spans to the export queue.
            for span in spans:
                self.export_queue.append(span)
                # If the queue reaches max_batch_size, export the spans.
                if len(self.export_queue) >= self.max_batch_size:
                    await self.__export_spans()

            # Check if it's time to force a flush.
            current_time = time.time()
            if current_time - self.last_export_time >= self.export_interval:
                await self.__export_spans()  # Export spans if time interval has passed
                self.last_export_time = current_time  # Reset the last export time

            return SpanExportResult.SUCCESS
        except Exception as e:
            logger.error(f"Error exporting spans: {e}")
            return SpanExportResult.FAILURE

    def __serialize_spans(self, spans: Sequence[ReadableSpan]) -> str:
        """Serialize spans to ndjson (one JSON document per line).

        Spans that fail to serialize are skipped with a warning. Returns an
        empty string (never None) if serialization fails entirely, so callers
        can safely test truthiness.
        """
        try:
            valid_json_list = []
            for span in spans:
                try:
                    # Collapse each span's JSON onto a single line for ndjson.
                    valid_json_list.append(span.to_json(indent=0).replace("\n", ""))
                except json.JSONDecodeError as e:
                    logger.warning(f"Invalid JSON format in span data: {span.context.span_id}. Error: {e}")
                    continue
            if not valid_json_list:
                return ""
            return "\n".join(valid_json_list) + "\n"
        except Exception as e:
            logger.warning(f"Error serializing spans: {e}")
            # Explicit empty result instead of an implicit None, which would
            # previously have been passed to put_object as Body=None.
            return ""

    async def __export_spans(self):
        """Drain one batch from the queue and upload it to S3."""
        if len(self.export_queue) == 0:
            return

        # Take a batch of spans from the queue.
        batch_to_export = self.export_queue[:self.max_batch_size]
        serialized_data = self.__serialize_spans(batch_to_export)
        self.export_queue = self.export_queue[self.max_batch_size:]
        if not serialized_data:
            return  # Nothing serializable in this batch; skip the upload.
        try:
            self.__upload_to_s3(serialized_data)
        except Exception as e:
            logger.error(f"Failed to upload span batch: {e}")

    @SpanExporterBase.retry_with_backoff(exceptions=(EndpointConnectionError, ConnectionClosedError, ReadTimeoutError, ConnectTimeoutError))
    def __upload_to_s3(self, span_data_batch: str):
        """Upload one ndjson batch as a timestamped S3 object (with retries)."""
        current_time = datetime.datetime.now().strftime(self.time_format)
        file_name = f"{self.file_prefix}{current_time}.ndjson"
        self.s3_client.put_object(
            Bucket=self.bucket_name,
            Key=file_name,
            Body=span_data_batch
        )
        logger.info(f"Span batch uploaded to AWS S3 as {file_name}.")

    async def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Export any remaining spans in the queue."""
        await self.__export_spans()
        return True

    def shutdown(self) -> None:
        logger.info("S3SpanExporter has been shut down.")
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
import random
|
|
4
|
+
import datetime
|
|
5
|
+
import logging
|
|
6
|
+
import asyncio
|
|
7
|
+
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
|
|
8
|
+
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError, ServiceRequestError
|
|
9
|
+
from opentelemetry.sdk.trace import ReadableSpan
|
|
10
|
+
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
|
|
11
|
+
from typing import Sequence
|
|
12
|
+
from monocle_apptrace.exporters.base_exporter import SpanExporterBase
|
|
13
|
+
import json
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
class AzureBlobSpanExporter(SpanExporterBase):
|
|
17
|
+
def __init__(self, connection_string=None, container_name=None):
|
|
18
|
+
super().__init__()
|
|
19
|
+
DEFAULT_FILE_PREFIX = "monocle_trace_"
|
|
20
|
+
DEFAULT_TIME_FORMAT = "%Y-%m-%d_%H.%M.%S"
|
|
21
|
+
self.max_batch_size = 500
|
|
22
|
+
self.export_interval = 1
|
|
23
|
+
# Use default values if none are provided
|
|
24
|
+
if not connection_string:
|
|
25
|
+
connection_string = os.getenv('MONOCLE_BLOB_CONNECTION_STRING')
|
|
26
|
+
if not connection_string:
|
|
27
|
+
raise ValueError("Azure Storage connection string is not provided or set in environment variables.")
|
|
28
|
+
|
|
29
|
+
if not container_name:
|
|
30
|
+
container_name = os.getenv('MONOCLE_BLOB_CONTAINER_NAME', 'default-container')
|
|
31
|
+
|
|
32
|
+
self.blob_service_client = BlobServiceClient.from_connection_string(connection_string)
|
|
33
|
+
self.container_name = container_name
|
|
34
|
+
self.file_prefix = DEFAULT_FILE_PREFIX
|
|
35
|
+
self.time_format = DEFAULT_TIME_FORMAT
|
|
36
|
+
|
|
37
|
+
# Check if container exists or create it
|
|
38
|
+
if not self.__container_exists(container_name):
|
|
39
|
+
try:
|
|
40
|
+
self.blob_service_client.create_container(container_name)
|
|
41
|
+
logger.info(f"Container {container_name} created successfully.")
|
|
42
|
+
except Exception as e:
|
|
43
|
+
logger.error(f"Error creating container {container_name}: {e}")
|
|
44
|
+
raise e
|
|
45
|
+
|
|
46
|
+
def __container_exists(self, container_name):
|
|
47
|
+
try:
|
|
48
|
+
container_client = self.blob_service_client.get_container_client(container_name)
|
|
49
|
+
container_client.get_container_properties()
|
|
50
|
+
return True
|
|
51
|
+
except ResourceNotFoundError:
|
|
52
|
+
logger.error(f"Container {container_name} not found (404).")
|
|
53
|
+
return False
|
|
54
|
+
except ClientAuthenticationError:
|
|
55
|
+
logger.error(f"Access to container {container_name} is forbidden (403).")
|
|
56
|
+
raise PermissionError(f"Access to container {container_name} is forbidden.")
|
|
57
|
+
except Exception as e:
|
|
58
|
+
logger.error(f"Unexpected error when checking if container {container_name} exists: {e}")
|
|
59
|
+
raise e
|
|
60
|
+
|
|
61
|
+
def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
|
|
62
|
+
"""Synchronous export method that internally handles async logic."""
|
|
63
|
+
try:
|
|
64
|
+
# Run the asynchronous export logic in an event loop
|
|
65
|
+
asyncio.run(self._export_async(spans))
|
|
66
|
+
return SpanExportResult.SUCCESS
|
|
67
|
+
except Exception as e:
|
|
68
|
+
logger.error(f"Error exporting spans: {e}")
|
|
69
|
+
return SpanExportResult.FAILURE
|
|
70
|
+
|
|
71
|
+
async def _export_async(self, spans: Sequence[ReadableSpan]):
|
|
72
|
+
"""The actual async export logic is run here."""
|
|
73
|
+
# Add spans to the export queue
|
|
74
|
+
for span in spans:
|
|
75
|
+
self.export_queue.append(span)
|
|
76
|
+
if len(self.export_queue) >= self.max_batch_size:
|
|
77
|
+
await self.__export_spans()
|
|
78
|
+
|
|
79
|
+
# Force a flush if the interval has passed
|
|
80
|
+
current_time = time.time()
|
|
81
|
+
if current_time - self.last_export_time >= self.export_interval:
|
|
82
|
+
await self.__export_spans()
|
|
83
|
+
self.last_export_time = current_time
|
|
84
|
+
|
|
85
|
+
def __serialize_spans(self, spans: Sequence[ReadableSpan]) -> str:
    """Serialize spans to NDJSON (one JSON object per line).

    Spans that fail to serialize are skipped with a warning so that one bad
    span cannot poison the whole batch.

    Returns:
        The NDJSON payload, or an empty string when no span serialized.
        (The original implementation could fall through and implicitly
        return None, violating the declared ``-> str`` contract and
        breaking the blob upload downstream.)
    """
    valid_json_list = []
    for span in spans:
        try:
            # Collapse the pretty-printed JSON onto one line for NDJSON.
            valid_json_list.append(span.to_json(indent=0).replace("\n", ""))
        except Exception as e:
            # BUG FIX: to_json() serializes rather than parses, so it raises
            # TypeError/ValueError and never json.JSONDecodeError; the old
            # handler could never match and serialization errors escaped.
            logger.warning(f"Invalid JSON format in span data: {span.context.span_id}. Error: {e}")
    if not valid_json_list:
        return ""
    return "\n".join(valid_json_list) + "\n"
|
|
99
|
+
|
|
100
|
+
async def __export_spans(self):
    """Flush up to `max_batch_size` queued spans to Azure Blob Storage."""
    if not self.export_queue:
        return

    # Carve off one batch and drop it from the queue before uploading.
    # NOTE(review): a failed upload therefore loses this batch — confirm that
    # at-most-once delivery is the intended semantics here.
    batch = self.export_queue[:self.max_batch_size]
    self.export_queue = self.export_queue[self.max_batch_size:]
    payload = self.__serialize_spans(batch)
    try:
        self.__upload_to_blob(payload)
    except Exception as e:
        logger.error(f"Failed to upload span batch: {e}")
|
|
111
|
+
|
|
112
|
+
@SpanExporterBase.retry_with_backoff(exceptions=(ResourceNotFoundError, ClientAuthenticationError, ServiceRequestError))
def __upload_to_blob(self, span_data_batch: str):
    """Upload one NDJSON batch to a timestamp-named blob, retrying transient Azure errors."""
    timestamp = datetime.datetime.now().strftime(self.time_format)
    file_name = f"{self.file_prefix}{timestamp}.ndjson"
    blob_client = self.blob_service_client.get_blob_client(container=self.container_name, blob=file_name)
    blob_client.upload_blob(span_data_batch, overwrite=True)
    logger.info(f"Span batch uploaded to Azure Blob Storage as {file_name}.")
|
|
119
|
+
|
|
120
|
+
async def force_flush(self, timeout_millis: int = 30000) -> bool:
    """Immediately flush any spans still waiting in the export queue.

    Args:
        timeout_millis: accepted for exporter-interface compatibility but
            currently unused — the flush runs to completion regardless.

    Returns:
        Always True.
    """
    await self.__export_spans()
    return True
|
|
123
|
+
|
|
124
|
+
def shutdown(self) -> None:
    """Log shutdown; performs no other cleanup.

    NOTE(review): spans still sitting in export_queue are not flushed here —
    confirm whether a final force_flush is expected before shutdown.
    """
    logger.info("AzureBlobSpanExporter has been shut down.")
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import asyncio
import functools
import logging
import random
import time
from abc import ABC, abstractmethod
from typing import Sequence

from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
|
|
9
|
+
|
|
10
|
+
# Module-level logger shared by the exporter base utilities.
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
class SpanExporterBase(ABC):
    """Common base for Monocle span exporters.

    Holds the shared batching state (export queue and last-flush timestamp)
    and provides the `retry_with_backoff` decorator that concrete exporters
    use to retry transient network failures with exponential backoff.
    """

    def __init__(self):
        # NOTE(review): backoff_factor/max_retries are never read by
        # retry_with_backoff, which takes its own parameters — confirm
        # whether these were meant to be the decorator defaults.
        self.backoff_factor = 2
        self.max_retries = 10
        self.export_queue = []          # spans buffered until a subclass flushes them
        self.last_export_time = time.time()

    @abstractmethod
    async def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Export a batch of spans; implemented by concrete exporters."""
        pass

    @abstractmethod
    async def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Flush any buffered spans; implemented by concrete exporters."""
        pass

    def shutdown(self) -> None:
        """Optional cleanup hook; the default is a no-op."""
        pass

    @staticmethod
    def retry_with_backoff(retries=3, backoff_in_seconds=1, max_backoff_in_seconds=32, exceptions=(Exception,)):
        """Decorator factory: retry the wrapped call on the given exceptions.

        Each failed attempt sleeps backoff_in_seconds * 2**(attempt-1),
        capped at max_backoff_in_seconds, with +/-10% jitter. After `retries`
        failed attempts an Exception is raised, chained to the last
        underlying error.
        """
        def decorator(func):
            @functools.wraps(func)  # preserve the wrapped function's identity for logs/introspection
            def wrapper(*args, **kwargs):
                last_error = None
                for attempt in range(1, retries + 1):
                    try:
                        return func(*args, **kwargs)
                    except exceptions as e:
                        last_error = e
                        sleep_time = min(max_backoff_in_seconds, backoff_in_seconds * (2 ** (attempt - 1)))
                        sleep_time = sleep_time * (1 + random.uniform(-0.1, 0.1))  # Add jitter
                        logger.warning(f"Network connectivity error, Attempt {attempt} failed: {e}. Retrying in {sleep_time:.2f} seconds...")
                        time.sleep(sleep_time)
                # BUG FIX: chain the final error so the root cause is not lost.
                raise Exception(f"Failed after {retries} attempts") from last_error

            return wrapper

        return decorator
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import logging
from abc import ABC, abstractmethod
from typing import Any, Callable
|
|
4
|
+
|
|
5
|
+
# Module-level logger for export task processing diagnostics.
logger = logging.getLogger(__name__)
|
|
6
|
+
|
|
7
|
+
class ExportTaskProcessor(ABC):
    """Interface for components that execute span-export tasks asynchronously
    (e.g. on a background queue) on behalf of an exporter."""

    @abstractmethod
    def start(self):
        """Begin accepting queued tasks."""
        return

    @abstractmethod
    def stop(self):
        """Stop processing and release any resources."""
        return

    @abstractmethod
    def queue_task(self, async_task: Callable[[Callable, Any], Any] = None, args: Any = None):
        """Enqueue `async_task` for later execution with `args`.

        Note: the annotations previously used the builtin function ``any``
        where ``typing.Any`` was intended; fixed here.
        """
        return
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from typing import Dict, Any
|
|
2
|
+
import os, warnings
|
|
3
|
+
from importlib import import_module
|
|
4
|
+
from opentelemetry.sdk.trace.export import SpanExporter, ConsoleSpanExporter
|
|
5
|
+
from monocle_apptrace.exporters.file_exporter import FileSpanExporter
|
|
6
|
+
|
|
7
|
+
# Registry of supported exporter backends, keyed by the value of the
# MONOCLE_EXPORTER environment variable. Entries are resolved lazily by
# get_monocle_exporter() via importlib, so optional backends (S3, Azure
# Blob, Okahu) need not be installed unless selected.
monocle_exporters:Dict[str, Any] = {
    "s3": {"module": "monocle_apptrace.exporters.aws.s3_exporter", "class": "S3SpanExporter"},
    "blob": {"module":"monocle_apptrace.exporters.azure.blob_exporter", "class": "AzureBlobSpanExporter"},
    "okahu": {"module":"monocle_apptrace.exporters.okahu.okahu_exporter", "class": "OkahuSpanExporter"},
    "file": {"module":"monocle_apptrace.exporters.file_exporter", "class": "FileSpanExporter"}
}
|
|
13
|
+
|
|
14
|
+
def get_monocle_exporter() -> SpanExporter:
    """Build the span exporter selected by the MONOCLE_EXPORTER env variable.

    Falls back to FileSpanExporter (with a warning) when the setting names an
    unknown exporter, and to ConsoleSpanExporter when the selected exporter
    fails to import or instantiate.
    """
    exporter_name = os.environ.get("MONOCLE_EXPORTER", "file")
    try:
        exporter_class_path = monocle_exporters[exporter_name]
    except KeyError:
        # BUG FIX: catch only the missing-key case; the previous broad
        # `except Exception` would also mask unrelated programming errors
        # as an "unsupported setting".
        warnings.warn(f"Unsupported Monocle span exporter setting {exporter_name}, using default FileSpanExporter.")
        return FileSpanExporter()
    try:
        exporter_module = import_module(exporter_class_path.get("module"))
        exporter_class = getattr(exporter_module, exporter_class_path.get("class"))
        return exporter_class()
    except Exception as ex:
        warnings.warn(f"Unable to set Monocle span exporter to {exporter_name}, error {ex}. Using ConsoleSpanExporter")
        return ConsoleSpanExporter()
|