monocle-apptrace 0.0.1__tar.gz → 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. monocle_apptrace-0.1.0/CHANGELOG.md +17 -0
  2. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/Monocle_User_Guide.md +18 -6
  3. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/PKG-INFO +3 -2
  4. monocle_apptrace-0.1.0/Pipfile.lock +46 -0
  5. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/README.md +1 -1
  6. monocle_apptrace-0.1.0/monocle_trace_file_test_0x8e5ff83c337b2649098910e1db3c8173_2024-08-27.json +24 -0
  7. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/pyproject.toml +3 -2
  8. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/README.md +2 -2
  9. monocle_apptrace-0.1.0/src/monocle_apptrace/__init__.py +0 -0
  10. monocle_apptrace-0.1.0/src/monocle_apptrace/constants.py +22 -0
  11. monocle_apptrace-0.1.0/src/monocle_apptrace/exporters/file_exporter.py +63 -0
  12. monocle_apptrace-0.1.0/src/monocle_apptrace/haystack/__init__.py +9 -0
  13. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/haystack/wrap_node.py +1 -1
  14. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/haystack/wrap_openai.py +1 -9
  15. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/haystack/wrap_pipeline.py +6 -9
  16. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/instrumentor.py +25 -28
  17. monocle_apptrace-0.1.0/src/monocle_apptrace/langchain/__init__.py +6 -0
  18. monocle_apptrace-0.1.0/src/monocle_apptrace/llamaindex/__init__.py +15 -0
  19. monocle_apptrace-0.1.0/src/monocle_apptrace/utils.py +73 -0
  20. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/wrap_common.py +97 -43
  21. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/src/monocle_apptrace/wrapper.py +3 -3
  22. monocle_apptrace-0.0.1/src/monocle_apptrace/haystack/__init__.py → monocle_apptrace-0.1.0/src/monocle_apptrace/wrapper_config/haystack_methods.json +11 -14
  23. monocle_apptrace-0.0.1/src/monocle_apptrace/langchain/__init__.py → monocle_apptrace-0.1.0/src/monocle_apptrace/wrapper_config/lang_chain_methods.json +32 -21
  24. monocle_apptrace-0.0.1/src/monocle_apptrace/llamaindex/__init__.py → monocle_apptrace-0.1.0/src/monocle_apptrace/wrapper_config/llama_index_methods.json +22 -23
  25. monocle_apptrace-0.1.0/storage/default__vector_store.json +1 -0
  26. monocle_apptrace-0.1.0/storage/docstore.json +1 -0
  27. monocle_apptrace-0.1.0/storage/index_store.json +1 -0
  28. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/tox.ini +1 -0
  29. monocle_apptrace-0.0.1/src/monocle_apptrace/__init__.py +0 -2
  30. monocle_apptrace-0.0.1/src/monocle_apptrace/utils.py +0 -53
  31. monocle_apptrace-0.0.1/storage/default__vector_store.json +0 -1
  32. monocle_apptrace-0.0.1/storage/docstore.json +0 -1
  33. monocle_apptrace-0.0.1/storage/index_store.json +0 -1
  34. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/.gitignore +0 -0
  35. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/CODEOWNERS.md +0 -0
  36. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/CODE_OF_CONDUCT.md +0 -0
  37. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/CONTRIBUTING.md +0 -0
  38. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/COPYRIGHT.template +0 -0
  39. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/LICENSE +0 -0
  40. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/MAINTAINER.md +0 -0
  41. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/Monocle_committer_guide.md +0 -0
  42. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/Monocle_contributor_guide.md +0 -0
  43. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/NOTICE +0 -0
  44. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/SECURITY.md +0 -0
  45. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/storage/graph_store.json +0 -0
  46. {monocle_apptrace-0.0.1 → monocle_apptrace-0.1.0}/storage/image__vector_store.json +0 -0
CHANGELOG.md
@@ -0,0 +1,17 @@
+ ## 0.1.0 (Aug 27, 2024)
+
+ - Fixed LlamaIndex tracing bugs ([#32](https://github.com/monocle2ai/monocle/pull/32))
+ - Added support to add AWS cloud infra attributes ([#29](https://github.com/monocle2ai/monocle/pull/29))
+ - Added support to add Azure cloud infra attributes ([#23](https://github.com/monocle2ai/monocle/pull/23))
+ - Added support for adding provider name in LLM span in traces ([#22](https://github.com/monocle2ai/monocle/pull/22))
+ - Added a default file span exporter ([#21](https://github.com/monocle2ai/monocle/pull/21))
+ - Moved input and output context and prompts from attributes to events ([#15](https://github.com/monocle2ai/monocle/pull/15))
+
+
+
+
+
+
+ ## 0.0.1 (Jul 17, 2024)
+
+ - First monocle release
Monocle_User_Guide.md
@@ -37,8 +37,7 @@ from langchain_openai import OpenAI
  from langchain.prompts import PromptTemplate

  # Call the setup Monocle telemetry method
- setup_monocle_telemetry(workflow_name = "simple_math_app",
- span_processors=[BatchSpanProcessor(ConsoleSpanExporter())])
+ setup_monocle_telemetry(workflow_name = "simple_math_app")

  llm = OpenAI()
  prompt = PromptTemplate.from_template("1 + {number} = ")
@@ -51,6 +50,19 @@ chain = LLMChain(llm=llm, prompt=prompt)
  chain.invoke({"number":2}, {"callbacks":[handler]})

  ```
+
+ ### Accessing monocle trace
+ By default, monocle generates traces in a json file created in the local directory where the application is running. The default file name is monocle_trace_{workflow_name}\_{trace_id}\_{timestamp}.json, where trace_id is a unique number generated by monocle for every trace. Please refer to [Trace span json](Monocle_User_Guide.md#trace-span-json). The file path and format can be changed by passing those properties as arguments to ```setup_monocle_telemetry()```. For example,
+ ```
+ setup_monocle_telemetry(workflow_name = "simple_math_app",
+ span_processors=[BatchSpanProcessor(FileSpanExporter(
+ out_path = "/tmp",
+ file_prefix = "map_app_prod_trace_",
+ time_format = "%Y-%m-%d"))
+ ])
+ ```
+ To print the trace on the console, use ```ConsoleSpanExporter()``` instead of ```FileSpanExporter()```.
+
  ### Leveraging Monocle's extensibility to handle customization
  When the out-of-the-box features from app frameworks are not sufficient, app developers have to add custom code. For example, you might extend an LLM class in LlamaIndex to use a model hosted in NVIDIA Triton. This new class is not known to Monocle. You can register this new class method through Monocle's enabling API, and Monocle will be able to trace it.

@@ -83,13 +95,13 @@ setup_monocle_telemetry(
  wrapper_methods=[
  WrapperMethod(
  package="langchain.schema.runnable",
- object="RunnableParallel",
+ object_name="RunnableParallel",
  method="invoke",
  span_name="langchain.workflow",
  wrapper=task_wrapper),
  WrapperMethod(
  package="langchain.schema.runnable",
- object="RunnableParallel",
+ object_name="RunnableParallel",
  method="ainvoke",
  span_name="langchain.workflow",
  wrapper=atask_wrapper)
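
The hunk above reflects an API change in 0.1.0: the `object` parameter of WrapperMethod is now `object_name`. A minimal sketch of registering a custom method under the new name (the module that exports task_wrapper is an assumption, not something this diff shows):

```python
# Sketch only: WrapperMethod and setup_monocle_telemetry are defined in this
# package; the import location of task_wrapper is assumed, not shown in the diff.
from monocle_apptrace.instrumentor import setup_monocle_telemetry
from monocle_apptrace.wrapper import WrapperMethod
from monocle_apptrace.wrap_common import task_wrapper  # assumed module for task_wrapper

setup_monocle_telemetry(
    workflow_name="my_custom_app",  # illustrative name
    wrapper_methods=[
        WrapperMethod(
            package="langchain.schema.runnable",
            object_name="RunnableParallel",  # was `object` in 0.0.1
            method="invoke",
            span_name="langchain.workflow",
            wrapper=task_wrapper,
        ),
    ],
)
```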
@@ -110,7 +122,7 @@ Monocle generates spans which adhere to [Tracing API | OpenTelemetry](https://op
  | Span JSON | Description |
  | ------------- | ------------- |
  | {||
- | "```name```": "langchain.workflow",|span name and is configurable in [__init.py__](src/monocle_apptrace/langchain/__init__.py) or in ```setup_okahu_telemetry(...)```|
+ | "```name```": "langchain.workflow",|span name and is configurable in [__init.py__](src/monocle_apptrace/langchain/__init__.py) or in ```setup_monocle_telemetry(...)```|
  | "```context```": {|this gets autogenerated|
  |   "```trace_id```": "0xe5269f0e534efa098b240f974220d6b7",||
  |   "```span_id```": "0x30b13075eca52f44",||
@@ -124,7 +136,7 @@ Monocle generates spans which adhere to [Tracing API | OpenTelemetry](https://op
  |  "```status_code```": "UNSET"| status of span to OK or ERROR. Default is UNSET|
  |  },||
  |"```attributes```": {||
- |  "workflow_name": "ml_rag_app",|defines the name of the service being set in ```setup_okahu_telemetry(...)``` during initialization of instrumentation|
+ |  "workflow_name": "ml_rag_app",|defines the name of the service being set in ```setup_monocle_telemetry(...)``` during initialization of instrumentation|
  |  "workflow_type": "workflow.langchain"|type of framework that generated this span|
  |  },||
  |"```events```": [|captures the log records|
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: monocle_apptrace
- Version: 0.0.1
+ Version: 0.1.0
  Summary: package with monocle genAI tracing
  Project-URL: Homepage, https://github.com/monocle2ai/monocle
  Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -27,6 +27,7 @@ Requires-Dist: llama-index-embeddings-huggingface==0.2.0; extra == 'dev'
  Requires-Dist: llama-index-vector-stores-chroma==0.1.9; extra == 'dev'
  Requires-Dist: llama-index==0.10.30; extra == 'dev'
  Requires-Dist: numpy==1.26.4; extra == 'dev'
+ Requires-Dist: parameterized==0.9.0; extra == 'dev'
  Requires-Dist: pytest==8.0.0; extra == 'dev'
  Requires-Dist: sentence-transformers==2.6.1; extra == 'dev'
  Requires-Dist: types-requests==2.31.0.20240106; extra == 'dev'
@@ -63,7 +64,7 @@ The traces are compatible with OpenTelemetry format. They are further enriched t
  ```
  - Enable Monocle tracing in your app by adding following
  ```
- setup_okahu_telemetry(workflow_name="your-app-name")
+ setup_monocle_telemetry(workflow_name="your-app-name")
  ```
  Please refer to [Monocle user guide](Monocle_User_Guide.md) for more details

Pipfile.lock
@@ -0,0 +1,46 @@
+ {
+ "_meta": {
+ "hash": {
+ "sha256": "48ca59749d628df4416e69ea793ec7181fe158afbd8741c25e6aa27d8074bbb1"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.11"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "build": {
+ "hashes": [
+ "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d",
+ "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==1.2.1"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002",
+ "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==24.1"
+ },
+ "pyproject-hooks": {
+ "hashes": [
+ "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965",
+ "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"
+ ],
+ "markers": "python_version >= '3.7'",
+ "version": "==1.1.0"
+ }
+ },
+ "develop": {}
+ }
README.md
@@ -29,7 +29,7 @@ The traces are compatible with OpenTelemetry format. They are further enriched t
  ```
  - Enable Monocle tracing in your app by adding following
  ```
- setup_okahu_telemetry(workflow_name="your-app-name")
+ setup_monocle_telemetry(workflow_name="your-app-name")
  ```
  Please refer to [Monocle user guide](Monocle_User_Guide.md) for more details

monocle_trace_file_test_0x8e5ff83c337b2649098910e1db3c8173_2024-08-27.json
@@ -0,0 +1,24 @@
+ {
+ "name": "dummy.span",
+ "context": {
+ "trace_id": "0x8e5ff83c337b2649098910e1db3c8173",
+ "span_id": "0x2bbee7a930714909",
+ "trace_state": "[]"
+ },
+ "kind": "SpanKind.INTERNAL",
+ "parent_id": null,
+ "start_time": "2024-08-27T06:18:15.315360Z",
+ "end_time": "2024-08-27T06:18:15.315549Z",
+ "status": {
+ "status_code": "UNSET"
+ },
+ "attributes": {},
+ "events": [],
+ "links": [],
+ "resource": {
+ "attributes": {
+ "service.name": "file_test"
+ },
+ "schema_url": ""
+ }
+ }
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

  [project]
  name = "monocle_apptrace"
- version = "0.0.1"
+ version = "0.1.0"
  authors = []
  description = "package with monocle genAI tracing"
  readme = "README.md"
@@ -42,7 +42,8 @@ dev = [
  'pytest==8.0.0',
  'llama-index==0.10.30',
  'llama-index-embeddings-huggingface==0.2.0',
- 'llama-index-vector-stores-chroma==0.1.9'
+ 'llama-index-vector-stores-chroma==0.1.9',
+ 'parameterized==0.9.0'
  ]

  [project.urls]
src/monocle_apptrace/README.md
@@ -62,13 +62,13 @@ setup_monocle_telemetry(
  wrapper_methods=[
  WrapperMethod(
  package="langchain.schema.runnable",
- object="RunnableParallel",
+ object_name="RunnableParallel",
  method="invoke",
  span_name="langchain.workflow",
  wrapper=task_wrapper),
  WrapperMethod(
  package="langchain.schema.runnable",
- object="RunnableParallel",
+ object_name="RunnableParallel",
  method="ainvoke",
  span_name="langchain.workflow",
  wrapper=atask_wrapper)
src/monocle_apptrace/constants.py
@@ -0,0 +1,22 @@
+ # Azure environment constants
+ AZURE_ML_ENDPOINT_ENV_NAME = "AZUREML_ENTRY_SCRIPT"
+ AZURE_FUNCTION_WORKER_ENV_NAME = "FUNCTIONS_WORKER_RUNTIME"
+ AZURE_APP_SERVICE_ENV_NAME = "WEBSITE_SITE_NAME"
+ AWS_LAMBDA_ENV_NAME = "AWS_LAMBDA_RUNTIME_API"
+
+ # Azure naming reference can be found here
+ # https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
+ AZURE_FUNCTION_NAME = "azure.func"
+ AZURE_APP_SERVICE_NAME = "azure.asp"
+ AZURE_ML_SERVICE_NAME = "azure.mlw"
+ AWS_LAMBDA_SERVICE_NAME = "aws.lambda"
+
+ azure_service_map = {
+     AZURE_ML_ENDPOINT_ENV_NAME: AZURE_ML_SERVICE_NAME,
+     AZURE_APP_SERVICE_ENV_NAME: AZURE_APP_SERVICE_NAME,
+     AZURE_FUNCTION_WORKER_ENV_NAME: AZURE_FUNCTION_NAME
+ }
+
+ aws_service_map = {
+     AWS_LAMBDA_ENV_NAME: AWS_LAMBDA_SERVICE_NAME
+ }
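
These constants map runtime-specific environment variables to short infra service names; update_span_with_infra_name in utils.py (later in this diff) applies the same lookup to a span. A hedged sketch of the lookup on its own, where detect_infra_names is a helper made up for illustration:

```python
import os

from monocle_apptrace.constants import aws_service_map, azure_service_map


def detect_infra_names() -> list:
    """Return infra service names whose marker env vars are set (illustrative helper)."""
    detected = []
    for env_var, service_name in {**azure_service_map, **aws_service_map}.items():
        if env_var in os.environ:
            detected.append(service_name)
    return detected
```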
src/monocle_apptrace/exporters/file_exporter.py
@@ -0,0 +1,63 @@
+ #pylint: disable=consider-using-with
+
+ from os import linesep, path
+ from io import TextIOWrapper
+ from datetime import datetime
+ from typing import Optional, Callable, Sequence
+ from opentelemetry.sdk.trace import ReadableSpan
+ from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
+ from opentelemetry.sdk.resources import SERVICE_NAME
+
+ DEFAULT_FILE_PREFIX:str = "monocle_trace_"
+ DEFAULT_TIME_FORMAT:str = "%Y-%m-%d_%H.%M.%S"
+
+ class FileSpanExporter(SpanExporter):
+     current_trace_id: int = None
+     current_file_path: str = None
+
+     def __init__(
+         self,
+         service_name: Optional[str] = None,
+         out_path:str = ".",
+         file_prefix = DEFAULT_FILE_PREFIX,
+         time_format = DEFAULT_TIME_FORMAT,
+         formatter: Callable[
+             [ReadableSpan], str
+         ] = lambda span: span.to_json()
+         + linesep,
+     ):
+         self.out_handle:TextIOWrapper = None
+         self.formatter = formatter
+         self.service_name = service_name
+         self.output_path = out_path
+         self.file_prefix = file_prefix
+         self.time_format = time_format
+
+     def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+         for span in spans:
+             if span.context.trace_id != self.current_trace_id:
+                 self.rotate_file(span.resource.attributes[SERVICE_NAME],
+                     span.context.trace_id)
+             self.out_handle.write(self.formatter(span))
+         self.out_handle.flush()
+         return SpanExportResult.SUCCESS
+
+     def rotate_file(self, trace_name:str, trace_id:int) -> None:
+         self.reset_handle()
+         self.current_file_path = path.join(self.output_path,
+             self.file_prefix + trace_name + "_" + hex(trace_id) + "_"
+             + datetime.now().strftime(self.time_format) + ".json")
+         self.out_handle = open(self.current_file_path, "w", encoding='UTF-8')
+         self.current_trace_id = trace_id
+
+     def force_flush(self, timeout_millis: int = 30000) -> bool:
+         self.out_handle.flush()
+         return True
+
+     def reset_handle(self) -> None:
+         if self.out_handle is not None:
+             self.out_handle.close()
+             self.out_handle = None
+
+     def shutdown(self) -> None:
+         self.reset_handle()
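
FileSpanExporter opens a new output file whenever the incoming trace id changes, so each trace lands in its own <prefix><service-name>_<trace-id-hex>_<timestamp>.json file. A minimal sketch of wiring it up through setup_monocle_telemetry, matching the user-guide snippet earlier in this diff (paths and prefix are illustrative):

```python
# Sketch: route Monocle traces to per-trace JSON files under /tmp.
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from monocle_apptrace.exporters.file_exporter import FileSpanExporter
from monocle_apptrace.instrumentor import setup_monocle_telemetry

setup_monocle_telemetry(
    workflow_name="simple_math_app",
    span_processors=[
        BatchSpanProcessor(
            FileSpanExporter(
                out_path="/tmp",                    # directory must already exist
                file_prefix="map_app_prod_trace_",  # file name prefix
                time_format="%Y-%m-%d",             # timestamp portion of the name
            )
        )
    ],
)
```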
src/monocle_apptrace/haystack/__init__.py
@@ -0,0 +1,9 @@
+
+ import os
+ import logging
+ from monocle_apptrace.utils import load_wrapper_from_config
+
+ logger = logging.getLogger(__name__)
+ parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+ HAYSTACK_METHODS = load_wrapper_from_config(
+     os.path.join(parent_dir, 'wrapper_config', 'haystack_methods.json'))
src/monocle_apptrace/haystack/wrap_node.py
@@ -21,7 +21,7 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
  workflow_name = span.resource.attributes.get("service.name")
  span.set_attribute("workflow_name",workflow_name)
  span.set_attribute("workflow_type", WORKFLOW_TYPE_MAP["haystack"])
-
+
  response = wrapped(*args, **kwargs)

  return response
src/monocle_apptrace/haystack/wrap_openai.py
@@ -1,5 +1,3 @@
-
-
  import logging
  from opentelemetry import context as context_api
  from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
@@ -17,9 +15,7 @@ def _set_input_attributes(span, kwargs, instance, args):

  if 'model' in instance.__dict__:
  model_name = instance.__dict__.get("model")
- set_span_attribute(span, "openai_model_name", model_name)
-
- return
+ set_span_attribute(span, "model_name", model_name)

  @dont_throw
  def _set_response_attributes(span, response):
@@ -39,14 +35,10 @@ def wrap_openai(tracer, to_wrap, wrapped, instance, args, kwargs):
  with tracer.start_as_current_span("haystack.openai") as span:
  if span.is_recording():
  _set_input_attributes(span, kwargs, instance, args)
-
-
-
  response = wrapped(*args, **kwargs)

  if response:
  if span.is_recording():
  _set_response_attributes(span, response)
-

  return response
src/monocle_apptrace/haystack/wrap_pipeline.py
@@ -1,5 +1,3 @@
-
-
  import logging
  from opentelemetry import context as context_api
  from opentelemetry.context import attach, set_value
@@ -18,13 +16,12 @@ def wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
  name = "haystack_pipeline"
  attach(set_value("workflow_name", name))
  inputs = set()
- input = get_workflow_input(args, inputs)
+ workflow_input = get_workflow_input(args, inputs)

  with tracer.start_as_current_span(f"{name}.workflow") as span:
- span.set_attribute(PROMPT_INPUT_KEY, input)
+ span.set_attribute(PROMPT_INPUT_KEY, workflow_input)
  workflow_name = span.resource.attributes.get("service.name")
  set_workflow_attributes(span, workflow_name)
-
  response = wrapped(*args, **kwargs)
  set_workflow_output(span, response)
  return response
@@ -37,12 +34,12 @@ def get_workflow_input(args, inputs):
  for value in args[0].values():
  for text in value.values():
  inputs.add(text)
-
- input: str = ""
+
+ workflow_input: str = ""

  for input_str in inputs:
- input = input + input_str
- return input
+ workflow_input = workflow_input + input_str
+ return workflow_input

  def set_workflow_attributes(span, workflow_name):
  span.set_attribute("workflow_name",workflow_name)
src/monocle_apptrace/instrumentor.py
@@ -10,25 +10,26 @@ from opentelemetry.sdk.trace import TracerProvider, Span
  from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanProcessor
  from opentelemetry.sdk.resources import SERVICE_NAME, Resource
  from opentelemetry import trace
+ from opentelemetry.context import get_value, attach, set_value
  from monocle_apptrace.wrap_common import CONTEXT_PROPERTIES_KEY
  from monocle_apptrace.wrapper import INBUILT_METHODS_LIST, WrapperMethod
- from opentelemetry.context import get_value, attach, set_value
+ from monocle_apptrace.exporters.file_exporter import FileSpanExporter


  logger = logging.getLogger(__name__)

- _instruments = ("langchain >= 0.0.346",)
+ _instruments = ()

  class MonocleInstrumentor(BaseInstrumentor):
-
+
  workflow_name: str = ""
  user_wrapper_methods: list[WrapperMethod] = []
  instrumented_method_list: list[object] = []
-
+
  def __init__(
  self,
- user_wrapper_methods: list[WrapperMethod] = []) -> None:
- self.user_wrapper_methods = user_wrapper_methods
+ user_wrapper_methods: list[WrapperMethod] = None) -> None:
+ self.user_wrapper_methods = user_wrapper_methods or []
  super().__init__()

  def instrumentation_dependencies(self) -> Collection[str]:
@@ -63,11 +64,11 @@ class MonocleInstrumentor(BaseInstrumentor):
  self.instrumented_method_list.append(wrapped_method)
  except Exception as ex:
  if wrapped_method in user_method_list:
- logger.error(f"""_instrument wrap Exception: {str(ex)}
+ logger.error(f"""_instrument wrap Exception: {str(ex)}
  for package: {wrap_package},
  object:{wrap_object},
  method:{wrap_method}""")
-
+

  def _uninstrument(self, **kwargs):
  for wrapped_method in self.instrumented_method_list:
@@ -80,33 +81,33 @@ class MonocleInstrumentor(BaseInstrumentor):
  wrap_method,
  )
  except Exception as ex:
- logger.error(f"""_instrument unwrap Exception: {str(ex)}
+ logger.error(f"""_instrument unwrap Exception: {str(ex)}
  for package: {wrap_package},
  object:{wrap_object},
  method:{wrap_method}""")
-

  def setup_monocle_telemetry(
  workflow_name: str,
- span_processors: List[SpanProcessor] = [],
- wrapper_methods: List[WrapperMethod] = []):
+ span_processors: List[SpanProcessor] = None,
+ wrapper_methods: List[WrapperMethod] = None):
  resource = Resource(attributes={
  SERVICE_NAME: workflow_name
  })
- traceProvider = TracerProvider(resource=resource)
- tracerProviderDefault = trace.get_tracer_provider()
- providerType = type(tracerProviderDefault).__name__
- isProxyProvider = "Proxy" in providerType
+ span_processors = span_processors or [BatchSpanProcessor(FileSpanExporter())]
+ trace_provider = TracerProvider(resource=resource)
+ tracer_provider_default = trace.get_tracer_provider()
+ provider_type = type(tracer_provider_default).__name__
+ is_proxy_provider = "Proxy" in provider_type
  for processor in span_processors:
  processor.on_start = on_processor_start
- if not isProxyProvider:
- tracerProviderDefault.add_span_processor(processor)
+ if not is_proxy_provider:
+ tracer_provider_default.add_span_processor(processor)
  else :
- traceProvider.add_span_processor(processor)
- if isProxyProvider :
- trace.set_tracer_provider(traceProvider)
- instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods)
- instrumentor.app_name = workflow_name
+ trace_provider.add_span_processor(processor)
+ if is_proxy_provider :
+ trace.set_tracer_provider(trace_provider)
+ instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods or [])
+ # instrumentor.app_name = workflow_name
  if not instrumentor.is_instrumented_by_opentelemetry:
  instrumentor.instrument()

@@ -117,11 +118,7 @@ def on_processor_start(span: Span, parent_context):
  for key, value in context_properties.items():
  span.set_attribute(
  f"{CONTEXT_PROPERTIES_KEY}.{key}", value
- )
+ )

  def set_context_properties(properties: dict) -> None:
  attach(set_value(CONTEXT_PROPERTIES_KEY, properties))
-
-
-
-
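
setup_monocle_telemetry attaches on_processor_start to every span processor, so a dict registered through set_context_properties is stamped onto each new span as attributes under the context-properties prefix. A hedged usage sketch (the property names are illustrative):

```python
from monocle_apptrace.instrumentor import set_context_properties, setup_monocle_telemetry

setup_monocle_telemetry(workflow_name="ml_rag_app")

# Illustrative keys: spans started after this call carry these values as
# attributes prefixed with the context-properties key.
set_context_properties({"session_id": "abc-123", "tenant": "acme"})
```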
src/monocle_apptrace/langchain/__init__.py
@@ -0,0 +1,6 @@
+ import os
+ from monocle_apptrace.utils import load_wrapper_from_config
+
+ parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+ LANGCHAIN_METHODS = load_wrapper_from_config(
+     os.path.join(parent_dir, 'wrapper_config', 'lang_chain_methods.json'))
src/monocle_apptrace/llamaindex/__init__.py
@@ -0,0 +1,15 @@
+
+ #pylint: disable=protected-access
+ import os
+ from monocle_apptrace.utils import load_wrapper_from_config
+
+ def get_llm_span_name_for_openai(instance):
+     if (hasattr(instance, "_is_azure_client")
+             and callable(getattr(instance, "_is_azure_client"))
+             and instance._is_azure_client()):
+         return "llamaindex.azure_openai"
+     return "llamaindex.openai"
+
+ parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+ LLAMAINDEX_METHODS = load_wrapper_from_config(
+     os.path.join(parent_dir, 'wrapper_config', 'llama_index_methods.json'))
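
get_llm_span_name_for_openai picks the span name at runtime by probing the wrapped client object. A small illustration, where FakeAzureClient is a stand-in invented for the example:

```python
from monocle_apptrace.llamaindex import get_llm_span_name_for_openai


class FakeAzureClient:
    """Stand-in for an OpenAI client object; only the probe method matters here."""

    def _is_azure_client(self):
        return True


print(get_llm_span_name_for_openai(FakeAzureClient()))  # llamaindex.azure_openai
print(get_llm_span_name_for_openai(object()))           # llamaindex.openai
```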
src/monocle_apptrace/utils.py
@@ -0,0 +1,73 @@
+ import logging
+ import json
+ from importlib import import_module
+ import os
+ from opentelemetry.trace import Span
+ from monocle_apptrace.constants import azure_service_map, aws_service_map
+
+ def set_span_attribute(span, name, value):
+     if value is not None:
+         if value != "":
+             span.set_attribute(name, value)
+
+ def dont_throw(func):
+     """
+     A decorator that wraps the passed in function and logs exceptions instead of throwing them.
+
+     @param func: The function to wrap
+     @return: The wrapper function
+     """
+     # Obtain a logger specific to the function's module
+     logger = logging.getLogger(func.__module__)
+     # pylint: disable=inconsistent-return-statements
+     def wrapper(*args, **kwargs):
+         try:
+             return func(*args, **kwargs)
+         except Exception as ex:
+             logger.warning("Failed to execute %s, error: %s", func.__name__, str(ex))
+     return wrapper
+
+ def with_tracer_wrapper(func):
+     """Helper for providing tracer for wrapper functions."""
+
+     def _with_tracer(tracer, to_wrap):
+         def wrapper(wrapped, instance, args, kwargs):
+             return func(tracer, to_wrap, wrapped, instance, args, kwargs)
+
+         return wrapper
+
+     return _with_tracer
+
+ def resolve_from_alias(my_map, alias):
+     """Find a alias that is not none from list of aliases"""
+
+     for i in alias:
+         if i in my_map.keys():
+             return my_map[i]
+     return None
+
+ def load_wrapper_from_config(config_file_path: str, module_name: str = None):
+     wrapper_methods = []
+     with open(config_file_path, encoding='UTF-8') as config_file:
+         json_data = json.load(config_file)
+         wrapper_methods = json_data["wrapper_methods"]
+         for wrapper_method in wrapper_methods:
+             wrapper_method["wrapper"] = get_wrapper_method(
+                 wrapper_method["wrapper_package"], wrapper_method["wrapper_method"])
+             if "span_name_getter_method" in wrapper_method :
+                 wrapper_method["span_name_getter"] = get_wrapper_method(
+                     wrapper_method["span_name_getter_package"],
+                     wrapper_method["span_name_getter_method"])
+         return wrapper_methods
+
+ def get_wrapper_method(package_name: str, method_name: str):
+     wrapper_module = import_module("monocle_apptrace." + package_name)
+     return getattr(wrapper_module, method_name)
+
+ def update_span_with_infra_name(span: Span, span_key: str):
+     for key,val in azure_service_map.items():
+         if key in os.environ:
+             span.set_attribute(span_key, val)
+     for key,val in aws_service_map.items():
+         if key in os.environ:
+             span.set_attribute(span_key, val)
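
load_wrapper_from_config turns the string names in a wrapper_config JSON file into callables: each entry's wrapper_package/wrapper_method pair (and the optional span_name_getter_package/span_name_getter_method pair) is resolved against monocle_apptrace via get_wrapper_method. A hedged sketch of one entry's shape, written as a Python dict for illustration; the package, object, method, and span_name keys and the wrap_common location of task_wrapper are assumptions based on how WrapperMethod is used elsewhere in this diff:

```python
# Illustrative shape of one "wrapper_methods" entry; load_wrapper_from_config only
# interprets the *_package/*_method pairs, replacing them with resolved callables.
example_entry = {
    "package": "langchain.schema.runnable",   # assumed: package holding the target method
    "object": "RunnableParallel",             # assumed: target class
    "method": "invoke",                       # assumed: target method
    "span_name": "langchain.workflow",        # assumed: span name for the trace
    "wrapper_package": "wrap_common",         # resolved to monocle_apptrace.wrap_common
    "wrapper_method": "task_wrapper",         # assumed wrapper callable name
    # Optionally, a dynamic span-name resolver, e.g.:
    # "span_name_getter_package": "llamaindex",
    # "span_name_getter_method": "get_llm_span_name_for_openai",
}
```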