mingx 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mingx/decorator.py ADDED
@@ -0,0 +1,185 @@
1
+ """
2
+ Method-level automatic tracing via @traced decorator.
3
+
4
+ Supports sync and async; uses mingx._trace.get_tracer() for consistency
5
+ with global TracerProvider. 默认记录函数入参(输入)与返回值(输出)到当前 Span。
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import functools
11
+ import inspect
12
+ from typing import Any, Callable, Dict, List, Optional, TypeVar
13
+
14
+ from opentelemetry.trace import Status, StatusCode
15
+
16
+ from mingx._default_attributes import MINGX_SPAN_TYPE
17
+ from mingx._trace import get_tracer
18
+ from mingx.genai.io import record_span_input, record_span_output
19
+
20
+ F = TypeVar("F", bound=Callable[..., Any])
21
+
22
+
23
def traced(
    func: Optional[F] = None,
    *,
    name: Optional[str] = None,
    span_type: Optional[str] = None,
    attributes: Optional[Dict[str, Any]] = None,
    record_io: bool = True,
    record_io_as: str = "events",
    max_content_length: Optional[int] = None,
) -> Any:
    """
    Decorator that wraps each call of the target function in an OpenTelemetry span.

    Usage:
        @traced
        def my_func(): ...

        @traced(name="custom_span", span_type="model", attributes={"key": "value"})
        def other(): ...

    Args:
        func: The function when used as a bare ``@traced`` (no parentheses).
        name: Explicit span name; defaults to ``module.qualname`` of the target.
        span_type: Sets the ``mingx.span_type`` attribute, e.g. "model",
            "retriever", "tool", "chain", "agent" (extensible).
        attributes: Extra attributes to set on every span.
        record_io: Whether to record call arguments (input) and return value
            (output), default True, matching the adapters' behavior.
        record_io_as: "events" (default) | "attributes" | "none".
        max_content_length: Optional truncation length per content item.

    Supports both sync and async functions; on exception the span status is
    set to ERROR, the exception is recorded, and then re-raised.
    """
    base_attrs: Dict[str, Any] = dict(attributes or {})
    if span_type is not None:
        base_attrs[MINGX_SPAN_TYPE] = span_type

    def decorator(f: F) -> F:
        # Pick the async or sync wrapper based on the target's nature.
        wrap = _wrap_async if inspect.iscoroutinefunction(f) else _wrap_sync
        return wrap(  # type: ignore[return-value]
            f,
            name or _default_span_name(f),
            base_attrs,
            record_io,
            record_io_as,
            max_content_length,
        )

    # Bare-decorator form (@traced) vs parameterized form (@traced(...)).
    return decorator if func is None else decorator(func)
64
+
65
+
66
+ def _default_span_name(func: Callable[..., Any]) -> str:
67
+ """Default span name: module.funcname or funcname."""
68
+ module = getattr(func, "__module__", "") or ""
69
+ qualname = getattr(func, "__qualname__", func.__name__)
70
+ if module:
71
+ return f"{module}.{qualname}"
72
+ return qualname
73
+
74
+
75
+ def _args_to_input(args: tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]:
76
+ """将 *args, **kwargs 转为可序列化的 input 结构(与适配器“入参作为输入”一致)。"""
77
+ return {"args": list(args), "kwargs": dict(kwargs)}
78
+
79
+
80
def _wrap_sync(
    func: F,
    span_name: str,
    attrs: Dict[str, Any],
    record_io: bool,
    record_io_as: str,
    max_content_length: Optional[int],
) -> F:
    """Wrap a synchronous callable so each invocation runs inside a span.

    Input/output recording is best-effort: any failure while recording is
    swallowed so it can never break the traced call. Exceptions from the
    wrapped function mark the span ERROR, are recorded, and re-raised.
    """
    io_enabled = record_io and record_io_as != "none"

    def _safe_record(recorder: Callable[..., Any], span: Any, payload: Any) -> None:
        # Best-effort recording; never propagate recording failures.
        try:
            recorder(
                span,
                payload,
                record_as=record_io_as,
                max_length=max_content_length,
            )
        except Exception:
            pass

    @functools.wraps(func)
    def _inner(*args: Any, **kwargs: Any) -> Any:
        tracer = get_tracer()
        with tracer.start_as_current_span(span_name, attributes=attrs) as span:
            if io_enabled:
                _safe_record(record_span_input, span, _args_to_input(args, kwargs))
            try:
                result = func(*args, **kwargs)
                if io_enabled:
                    _safe_record(record_span_output, span, result)
                span.set_status(Status(StatusCode.OK))
                return result
            except Exception as exc:
                if io_enabled:
                    _safe_record(
                        record_span_output,
                        span,
                        {"error": str(exc), "error_type": type(exc).__name__},
                    )
                span.set_status(Status(StatusCode.ERROR, str(exc)))
                span.record_exception(exc)
                raise

    return _inner  # type: ignore[return-value]
132
+
133
+
134
def _wrap_async(
    func: F,
    span_name: str,
    attrs: Dict[str, Any],
    record_io: bool,
    record_io_as: str,
    max_content_length: Optional[int],
) -> F:
    """Wrap a coroutine function so each awaited call runs inside a span.

    Input/output recording is best-effort: any failure while recording is
    swallowed so it can never break the traced call. Exceptions from the
    wrapped coroutine mark the span ERROR, are recorded, and re-raised.
    """
    io_enabled = record_io and record_io_as != "none"

    def _safe_record(recorder: Callable[..., Any], span: Any, payload: Any) -> None:
        # Best-effort recording; never propagate recording failures.
        try:
            recorder(
                span,
                payload,
                record_as=record_io_as,
                max_length=max_content_length,
            )
        except Exception:
            pass

    @functools.wraps(func)
    async def _inner(*args: Any, **kwargs: Any) -> Any:
        tracer = get_tracer()
        with tracer.start_as_current_span(span_name, attributes=attrs) as span:
            if io_enabled:
                _safe_record(record_span_input, span, _args_to_input(args, kwargs))
            try:
                result = await func(*args, **kwargs)
                if io_enabled:
                    _safe_record(record_span_output, span, result)
                span.set_status(Status(StatusCode.OK))
                return result
            except Exception as exc:
                if io_enabled:
                    _safe_record(
                        record_span_output,
                        span,
                        {"error": str(exc), "error_type": type(exc).__name__},
                    )
                span.set_status(Status(StatusCode.ERROR, str(exc)))
                span.record_exception(exc)
                raise

    return _inner  # type: ignore[return-value]
@@ -0,0 +1,99 @@
1
+ """
2
+ GenAI semantic layer: OTEL GenAI span names and attributes.
3
+
4
+ See: https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/
5
+ """
6
+
7
+ from mingx.genai import attributes
8
+ from mingx.genai.attributes import (
9
+ GEN_AI_OPERATION_NAME,
10
+ GEN_AI_PROVIDER_NAME,
11
+ GEN_AI_REQUEST_MODEL,
12
+ GEN_AI_TOOL_NAME,
13
+ GEN_AI_USAGE_INPUT_TOKENS,
14
+ GEN_AI_USAGE_OUTPUT_TOKENS,
15
+ OPERATION_CHAT,
16
+ OPERATION_EMBEDDINGS,
17
+ OPERATION_EXECUTE_TOOL,
18
+ execute_tool_attributes,
19
+ inference_attributes,
20
+ embeddings_attributes,
21
+ inference_span_name,
22
+ execute_tool_span_name,
23
+ embeddings_span_name,
24
+ )
25
+ from mingx.genai.span_attributes import (
26
+ ChainSpanAttributes,
27
+ InferenceInputOutput,
28
+ InferenceResponseAttributes,
29
+ InferenceSpanAttributes,
30
+ RetrieverSpanAttributes,
31
+ TokenUsage,
32
+ ToolSpanAttributes,
33
+ )
34
+ from mingx.genai.spans import (
35
+ inference_span,
36
+ embeddings_span,
37
+ execute_tool_span,
38
+ build_inference_span_args,
39
+ build_execute_tool_span_args,
40
+ build_embeddings_span_args,
41
+ )
42
+ from mingx.genai.io import (
43
+ GEN_AI_EVENT_INPUT,
44
+ GEN_AI_EVENT_OUTPUT,
45
+ GEN_AI_EVENT_SYSTEM_INSTRUCTIONS,
46
+ build_input_messages_from_prompts,
47
+ build_output_messages_from_llm_result,
48
+ record_llm_input_output,
49
+ record_span_input,
50
+ record_span_output,
51
+ record_span_input_output,
52
+ span_input,
53
+ span_output,
54
+ record_current_span_input_output,
55
+ )
56
+
57
# Explicit public API of mingx.genai: controls `from mingx.genai import *`
# and signals which re-exported names are supported for external use.
__all__ = [
    "ChainSpanAttributes",
    "InferenceInputOutput",
    "InferenceResponseAttributes",
    "InferenceSpanAttributes",
    "RetrieverSpanAttributes",
    "TokenUsage",
    "ToolSpanAttributes",
    "attributes",
    "GEN_AI_OPERATION_NAME",
    "GEN_AI_PROVIDER_NAME",
    "GEN_AI_REQUEST_MODEL",
    "GEN_AI_TOOL_NAME",
    "GEN_AI_USAGE_INPUT_TOKENS",
    "GEN_AI_USAGE_OUTPUT_TOKENS",
    "OPERATION_CHAT",
    "OPERATION_EMBEDDINGS",
    "OPERATION_EXECUTE_TOOL",
    "inference_attributes",
    "embeddings_attributes",
    "execute_tool_attributes",
    "inference_span_name",
    "execute_tool_span_name",
    "embeddings_span_name",
    "inference_span",
    "embeddings_span",
    "execute_tool_span",
    "build_inference_span_args",
    "build_execute_tool_span_args",
    "build_embeddings_span_args",
    "GEN_AI_EVENT_INPUT",
    "GEN_AI_EVENT_OUTPUT",
    "GEN_AI_EVENT_SYSTEM_INSTRUCTIONS",
    "build_input_messages_from_prompts",
    "build_output_messages_from_llm_result",
    "record_llm_input_output",
    "record_span_input",
    "record_span_output",
    "record_span_input_output",
    "span_input",
    "span_output",
    "record_current_span_input_output",
]
@@ -0,0 +1,176 @@
1
+ """
2
+ OpenTelemetry GenAI semantic convention attribute names and builders.
3
+
4
+ Follows: https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/
5
+ Respects OTEL_SEMCONV_STABILITY_OPT_IN=gen_ai_latest_experimental for experimental attributes.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import os
11
+ from typing import Any, Dict, List, Optional
12
+
13
# --- Attribute key constants (gen_ai.* and related) ---
# Names follow the OTEL GenAI semantic conventions; see module docstring.

# Request-side attributes: what was asked of the model.
GEN_AI_OPERATION_NAME = "gen_ai.operation.name"
GEN_AI_PROVIDER_NAME = "gen_ai.provider.name"
GEN_AI_REQUEST_MODEL = "gen_ai.request.model"
GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature"
GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens"
GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p"
GEN_AI_REQUEST_TOP_K = "gen_ai.request.top_k"
GEN_AI_REQUEST_FREQUENCY_PENALTY = "gen_ai.request.frequency_penalty"
GEN_AI_REQUEST_PRESENCE_PENALTY = "gen_ai.request.presence_penalty"
GEN_AI_REQUEST_STOP_SEQUENCES = "gen_ai.request.stop_sequences"
# Response-side attributes: what the model returned.
GEN_AI_RESPONSE_ID = "gen_ai.response.id"
GEN_AI_RESPONSE_MODEL = "gen_ai.response.model"
GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"
GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
# Tool execution attributes.
GEN_AI_TOOL_NAME = "gen_ai.tool.name"
GEN_AI_TOOL_DESCRIPTION = "gen_ai.tool.description"
GEN_AI_TOOL_CALL_ID = "gen_ai.tool.call.id"
GEN_AI_TOOL_TYPE = "gen_ai.tool.type"
ERROR_TYPE = "error.type"

# Input/output message attributes (Opt-In; conventionally recorded via
# Span Events rather than attributes — see genai.io).
GEN_AI_INPUT_MESSAGES = "gen_ai.input.messages"
GEN_AI_OUTPUT_MESSAGES = "gen_ai.output.messages"
GEN_AI_SYSTEM_INSTRUCTIONS = "gen_ai.system_instructions"

# Well-known values for gen_ai.operation.name
OPERATION_CHAT = "chat"
OPERATION_EMBEDDINGS = "embeddings"
OPERATION_EXECUTE_TOOL = "execute_tool"
OPERATION_GENERATE_CONTENT = "generate_content"
OPERATION_TEXT_COMPLETION = "text_completion"
OPERATION_INVOKE_AGENT = "invoke_agent"
OPERATION_CREATE_AGENT = "create_agent"

# Well-known provider names (subset)
PROVIDER_OPENAI = "openai"
PROVIDER_ANTHROPIC = "anthropic"
PROVIDER_GCP_VERTEX_AI = "gcp.vertex_ai"
PROVIDER_GCP_GEN_AI = "gcp.gen_ai"
PROVIDER_AZURE_OPENAI = "azure.ai.openai"
PROVIDER_AWS_BEDROCK = "aws.bedrock"

# Span name templates per spec
# Inference: {gen_ai.operation.name} {gen_ai.request.model}
# Execute tool: execute_tool {gen_ai.tool.name}
# Embeddings: embeddings {gen_ai.request.model}
63
+
64
+ def _use_experimental() -> bool:
65
+ opt_in = os.environ.get("OTEL_SEMCONV_STABILITY_OPT_IN", "")
66
+ return "gen_ai_latest_experimental" in opt_in.replace(" ", "").split(",")
67
+
68
+
69
def inference_attributes(
    operation_name: str,
    provider_name: str,
    *,
    model: Optional[str] = None,
    temperature: Optional[float] = None,
    max_tokens: Optional[int] = None,
    top_p: Optional[float] = None,
    top_k: Optional[int] = None,
    frequency_penalty: Optional[float] = None,
    presence_penalty: Optional[float] = None,
    stop_sequences: Optional[List[str]] = None,
    response_id: Optional[str] = None,
    response_model: Optional[str] = None,
    finish_reasons: Optional[List[str]] = None,
    input_tokens: Optional[int] = None,
    output_tokens: Optional[int] = None,
) -> Dict[str, Any]:
    """Build the attribute dict for an inference (chat/completion) span.

    Operation and provider are always present; every other attribute is
    included only when the corresponding argument is not None.
    """
    attrs: Dict[str, Any] = {
        GEN_AI_OPERATION_NAME: operation_name,
        GEN_AI_PROVIDER_NAME: provider_name,
    }
    # (key, value) pairs in spec order; None means "not provided, omit".
    optional = (
        (GEN_AI_REQUEST_MODEL, model),
        (GEN_AI_REQUEST_TEMPERATURE, temperature),
        (GEN_AI_REQUEST_MAX_TOKENS, max_tokens),
        (GEN_AI_REQUEST_TOP_P, top_p),
        (GEN_AI_REQUEST_TOP_K, top_k),
        (GEN_AI_REQUEST_FREQUENCY_PENALTY, frequency_penalty),
        (GEN_AI_REQUEST_PRESENCE_PENALTY, presence_penalty),
        (GEN_AI_REQUEST_STOP_SEQUENCES, stop_sequences),
        (GEN_AI_RESPONSE_ID, response_id),
        (GEN_AI_RESPONSE_MODEL, response_model),
        (GEN_AI_RESPONSE_FINISH_REASONS, finish_reasons),
        (GEN_AI_USAGE_INPUT_TOKENS, input_tokens),
        (GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens),
    )
    attrs.update({key: value for key, value in optional if value is not None})
    return attrs
119
+
120
+
121
def embeddings_attributes(
    provider_name: str,
    *,
    model: Optional[str] = None,
    input_tokens: Optional[int] = None,
) -> Dict[str, Any]:
    """Build the attribute dict for an embeddings span.

    Operation (fixed to "embeddings") and provider are always present;
    model and input token usage are included only when provided.
    """
    attrs: Dict[str, Any] = {
        GEN_AI_OPERATION_NAME: OPERATION_EMBEDDINGS,
        GEN_AI_PROVIDER_NAME: provider_name,
    }
    optional = ((GEN_AI_REQUEST_MODEL, model), (GEN_AI_USAGE_INPUT_TOKENS, input_tokens))
    attrs.update({key: value for key, value in optional if value is not None})
    return attrs
137
+
138
+
139
def execute_tool_attributes(
    tool_name: str,
    *,
    tool_description: Optional[str] = None,
    tool_call_id: Optional[str] = None,
    tool_type: Optional[str] = None,
) -> Dict[str, Any]:
    """Build the attribute dict for an execute_tool span.

    Operation (fixed to "execute_tool") and tool name are always present;
    description, call id, and type are included only when provided.
    """
    attrs: Dict[str, Any] = {
        GEN_AI_OPERATION_NAME: OPERATION_EXECUTE_TOOL,
        GEN_AI_TOOL_NAME: tool_name,
    }
    optional = (
        (GEN_AI_TOOL_DESCRIPTION, tool_description),
        (GEN_AI_TOOL_CALL_ID, tool_call_id),
        (GEN_AI_TOOL_TYPE, tool_type),
    )
    attrs.update({key: value for key, value in optional if value is not None})
    return attrs
158
+
159
+
160
def inference_span_name(operation_name: str, model: Optional[str] = None) -> str:
    """Span name per spec: "{gen_ai.operation.name} {gen_ai.request.model}",
    or just the operation name when no model is known (or model is empty)."""
    return f"{operation_name} {model}" if model else operation_name
165
+
166
+
167
def execute_tool_span_name(tool_name: str) -> str:
    """Span name per spec: "execute_tool {gen_ai.tool.name}"."""
    return " ".join((OPERATION_EXECUTE_TOOL, tool_name))
170
+
171
+
172
def embeddings_span_name(model: Optional[str] = None) -> str:
    """Span name per spec: "embeddings {model}" when a model is known
    (and non-empty), otherwise just "embeddings"."""
    if not model:
        return OPERATION_EMBEDDINGS
    return f"{OPERATION_EMBEDDINGS} {model}"