promptlayer 1.2.1 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/claude-agents.js +1 -1
- package/dist/claude-agents.js.map +1 -1
- package/dist/esm/{chunk-UKSCOWKT.js → chunk-7Y65WGSZ.js} +2 -2
- package/dist/esm/claude-agents.js +1 -1
- package/dist/esm/claude-agents.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/openai-agents.js +1 -1
- package/dist/index.js +1 -1
- package/dist/openai-agents.js +1 -1
- package/package.json +1 -1
- package/vendor/claude-agents/trace/hooks/lib.sh +4 -522
- package/vendor/claude-agents/trace/hooks/post_tool_use.sh +2 -27
- package/vendor/claude-agents/trace/hooks/py/__init__.py +1 -0
- package/vendor/claude-agents/trace/hooks/py/cli.py +81 -0
- package/vendor/claude-agents/trace/hooks/py/context.py +63 -0
- package/vendor/claude-agents/trace/hooks/py/handlers.py +244 -0
- package/vendor/claude-agents/trace/hooks/py/otlp.py +278 -0
- package/vendor/claude-agents/trace/hooks/py/settings.py +33 -0
- package/vendor/claude-agents/trace/hooks/py/state.py +135 -0
- package/vendor/claude-agents/trace/hooks/{parse_stop_transcript.py → py/stop_parser.py} +69 -31
- package/vendor/claude-agents/trace/hooks/py/traceparent.py +31 -0
- package/vendor/claude-agents/trace/hooks/session_end.sh +1 -23
- package/vendor/claude-agents/trace/hooks/session_start.sh +5 -41
- package/vendor/claude-agents/trace/hooks/stop_hook.sh +3 -106
- package/vendor/claude-agents/trace/hooks/user_prompt_submit.sh +1 -11
- package/vendor/claude-agents/trace/setup.sh +170 -0
- package/vendor/claude-agents/vendor_metadata.json +2 -2
- package/vendor/claude-agents/trace/hooks/hook_utils.py +0 -38
- /package/dist/esm/{chunk-UKSCOWKT.js.map → chunk-7Y65WGSZ.js.map} +0 -0
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
import os
|
|
3
|
+
import subprocess
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
PLUGIN_VERSION = "1.0.0"
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def env_int(name: str, default: int) -> int:
    """Read environment variable *name* as an int, falling back to *default*.

    A missing variable or a non-integer value yields *default* rather than
    raising, so hook startup never fails on a bad tuning knob.
    """
    try:
        return int(os.environ.get(name, str(default)))
    except (TypeError, ValueError):
        # Only conversion failures are expected; anything else should surface
        # instead of being silently swallowed by a bare `except Exception`.
        return default
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def detect_claude_version() -> str:
    """Return `claude --version` output, or "unknown" when unavailable."""
    try:
        proc = subprocess.run(
            ["claude", "--version"],
            capture_output=True,
            text=True,
            check=False,
        )
    except Exception:
        # Binary missing or not executable: report a sentinel, never fail.
        return "unknown"
    version = proc.stdout.strip()
    return version if version else "unknown"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass(frozen=True)
class HookContext:
    """Immutable bundle of paths, credentials, and tuning values shared by hooks."""

    log_file: str  # hook log file path
    queue_file: str  # NDJSON spool for OTLP payloads awaiting retry
    session_state_dir: str  # directory holding per-session state files
    lock_dir: str  # directory holding lock files for session/queue access
    debug: str  # "true"/"false" string from PROMPTLAYER_CC_DEBUG
    api_key: str  # PromptLayer API key; "" when unset
    otlp_endpoint: str  # OTLP traces endpoint URL
    queue_drain_limit: int  # max queued payloads attempted per drain pass
    otlp_connect_timeout: int  # seconds (from PROMPTLAYER_OTLP_CONNECT_TIMEOUT)
    otlp_max_time: int  # seconds; overall timeout used when posting
    plugin_version: str  # this plugin's version string
    cc_version: str  # `claude --version` output, or "unknown"
    user_agent: str  # User-Agent header derived from the two versions above
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def load_context() -> HookContext:
    """Build the HookContext for one hook invocation.

    Paths are fixed under ~/.claude/state; everything else comes from
    PROMPTLAYER_* environment variables with defaults.
    """
    env = os.environ.get
    expand = os.path.expanduser
    cc_version = detect_claude_version()
    plugin_version = PLUGIN_VERSION
    return HookContext(
        log_file=expand("~/.claude/state/promptlayer_hook.log"),
        queue_file=expand("~/.claude/state/promptlayer_otlp_queue.ndjson"),
        session_state_dir=expand("~/.claude/state/promptlayer_sessions"),
        lock_dir=expand("~/.claude/state/promptlayer_locks"),
        debug=env("PROMPTLAYER_CC_DEBUG", "false"),
        api_key=env("PROMPTLAYER_API_KEY", ""),
        otlp_endpoint=env("PROMPTLAYER_OTLP_ENDPOINT", "https://api.promptlayer.com/v1/traces"),
        queue_drain_limit=env_int("PROMPTLAYER_QUEUE_DRAIN_LIMIT", 10),
        otlp_connect_timeout=env_int("PROMPTLAYER_OTLP_CONNECT_TIMEOUT", 5),
        otlp_max_time=env_int("PROMPTLAYER_OTLP_MAX_TIME", 12),
        plugin_version=plugin_version,
        cc_version=cc_version,
        user_agent=f"promptlayer-claude-plugin/{plugin_version} claude-code/{cc_version}",
    )
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import time
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
from otlp import (
|
|
7
|
+
SpanSpec,
|
|
8
|
+
build_payload,
|
|
9
|
+
build_span,
|
|
10
|
+
generate_session_id,
|
|
11
|
+
generate_span_id,
|
|
12
|
+
generate_trace_id,
|
|
13
|
+
send_payload_with_queueing,
|
|
14
|
+
)
|
|
15
|
+
from state import (
|
|
16
|
+
acquire_lock,
|
|
17
|
+
ensure_session_initialized,
|
|
18
|
+
load_session_state,
|
|
19
|
+
parse_pending_tool_calls,
|
|
20
|
+
release_lock,
|
|
21
|
+
save_session_state,
|
|
22
|
+
session_lock_path,
|
|
23
|
+
)
|
|
24
|
+
from stop_parser import build_stop_hook_span_specs, parse_transcript
|
|
25
|
+
from traceparent import parse_traceparent
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def read_stdin_json(raw: str):
    """Parse *raw* as a JSON object; return {} for blank, invalid, or non-dict input."""
    if not raw.strip():
        return {}
    try:
        parsed = json.loads(raw)
    except Exception:
        return {}
    if isinstance(parsed, dict):
        return parsed
    return {}
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def handle_session_start(ctx, raw_input: str) -> str:
    """SessionStart hook: establish (or refresh) trace state for a session.

    Returns "<session_id>\t<trace_id>\t<status>" where status is "existing"
    when trace ids were already on disk, "captured" when created here.
    """
    input_data = read_stdin_json(raw_input)
    session_id = input_data.get("session_id")
    # Fall back to a generated id when the hook payload omits session_id.
    session_id = str(session_id) if session_id else generate_session_id()

    state, path = load_session_state(ctx.session_state_dir, session_id)
    existing = bool(state.trace_id and state.session_span_id)
    if existing:
        # Ids already present; let the shared initializer fill any gaps.
        state, _ = ensure_session_initialized(
            state,
            traceparent_raw=os.environ.get("PROMPTLAYER_TRACEPARENT", ""),
            generate_trace_id=generate_trace_id,
            generate_span_id=generate_span_id,
        )
        status = "existing"
    else:
        # Fresh session: honor an inbound traceparent when provided,
        # otherwise mint new ids locally.
        trace_context = parse_traceparent(os.environ.get("PROMPTLAYER_TRACEPARENT", ""))
        state.trace_id = trace_context["trace_id"] if trace_context else generate_trace_id()
        state.session_span_id = generate_span_id()
        state.session_parent_span_id = trace_context["parent_span_id"] if trace_context else ""
        state.session_start_ns = str(time.time_ns())
        state.current_turn_start_ns = ""
        state.pending_tool_calls = "[]"
        state.session_init_source = "session_start_hook"
        state.session_traceparent_version = trace_context["version"] if trace_context else ""
        state.session_trace_flags = trace_context["trace_flags"] if trace_context else ""
        state.trace_context_source = trace_context["source"] if trace_context else "generated"
        status = "captured"

    save_session_state(path, state)
    return f"{session_id}\t{state.trace_id}\t{status}"
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def handle_user_prompt_submit(ctx, raw_input: str) -> str:
    """UserPromptSubmit hook: mark the start of a new turn for the session."""
    payload = read_stdin_json(raw_input)
    session_id = payload.get("session_id")
    if not session_id:
        return ""

    sid = str(session_id)
    state, path = load_session_state(ctx.session_state_dir, sid)
    state, _ = ensure_session_initialized(
        state,
        traceparent_raw=os.environ.get("PROMPTLAYER_TRACEPARENT", ""),
        generate_trace_id=generate_trace_id,
        generate_span_id=generate_span_id,
    )
    if not (state.trace_id and state.session_span_id):
        return ""

    # New turn: reset the turn clock and the tool-call accumulator.
    state.current_turn_start_ns = str(time.time_ns())
    state.pending_tool_calls = "[]"
    save_session_state(path, state)
    return sid
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def handle_post_tool_use(ctx, raw_input: str) -> str:
    """PostToolUse hook: record one completed tool call onto session state.

    The call is appended (as a CODE_EXECUTION entry) to the session's
    pending_tool_calls JSON list, to be flushed later by the Stop hook.
    Returns "<session_id>\t<tool_name>", or "" when input is unusable.
    """
    input_data = read_stdin_json(raw_input)
    session_id = input_data.get("session_id")
    tool_name = input_data.get("tool_name")
    if not session_id or not tool_name:
        return ""

    tool_input = input_data.get("tool_input", {})
    # Prefer "tool_response"; fall back to "output" when absent.
    tool_output = input_data.get("tool_response", input_data.get("output", {}))

    state, path = load_session_state(ctx.session_state_dir, str(session_id))
    state, _ = ensure_session_initialized(
        state,
        traceparent_raw=os.environ.get("PROMPTLAYER_TRACEPARENT", ""),
        generate_trace_id=generate_trace_id,
        generate_span_id=generate_span_id,
    )
    if not state.trace_id:
        return ""
    if not state.current_turn_start_ns:
        # Tool ran before any UserPromptSubmit; start the turn clock now.
        state.current_turn_start_ns = str(time.time_ns())

    pending_tool_calls = parse_pending_tool_calls(state.pending_tool_calls)
    pending_tool_calls.append(
        {
            "source": "claude-code",
            "hook": "PostToolUse",
            "tool_name": str(tool_name),
            "node_type": "CODE_EXECUTION",
            "function_input": tool_input,
            "function_output": tool_output,
        }
    )
    state.pending_tool_calls = json.dumps(pending_tool_calls, ensure_ascii=False, separators=(",", ":"))
    save_session_state(path, state)
    return f"{session_id}\t{tool_name}"
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def handle_session_end(ctx, raw_input: str) -> str:
    """SessionEnd hook: emit the root session span and delete session state.

    Serialized with other hooks via a per-session lock; skips (returns "")
    rather than blocking when the lock is held. Returns the session id on
    success.
    """
    input_data = read_stdin_json(raw_input)
    session_id = input_data.get("session_id")
    if not session_id:
        return ""

    lock_path = session_lock_path(ctx.lock_dir, str(session_id))
    if not acquire_lock(lock_path):
        return ""

    try:
        state, path = load_session_state(ctx.session_state_dir, str(session_id))
        if not state.trace_id or not state.session_span_id:
            return ""

        spec = build_span(
            SpanSpec(
                trace_id=state.trace_id,
                span_id=state.session_span_id,
                parent_span_id=state.session_parent_span_id,
                name="Claude Code session",
                kind="1",  # SPAN_KIND_INTERNAL
                start_ns=state.session_start_ns or str(time.time_ns()),
                end_ns=str(time.time_ns()),
                attrs={
                    "source": "claude-code",
                    "hook": "SessionEnd",
                    "node_type": "WORKFLOW",
                    "session.lifecycle": "complete",
                },
            )
        )
        send_payload_with_queueing(ctx, build_payload([spec]))
        # Session is over: drop its state file (already-gone is fine).
        try:
            os.remove(path)
        except FileNotFoundError:
            pass
        return str(session_id)
    finally:
        release_lock(lock_path)
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def resolve_stop_session_id(input_data):
    """Derive the session id, falling back to the transcript filename stem."""
    session_id = input_data.get("session_id")
    if session_id:
        return session_id
    transcript_path = input_data.get("transcript_path")
    if not transcript_path:
        return session_id
    stem = os.path.basename(str(transcript_path))
    if stem.endswith(".jsonl"):
        stem = stem[: -len(".jsonl")]
    return stem
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def handle_stop_hook(ctx, raw_input: str) -> str:
    """Stop hook: flush the finished turn (LLM + tool calls) as spans.

    Under the session lock, the turn start time and accumulated tool calls
    are snapshotted and reset; the transcript is then parsed outside the
    lock (polling briefly until LLM entries appear) and the resulting spans
    are sent. Returns "<session_id>\tok", "<session_id>\tmissing_transcript",
    or "" when the session cannot be resolved or the lock is contended.
    """
    input_data = read_stdin_json(raw_input)
    session_id = resolve_stop_session_id(input_data)
    transcript_path = input_data.get("transcript_path")
    if not session_id:
        return ""

    lock_path = session_lock_path(ctx.lock_dir, str(session_id))
    if not acquire_lock(lock_path):
        return ""

    try:
        state, path = load_session_state(ctx.session_state_dir, str(session_id))
        state, _ = ensure_session_initialized(
            state,
            traceparent_raw=os.environ.get("PROMPTLAYER_TRACEPARENT", ""),
            generate_trace_id=generate_trace_id,
            generate_span_id=generate_span_id,
        )
        if not state.trace_id or not state.session_span_id:
            return ""

        # Snapshot and reset per-turn state while holding the lock.
        turn_start_ns = state.current_turn_start_ns or str(time.time_ns())
        pending_tool_calls = state.pending_tool_calls or "[]"
        state.current_turn_start_ns = ""
        state.pending_tool_calls = "[]"
        save_session_state(path, state)
    finally:
        release_lock(lock_path)

    if not transcript_path or not os.path.exists(str(transcript_path)):
        return f"{session_id}\tmissing_transcript"

    pending_payloads = parse_pending_tool_calls(pending_tool_calls)
    attempts = 0
    # The transcript may still be flushing; poll up to ~2s (10 x 0.2s)
    # for LLM entries before giving up and emitting what we have.
    while True:
        parsed = parse_transcript(str(transcript_path), int(turn_start_ns), pending_payloads, str(session_id))
        if parsed.get("llms") or attempts >= 10:
            break
        attempts += 1
        time.sleep(0.2)

    span_specs = build_stop_hook_span_specs(
        parsed=parsed,
        trace_id=state.trace_id,
        session_span_id=state.session_span_id,
        session_parent_span_id=state.session_parent_span_id,
        session_start_ns=state.session_start_ns or str(time.time_ns()),
        session_init_source=state.session_init_source,
        generate_span_id=generate_span_id,
    )
    spans = [build_span(span_spec) for span_spec in span_specs]
    if spans:
        send_payload_with_queueing(ctx, build_payload(spans))
    return f"{session_id}\tok"
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def handle_parse_stop_transcript(transcript_path: str, turn_start_ns: str, expected_session_id: Optional[str]) -> str:
    """CLI path: parse a transcript on demand and return compact-JSON results.

    Pending tool calls come from the PL_PENDING_TOOL_CALLS environment
    variable. A blank or non-numeric *turn_start_ns* is treated the same as
    "0" — i.e. no turn boundary (None) — instead of raising ValueError,
    which previously aborted the CLI on an empty argument.
    """
    pending_raw = os.environ.get("PL_PENDING_TOOL_CALLS", "[]")
    pending_payloads = parse_pending_tool_calls(pending_raw)
    try:
        turn_start = int(turn_start_ns)
    except (TypeError, ValueError):
        turn_start = 0
    parsed = parse_transcript(transcript_path, turn_start or None, pending_payloads, expected_session_id)
    return json.dumps(parsed, ensure_ascii=False, separators=(",", ":"))
|
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
import base64
|
|
3
|
+
import binascii
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import secrets
|
|
7
|
+
import uuid
|
|
8
|
+
from urllib import error, request
|
|
9
|
+
|
|
10
|
+
from state import acquire_lock, queue_lock_path, release_lock
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class SpanSpec:
    """Plain-data description of one span before OTLP/JSON rendering.

    Ids are hex strings (possibly dirty — build_span sanitizes them),
    timestamps are unix-epoch nanoseconds as strings, and kind is the
    numeric OTLP span kind as a string (e.g. "1").
    """

    trace_id: str  # 32 hex chars expected
    span_id: str  # 16 hex chars expected
    parent_span_id: str  # "" when the span has no parent
    name: str
    kind: str  # numeric OTLP SpanKind as a string ("0".."5")
    start_ns: str
    end_ns: str
    attrs: dict  # attribute name -> value; None values are dropped later
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def compact_json(value) -> str:
    """Serialize *value* to minified JSON, keeping non-ASCII characters literal."""
    return json.dumps(value, separators=(",", ":"), ensure_ascii=False)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def generate_trace_id() -> str:
    """Return a random 128-bit trace id as 32 lowercase hex characters."""
    return secrets.token_bytes(16).hex()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def generate_span_id() -> str:
    """Return a random 64-bit span id as 16 lowercase hex characters."""
    return secrets.token_bytes(8).hex()
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def generate_session_id() -> str:
    """Return a fresh random UUID4 string usable as a session id."""
    return f"{uuid.uuid4()}"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def normalize_hex_id(raw: str, expected_len: int, fallback: str) -> str:
    """Coerce *raw* into exactly *expected_len* lowercase hex characters.

    Non-hex characters are dropped; an empty result is replaced by
    *fallback*; the value is then truncated or right-padded with zeros.
    """
    hex_digits = set("0123456789abcdef")
    cleaned = "".join(c for c in str(raw).lower() if c in hex_digits)
    if not cleaned:
        cleaned = fallback
    # Truncate-then-pad is equivalent to the two length checks combined.
    return cleaned[:expected_len].ljust(expected_len, "0")
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def hex_to_base64(hex_value: str) -> str:
    """Convert a hex id string to base64, as OTLP/JSON requires for id fields."""
    decoded = binascii.unhexlify(hex_value)
    return base64.b64encode(decoded).decode("ascii")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def kind_int_to_string(kind) -> str:
    """Map a numeric OTLP span kind (int or str "0".."5") to its enum name."""
    kind_names = {
        "0": "SPAN_KIND_UNSPECIFIED",
        "1": "SPAN_KIND_INTERNAL",
        "2": "SPAN_KIND_SERVER",
        "3": "SPAN_KIND_CLIENT",
        "4": "SPAN_KIND_PRODUCER",
        "5": "SPAN_KIND_CONSUMER",
    }
    # Anything outside the known range degrades to UNSPECIFIED.
    return kind_names.get(str(kind), "SPAN_KIND_UNSPECIFIED")
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def otlp_attribute_value(value):
    """Wrap a Python value in the OTLP AnyValue JSON shape.

    Check order matters: str first, then bool before int (bool is an int
    subclass). Whole-number floats become intValue; everything else is
    serialized to a compact JSON string.
    """
    if isinstance(value, str):
        return {"stringValue": value}
    if isinstance(value, bool):
        return {"boolValue": value}
    if isinstance(value, int):
        return {"intValue": str(value)}
    if isinstance(value, float):
        whole = value.is_integer()
        return {"intValue": str(int(value))} if whole else {"doubleValue": value}
    return {"stringValue": compact_json(value)}
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def build_span(spec: SpanSpec):
    """Render a SpanSpec into an OTLP/JSON span dict.

    Ids are sanitized to the correct hex width (with fresh random fallbacks)
    and base64-encoded as OTLP/JSON requires. None-valued attrs are dropped;
    parentSpanId appears only when a parent id is present.
    """
    trace_hex = normalize_hex_id(spec.trace_id, 32, generate_trace_id())
    span_hex = normalize_hex_id(spec.span_id, 16, generate_span_id())
    parent_hex = (
        normalize_hex_id(spec.parent_span_id, 16, generate_span_id())
        if spec.parent_span_id
        else ""
    )

    attributes = [
        {"key": key, "value": otlp_attribute_value(value)}
        for key, value in (spec.attrs or {}).items()
        if value is not None
    ]

    span = {
        "traceId": hex_to_base64(trace_hex),
        "spanId": hex_to_base64(span_hex),
        "name": spec.name,
        "kind": kind_int_to_string(spec.kind),
        "startTimeUnixNano": str(spec.start_ns),
        "endTimeUnixNano": str(spec.end_ns),
        "attributes": attributes,
    }
    if parent_hex:
        span["parentSpanId"] = hex_to_base64(parent_hex)
    return span
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def build_payload(spans):
    """Wrap finished spans in the OTLP/JSON trace-export envelope."""
    resource = {
        "attributes": [
            {"key": "service.name", "value": {"stringValue": "claude-code"}}
        ]
    }
    return {
        "resourceSpans": [
            {
                "resource": resource,
                "scopeSpans": [{"spans": spans}],
            }
        ]
    }
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def http_post_json(endpoint: str, payload, api_key: str = "", user_agent: str = "", timeout: int = 12):
    """POST *payload* as JSON to *endpoint*.

    Returns (status_code, body_text); HTTP error statuses are returned (not
    raised), and any transport-level failure yields (0, "").
    """
    headers = {"Content-Type": "application/json"}
    if api_key:
        headers["X-Api-Key"] = api_key
    if user_agent:
        headers["User-Agent"] = user_agent
    body = compact_json(payload).encode("utf-8")

    req = request.Request(endpoint, data=body, headers=headers, method="POST")
    try:
        with request.urlopen(req, timeout=max(timeout, 1)) as response:
            return response.getcode(), response.read().decode("utf-8")
    except error.HTTPError as exc:
        # Non-2xx responses still carry a useful body (e.g. partialSuccess).
        return exc.code, exc.read().decode("utf-8", errors="replace")
    except Exception:
        # Transport failure; callers treat status 0 as "queue for retry".
        return 0, ""
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def parse_partial_success(response_text: str):
    """Extract (rejected_span_count, error_message) from an OTLP response body.

    Tolerates empty, malformed, and non-object JSON — the previous version
    raised AttributeError when the body was valid JSON but not an object
    (e.g. a list). Returns (0, "") in every degenerate case.
    """
    if not response_text:
        return 0, ""
    try:
        parsed = json.loads(response_text)
    except ValueError:
        return 0, ""
    if not isinstance(parsed, dict):
        # Valid JSON but not an object: nothing to read.
        return 0, ""
    partial = parsed.get("partialSuccess", {})
    if not isinstance(partial, dict):
        return 0, ""
    rejected = partial.get("rejectedSpans", 0)
    try:
        rejected_int = int(rejected)
    except (TypeError, ValueError):
        rejected_int = 0
    message = partial.get("errorMessage", "")
    return rejected_int, str(message) if message else ""
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def post_otlp_payload(ctx, payload):
    """Send one OTLP payload using the endpoint, key, UA, and timeout from *ctx*."""
    return http_post_json(
        ctx.otlp_endpoint,
        payload,
        api_key=ctx.api_key,
        user_agent=ctx.user_agent,
        timeout=ctx.otlp_max_time,
    )
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def append_queue_payload(ctx, payload):
    """Append *payload* as one NDJSON line to the retry queue.

    Returns True on success; False when queueing is unconfigured or the
    queue lock cannot be acquired.
    """
    if not (ctx.queue_file and ctx.lock_dir):
        return False

    os.makedirs(os.path.dirname(ctx.queue_file), exist_ok=True)
    lock_path = queue_lock_path(ctx.lock_dir)
    if not acquire_lock(lock_path):
        return False
    try:
        with open(ctx.queue_file, "a", encoding="utf-8") as queue:
            queue.write(compact_json(payload) + "\n")
        try:
            # Queue may contain payload data; keep it owner-only (best effort).
            os.chmod(ctx.queue_file, 0o600)
        except Exception:
            pass
        return True
    finally:
        release_lock(lock_path)
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def read_queue_payloads(queue_file: str):
    """Load queued payloads from an NDJSON file, skipping blank or corrupt lines."""
    if not os.path.exists(queue_file) or os.path.getsize(queue_file) == 0:
        return []

    payloads = []
    with open(queue_file, encoding="utf-8") as queue:
        for raw_line in queue:
            text = raw_line.strip()
            if not text:
                continue
            try:
                payloads.append(json.loads(text))
            except Exception:
                # One corrupt line must not poison the rest of the queue.
                continue
    return payloads
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def write_queue_payloads(queue_file: str, payloads) -> None:
    """Rewrite the retry-queue file with *payloads*, one compact NDJSON line each."""
    os.makedirs(os.path.dirname(queue_file), exist_ok=True)
    with open(queue_file, "w", encoding="utf-8") as queue:
        queue.writelines(compact_json(payload) + "\n" for payload in payloads)
    try:
        # Best-effort tightening of permissions; failure is non-fatal.
        os.chmod(queue_file, 0o600)
    except Exception:
        pass
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def post_payload_result(ctx, payload):
    """Classify one send attempt: 0 = ok, 1 = transport/HTTP failure, 2 = partial rejection."""
    status, body = post_otlp_payload(ctx, payload)
    if status != 200:
        return 1
    rejected, _ = parse_partial_success(body)
    return 2 if rejected else 0
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def drain_queue(ctx):
    """Retry queued payloads (oldest first), stopping at the first hard failure.

    At most ctx.queue_drain_limit payloads are attempted per call. Sent
    payloads (0) and partially rejected ones (2) are dropped from the queue;
    a transport failure (1) keeps that payload and everything after it.
    No-op when queueing is unconfigured or the queue lock is contended.
    """
    if not (ctx.queue_file and ctx.lock_dir) or not os.path.exists(ctx.queue_file):
        return
    if ctx.queue_drain_limit <= 0:
        return

    lock_path = queue_lock_path(ctx.lock_dir)
    if not acquire_lock(lock_path):
        return
    try:
        payloads = read_queue_payloads(ctx.queue_file)
        if not payloads:
            return

        attempt_count = min(len(payloads), ctx.queue_drain_limit)
        # Everything before survivors_from was handled; everything from it
        # onward stays queued for the next drain pass.
        survivors_from = attempt_count
        for index in range(attempt_count):
            outcome = post_payload_result(ctx, payloads[index])
            if outcome == 1:
                survivors_from = index
                break

        write_queue_payloads(ctx.queue_file, payloads[survivors_from:])
    finally:
        release_lock(lock_path)
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def send_payload_with_queueing(ctx, payload):
    """Flush the retry queue, then send *payload*; queue it on transport failure.

    Returns the post result code (0 ok, 1 failed-and-queued, 2 partial reject).
    """
    drain_queue(ctx)
    outcome = post_payload_result(ctx, payload)
    if outcome == 1:
        append_queue_payload(ctx, payload)
    return outcome
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def probe_endpoint(endpoint: str, api_key: str) -> str:
    """POST an empty trace payload; report the HTTP status as a 3-digit string ("000" on failure)."""
    status, _ = http_post_json(endpoint, {"resourceSpans": []}, api_key=api_key, timeout=12)
    if not status:
        return "000"
    return f"{status:03d}"
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def write_settings_env(settings_file: str, api_key: str, endpoint: str, debug: str) -> str:
    """Merge PromptLayer tracing variables into the settings file's "env" map.

    Existing settings and unrelated env entries are preserved; parent
    directories are created when missing. Returns *settings_file*.
    Raises SystemExit when the existing file is not valid JSON or its root
    is not an object.
    """
    tracing_env = {
        "TRACE_TO_PROMPTLAYER": "true",
        "PROMPTLAYER_API_KEY": api_key,
        "PROMPTLAYER_OTLP_ENDPOINT": endpoint,
        "PROMPTLAYER_CC_DEBUG": debug,
    }

    settings = {}
    if os.path.exists(settings_file):
        try:
            with open(settings_file, encoding="utf-8") as handle:
                settings = json.load(handle)
        except Exception as exc:
            raise SystemExit(f"invalid settings json: {exc}")
    if not isinstance(settings, dict):
        raise SystemExit("invalid settings json: root must be an object")

    merged_env = settings.get("env", {})
    if not isinstance(merged_env, dict):
        # A malformed "env" entry is replaced rather than rejected.
        merged_env = {}
    merged_env.update(tracing_env)
    settings["env"] = merged_env

    os.makedirs(os.path.dirname(settings_file), exist_ok=True)
    with open(settings_file, "w", encoding="utf-8") as handle:
        json.dump(settings, handle, ensure_ascii=False, indent=2)
        handle.write("\n")
    return settings_file
|