promptlayer 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +22 -0
  2. package/dist/claude-agents.d.mts +20 -0
  3. package/dist/claude-agents.d.ts +20 -0
  4. package/dist/claude-agents.js +2 -0
  5. package/dist/claude-agents.js.map +1 -0
  6. package/dist/esm/{chunk-SWBNW72U.js → chunk-UKSCOWKT.js} +2 -2
  7. package/dist/esm/{chunk-SWBNW72U.js.map → chunk-UKSCOWKT.js.map} +1 -1
  8. package/dist/esm/claude-agents.js +2 -0
  9. package/dist/esm/claude-agents.js.map +1 -0
  10. package/dist/esm/index.js +1 -1
  11. package/dist/esm/openai-agents.js +2 -2
  12. package/dist/esm/openai-agents.js.map +1 -1
  13. package/dist/index.js +2 -2
  14. package/dist/index.js.map +1 -1
  15. package/dist/openai-agents.js +2 -2
  16. package/dist/openai-agents.js.map +1 -1
  17. package/package.json +18 -1
  18. package/vendor/claude-agents/trace/.claude-plugin/plugin.json +8 -0
  19. package/vendor/claude-agents/trace/hooks/hook_utils.py +38 -0
  20. package/vendor/claude-agents/trace/hooks/hooks.json +60 -0
  21. package/vendor/claude-agents/trace/hooks/lib.sh +577 -0
  22. package/vendor/claude-agents/trace/hooks/parse_stop_transcript.py +375 -0
  23. package/vendor/claude-agents/trace/hooks/post_tool_use.sh +41 -0
  24. package/vendor/claude-agents/trace/hooks/session_end.sh +37 -0
  25. package/vendor/claude-agents/trace/hooks/session_start.sh +57 -0
  26. package/vendor/claude-agents/trace/hooks/stop_hook.sh +123 -0
  27. package/vendor/claude-agents/trace/hooks/user_prompt_submit.sh +25 -0
  28. package/vendor/claude-agents/vendor_metadata.json +5 -0
  29. package/.github/CODEOWNERS +0 -1
  30. package/.github/workflows/node.js.yml +0 -30
  31. package/.github/workflows/npm-publish.yml +0 -35
  32. package/src/groups.ts +0 -16
  33. package/src/index.ts +0 -383
  34. package/src/integrations/openai-agents/helpers.test.ts +0 -254
  35. package/src/integrations/openai-agents/ids.ts +0 -27
  36. package/src/integrations/openai-agents/index.ts +0 -8
  37. package/src/integrations/openai-agents/instrumentation.test.ts +0 -46
  38. package/src/integrations/openai-agents/instrumentation.ts +0 -47
  39. package/src/integrations/openai-agents/mapping.ts +0 -714
  40. package/src/integrations/openai-agents/otlp-json.ts +0 -120
  41. package/src/integrations/openai-agents/processor.test.ts +0 -509
  42. package/src/integrations/openai-agents/processor.ts +0 -388
  43. package/src/integrations/openai-agents/time.ts +0 -56
  44. package/src/integrations/openai-agents/types.ts +0 -49
  45. package/src/integrations/openai-agents/url.ts +0 -9
  46. package/src/openai-agents.ts +0 -1
  47. package/src/promptlayer.ts +0 -125
  48. package/src/run-error-tracking.test.ts +0 -146
  49. package/src/span-exporter.ts +0 -120
  50. package/src/span-wrapper.ts +0 -51
  51. package/src/templates.ts +0 -37
  52. package/src/tracing.ts +0 -20
  53. package/src/track.ts +0 -84
  54. package/src/types.ts +0 -689
  55. package/src/utils/blueprint-builder.test.ts +0 -727
  56. package/src/utils/blueprint-builder.ts +0 -1453
  57. package/src/utils/errors.test.ts +0 -68
  58. package/src/utils/errors.ts +0 -62
  59. package/src/utils/streaming.test.ts +0 -498
  60. package/src/utils/streaming.ts +0 -1402
  61. package/src/utils/utils.ts +0 -1228
  62. package/tsconfig.json +0 -115
  63. package/tsup.config.ts +0 -20
  64. package/vitest.config.ts +0 -9
@@ -1,254 +0,0 @@
1
- import { describe, expect, it } from "vitest";
2
-
3
- import {
4
- mapSpanId,
5
- mapTraceId,
6
- syntheticRootSpanId,
7
- } from "@/integrations/openai-agents/ids";
8
- import {
9
- normalizeMessages,
10
- normalizeResponseItems,
11
- OTLP_STATUS_CODE_ERROR,
12
- OTLP_STATUS_CODE_OK,
13
- spanDataAttributes,
14
- } from "@/integrations/openai-agents/mapping";
15
- import { buildOtlpJsonPayload } from "@/integrations/openai-agents/otlp-json";
16
- import { isoToUnixNano } from "@/integrations/openai-agents/time";
17
-
18
- describe("openai agents ids", () => {
19
- it("strips a valid trace_ prefix", () => {
20
- expect(
21
- mapTraceId("trace_0AF7651916CD43DD8448EB211C80319C")
22
- ).toBe("0af7651916cd43dd8448eb211c80319c");
23
- });
24
-
25
- it("hashes nonstandard trace ids", () => {
26
- expect(mapTraceId("trace_not_standard")).toHaveLength(32);
27
- expect(mapTraceId("trace_not_standard")).toBe(
28
- mapTraceId("trace_not_standard")
29
- );
30
- });
31
-
32
- it("hashes span ids to 16 hex chars", () => {
33
- expect(mapSpanId("span_123")).toHaveLength(16);
34
- expect(mapSpanId("span_123")).toBe(mapSpanId("span_123"));
35
- });
36
-
37
- it("derives a deterministic synthetic root id", () => {
38
- expect(syntheticRootSpanId("trace_123")).toHaveLength(16);
39
- expect(syntheticRootSpanId("trace_123")).toBe(
40
- syntheticRootSpanId("trace_123")
41
- );
42
- });
43
- });
44
-
45
- describe("openai agents time conversion", () => {
46
- it("converts ISO timestamps with nanoseconds", () => {
47
- expect(isoToUnixNano("2026-03-17T14:15:16.123456789Z")).toBe(
48
- "1773756916123456789"
49
- );
50
- });
51
-
52
- it("converts timestamps with timezone offsets", () => {
53
- expect(isoToUnixNano("2026-03-17T09:15:16.987-05:00")).toBe(
54
- "1773756916987000000"
55
- );
56
- });
57
- });
58
-
59
- describe("openai agents mapping", () => {
60
- it("normalizes message tool calls", () => {
61
- expect(
62
- normalizeMessages([
63
- {
64
- role: "assistant",
65
- content: "Calling weather",
66
- tool_calls: [
67
- {
68
- id: "call_1",
69
- function: {
70
- name: "weather",
71
- arguments: "{\"city\":\"Tokyo\"}",
72
- },
73
- },
74
- ],
75
- },
76
- ])
77
- ).toEqual([
78
- {
79
- role: "assistant",
80
- content: "Calling weather",
81
- tool_calls: [
82
- {
83
- id: "call_1",
84
- type: "tool_call",
85
- name: "weather",
86
- arguments: { city: "Tokyo" },
87
- },
88
- ],
89
- },
90
- ]);
91
- });
92
-
93
- it("normalizes tool calls and tool results in response input history", () => {
94
- expect(
95
- normalizeResponseItems([
96
- {
97
- type: "function_call",
98
- call_id: "call_weather",
99
- name: "weather_lookup",
100
- arguments: "{\"city\":\"Barcelona\"}",
101
- },
102
- {
103
- type: "tool_call_output_item",
104
- rawItem: {
105
- type: "function_call_result",
106
- callId: "call_weather",
107
- },
108
- output: "{\"temp_c\":20,\"condition\":\"Sunny\"}",
109
- },
110
- {
111
- type: "tool_call_output_item",
112
- rawItem: {
113
- type: "function_call_result",
114
- callId: "call_weather_json",
115
- },
116
- output: { tempC: 20, condition: "Sunny" },
117
- },
118
- {
119
- type: "function_call_result",
120
- callId: "call_translate",
121
- output: {
122
- type: "text",
123
- text: "- Hello: Hola\n- Thank you: Gràcies",
124
- },
125
- },
126
- ])
127
- ).toEqual([
128
- {
129
- role: "assistant",
130
- tool_calls: [
131
- {
132
- id: "call_weather",
133
- type: "tool_call",
134
- name: "weather_lookup",
135
- arguments: { city: "Barcelona" },
136
- },
137
- ],
138
- },
139
- {
140
- role: "tool",
141
- tool_call_id: "call_weather",
142
- content: "{\"temp_c\":20,\"condition\":\"Sunny\"}",
143
- },
144
- {
145
- role: "tool",
146
- tool_call_id: "call_weather_json",
147
- content: "{\"condition\":\"Sunny\",\"tempC\":20}",
148
- },
149
- {
150
- role: "tool",
151
- tool_call_id: "call_translate",
152
- content: "- Hello: Hola\n- Thank you: Gràcies",
153
- },
154
- ]);
155
- });
156
-
157
- it("maps generation data to canonical attrs", () => {
158
- const attrs = spanDataAttributes(
159
- {
160
- type: "generation",
161
- model: "gpt-4.1",
162
- input: [{ role: "user", content: "Hello" }],
163
- output: [{ role: "assistant", content: "Hi" }],
164
- usage: { input_tokens: 3, output_tokens: 5 },
165
- model_config: { temperature: 0.2 },
166
- },
167
- true
168
- );
169
-
170
- expect(attrs["gen_ai.provider.name"]).toBe("openai.responses");
171
- expect(attrs["gen_ai.request.model"]).toBe("gpt-4.1");
172
- expect(attrs["gen_ai.usage.input_tokens"]).toBe(3);
173
- expect(attrs["gen_ai.usage.output_tokens"]).toBe(5);
174
- expect(attrs["gen_ai.prompt.0.role"]).toBe("user");
175
- expect(attrs["gen_ai.completion.0.content"]).toBe("Hi");
176
- expect(attrs["openai_agents.model_config_json"]).toBe(
177
- "{\"temperature\":0.2}"
178
- );
179
- });
180
-
181
- it("keeps function spans namespaced", () => {
182
- const attrs = spanDataAttributes(
183
- {
184
- type: "function",
185
- name: "weather",
186
- input: "{\"city\":\"Tokyo\"}",
187
- output: "72 and sunny",
188
- mcp_data: "{\"server\":\"mcp\"}",
189
- },
190
- true
191
- );
192
-
193
- expect(attrs.node_type).toBe("CODE_EXECUTION");
194
- expect(attrs.tool_name).toBe("weather");
195
- expect(attrs["openai_agents.function.name"]).toBe("weather");
196
- expect(attrs["gen_ai.provider.name"]).toBeUndefined();
197
- });
198
-
199
- it("preserves unsupported span types as raw json", () => {
200
- const attrs = spanDataAttributes(
201
- {
202
- type: "speech_group",
203
- input: "hello",
204
- } as any,
205
- true
206
- );
207
-
208
- expect(attrs["openai_agents.raw_json"]).toBe(
209
- "{\"input\":\"hello\",\"type\":\"speech_group\"}"
210
- );
211
- });
212
- });
213
-
214
- describe("openai agents otlp json", () => {
215
- it("serializes OTLP spans in backend-compatible JSON", () => {
216
- const payload = buildOtlpJsonPayload([
217
- {
218
- traceId: "0af7651916cd43dd8448eb211c80319c",
219
- spanId: "00f067aa0ba902b7",
220
- name: "Generation",
221
- kind: 3,
222
- startTimeUnixNano: "1630000000000000000",
223
- endTimeUnixNano: "1630000001000000000",
224
- attributes: {
225
- "gen_ai.request.model": "gpt-4.1",
226
- "gen_ai.usage.input_tokens": 3,
227
- "gen_ai.usage.output_tokens": 5,
228
- },
229
- status: {
230
- code: OTLP_STATUS_CODE_OK,
231
- },
232
- events: [
233
- {
234
- name: "exception",
235
- timeUnixNano: "1630000001000000000",
236
- attributes: {
237
- "exception.type": "OpenAIAgentsError",
238
- },
239
- },
240
- ],
241
- },
242
- ]);
243
-
244
- const span = payload.resourceSpans[0].scopeSpans[0].spans[0] as any;
245
- expect(span.traceId).toBe("0af7651916cd43dd8448eb211c80319c");
246
- expect(span.status.code).toBe(OTLP_STATUS_CODE_OK);
247
- expect(span.events[0].name).toBe("exception");
248
- expect(span.attributes).toEqual(
249
- expect.arrayContaining([
250
- expect.objectContaining({ key: "gen_ai.request.model" }),
251
- ])
252
- );
253
- });
254
- });
@@ -1,27 +0,0 @@
1
- import { createHash } from "crypto";
2
-
3
- const TRACE_HEX_RE = /^[0-9a-f]{32}$/i;
4
-
5
- const sha256Hex = (value: string): string => {
6
- return createHash("sha256").update(value, "utf8").digest("hex");
7
- };
8
-
9
- export const mapTraceId = (originalTraceId: string): string => {
10
- const suffix = originalTraceId.startsWith("trace_")
11
- ? originalTraceId.slice("trace_".length)
12
- : originalTraceId;
13
-
14
- if (TRACE_HEX_RE.test(suffix)) {
15
- return suffix.toLowerCase();
16
- }
17
-
18
- return sha256Hex(originalTraceId).slice(0, 32);
19
- };
20
-
21
- export const mapSpanId = (originalSpanId: string): string => {
22
- return sha256Hex(originalSpanId).slice(0, 16);
23
- };
24
-
25
- export const syntheticRootSpanId = (originalTraceId: string): string => {
26
- return sha256Hex(`${originalTraceId}:root`).slice(0, 16);
27
- };
@@ -1,8 +0,0 @@
1
- export {
2
- instrumentOpenAIAgents,
3
- type InstrumentOpenAIAgentsOptions,
4
- } from "@/integrations/openai-agents/instrumentation";
5
- export {
6
- PromptLayerOpenAIAgentsProcessor,
7
- type PromptLayerOpenAIAgentsProcessorOptions,
8
- } from "@/integrations/openai-agents/processor";
@@ -1,46 +0,0 @@
1
- import { beforeEach, describe, expect, it, vi } from "vitest";
2
-
3
- vi.mock("@openai/agents", () => ({
4
- addTraceProcessor: vi.fn(),
5
- setTraceProcessors: vi.fn(),
6
- }));
7
-
8
- import {
9
- instrumentOpenAIAgents,
10
- } from "@/integrations/openai-agents/instrumentation";
11
- import * as agentsModule from "@openai/agents";
12
-
13
- describe("instrumentOpenAIAgents", () => {
14
- beforeEach(() => {
15
- vi.clearAllMocks();
16
- });
17
-
18
- it("registers the processor exclusively by default", async () => {
19
- const processor = await instrumentOpenAIAgents({
20
- apiKey: "pl_test",
21
- baseURL: "https://api.promptlayer.dev",
22
- });
23
-
24
- expect(agentsModule.setTraceProcessors).toHaveBeenCalledWith([processor]);
25
- expect(agentsModule.addTraceProcessor).not.toHaveBeenCalled();
26
- });
27
-
28
- it("registers the processor non-exclusively when requested", async () => {
29
- const processor = await instrumentOpenAIAgents({
30
- apiKey: "pl_test",
31
- baseURL: "https://api.promptlayer.dev",
32
- exclusive: false,
33
- });
34
-
35
- expect(agentsModule.addTraceProcessor).toHaveBeenCalledWith(processor);
36
- expect(agentsModule.setTraceProcessors).not.toHaveBeenCalled();
37
- });
38
-
39
- it("throws when the api key is missing", async () => {
40
- await expect(
41
- instrumentOpenAIAgents({
42
- apiKey: "",
43
- })
44
- ).rejects.toThrow("PromptLayer API key not provided");
45
- });
46
- });
@@ -1,47 +0,0 @@
1
- import { PromptLayerOpenAIAgentsProcessor } from "@/integrations/openai-agents/processor";
2
- import { trimTrailingSlashes } from "@/integrations/openai-agents/url";
3
- import { readEnv } from "@/utils/utils";
4
-
5
- export interface InstrumentOpenAIAgentsOptions {
6
- apiKey?: string;
7
- baseURL?: string;
8
- exclusive?: boolean;
9
- includeRawPayloads?: boolean;
10
- }
11
-
12
- const resolveBaseURL = (baseURL?: string): string => {
13
- return trimTrailingSlashes(
14
- baseURL ?? readEnv("PROMPTLAYER_BASE_URL") ?? "https://api.promptlayer.com"
15
- );
16
- };
17
-
18
- export const instrumentOpenAIAgents = async ({
19
- apiKey = readEnv("PROMPTLAYER_API_KEY"),
20
- baseURL,
21
- exclusive = true,
22
- includeRawPayloads = true,
23
- }: InstrumentOpenAIAgentsOptions = {}): Promise<PromptLayerOpenAIAgentsProcessor> => {
24
- if (!apiKey) {
25
- throw new Error(
26
- "PromptLayer API key not provided. Please set PROMPTLAYER_API_KEY or pass apiKey."
27
- );
28
- }
29
-
30
- const agentsModule: typeof import("@openai/agents") = await import(
31
- "@openai/agents"
32
- );
33
-
34
- const processor = new PromptLayerOpenAIAgentsProcessor({
35
- apiKey,
36
- baseURL: resolveBaseURL(baseURL),
37
- includeRawPayloads,
38
- });
39
-
40
- if (exclusive) {
41
- agentsModule.setTraceProcessors([processor]);
42
- } else {
43
- agentsModule.addTraceProcessor(processor);
44
- }
45
-
46
- return processor;
47
- };