deadpipe 1.0.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +29 -0
- package/README.md +221 -89
- package/dist/index.d.mts +144 -87
- package/dist/index.d.ts +144 -87
- package/dist/index.js +471 -114
- package/dist/index.mjs +461 -111
- package/package.json +14 -11
package/dist/index.js
CHANGED
|
@@ -20,52 +20,396 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/index.ts
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
23
|
+
PromptTracker: () => PromptTracker,
|
|
24
|
+
VERSION: () => VERSION,
|
|
25
|
+
detectRefusal: () => detectRefusal,
|
|
26
|
+
estimateCost: () => estimateCost,
|
|
27
|
+
extractAnthropicResponse: () => extractAnthropicResponse,
|
|
28
|
+
extractOpenAIResponse: () => extractOpenAIResponse,
|
|
29
|
+
track: () => track,
|
|
30
|
+
validateEnumBounds: () => validateEnumBounds,
|
|
31
|
+
validateNumericBounds: () => validateNumericBounds,
|
|
32
|
+
wrapOpenAI: () => wrapOpenAI
|
|
27
33
|
});
|
|
28
34
|
module.exports = __toCommonJS(index_exports);
|
|
29
|
-
var
|
|
35
|
+
var VERSION = "2.0.0";

// Per-1K-token USD pricing for known model families. Keys are matched as
// case-insensitive substrings of the provider-reported model name.
var MODEL_COSTS = {
  // OpenAI
  "gpt-4": { input: 0.03, output: 0.06 },
  // legacy
  "gpt-4-turbo": { input: 0.01, output: 0.03 },
  // legacy
  "gpt-4o": { input: 5e-3, output: 0.015 },
  "gpt-4o-mini": { input: 15e-5, output: 6e-4 },
  "gpt-4.1": { input: 2e-3, output: 8e-3 },
  "gpt-3.5-turbo": { input: 5e-4, output: 15e-4 },
  "gpt-5": { input: 175e-5, output: 0.014 },
  "gpt-5-mini": { input: 25e-5, output: 2e-3 },
  "gpt-5.2-pro": { input: 0.021, output: 0.168 },
  // Anthropic
  "claude-3-opus": { input: 0.015, output: 0.075 },
  "claude-3-sonnet": { input: 3e-3, output: 0.015 },
  "claude-3-haiku": { input: 25e-5, output: 125e-5 },
  "claude-3.5-sonnet": { input: 3e-3, output: 0.015 },
  "claude-opus-4": { input: 0.015, output: 0.075 },
  "claude-sonnet-4": { input: 3e-3, output: 0.015 },
  "claude-haiku-4": { input: 25e-5, output: 125e-5 }
};

// Entries sorted longest-key-first so the most specific model name wins.
// BUGFIX: plain insertion-order iteration made "gpt-4" match "gpt-4o",
// "gpt-4o-mini", "gpt-4.1", "gpt-4-turbo", ... before their own entries,
// returning legacy gpt-4 pricing for every newer gpt-4* model.
var MODEL_COST_ENTRIES = Object.entries(MODEL_COSTS).sort(
  (a, b) => b[0].length - a[0].length
);

/**
 * Estimate the USD cost of a completion.
 *
 * @param {string} model - Provider-reported model name (any casing).
 * @param {number} inputTokens - Prompt token count.
 * @param {number} outputTokens - Completion token count.
 * @returns {number|null} Cost in USD rounded to 6 decimal places, or null
 *   when the model does not match any known pricing entry.
 */
function estimateCost(model, inputTokens, outputTokens) {
  const modelLower = model.toLowerCase();
  for (const [knownModel, costs] of MODEL_COST_ENTRIES) {
    if (modelLower.includes(knownModel)) {
      const inputCost = inputTokens / 1e3 * costs.input;
      const outputCost = outputTokens / 1e3 * costs.output;
      // Round to micro-dollar precision to avoid float noise in telemetry.
      return Math.round((inputCost + outputCost) * 1e6) / 1e6;
    }
  }
  return null;
}
|
|
69
|
+
/**
 * Synchronous djb2 hash of a string, rendered as a 16-character hex id.
 * Not cryptographic — only used to fingerprint prompts/tools for grouping.
 *
 * @param {string} content - Text to fingerprint.
 * @returns {string} 16-character lowercase hex digest.
 */
function hashContentSync(content) {
  // djb2: acc = acc * 33 + charCode, seeded with 5381.
  // Iterates UTF-16 units (charCodeAt) on purpose; for...of would differ
  // on astral characters.
  let acc = 5381;
  for (let idx = 0; idx < content.length; idx += 1) {
    acc = (acc << 5) + acc + content.charCodeAt(idx);
  }
  // Hex-encode, then clamp/pad to exactly 16 characters.
  return Math.abs(acc).toString(16).slice(0, 16).padStart(16, "0");
}

/** Fingerprint a message array by hashing its JSON serialization. */
function hashMessages(messages) {
  return hashContentSync(JSON.stringify(messages));
}

/** Fingerprint a tool list; undefined when there are no tools. */
function hashTools(tools) {
  if (!tools || tools.length === 0) return void 0;
  return hashContentSync(JSON.stringify(tools));
}
|
|
85
|
+
// Lowercase phrases that typically signal a model refusing a request.
// Matched as case-insensitive substrings; heuristic, not exhaustive.
var REFUSAL_PATTERNS = [
  "i can't help with",
  "i cannot help with",
  "i'm not able to",
  "i am not able to",
  "i won't be able to",
  "i'm unable to",
  "i cannot provide",
  "i can't provide",
  "i must decline",
  "i cannot assist with",
  "i can't assist with",
  "as an ai",
  "as a language model",
  "i don't have the ability",
  "i cannot comply",
  "i'm designed to",
  "my purpose is to",
  "violates my guidelines",
  "against my guidelines",
  "ethical guidelines",
  "i apologize, but i cannot",
  "i'm sorry, but i can't"
];

/**
 * Heuristic refusal detector.
 *
 * @param {string} text - Model output to scan.
 * @returns {boolean} True when any known refusal phrase appears
 *   (case-insensitive substring match).
 */
function detectRefusal(text) {
  const haystack = text.toLowerCase();
  for (const phrase of REFUSAL_PATTERNS) {
    if (haystack.includes(phrase)) {
      return true;
    }
  }
  return false;
}
|
|
113
|
+
/**
 * Check that every enum-constrained field present in `data` holds one of
 * its allowed values.
 *
 * @param {object} data - Parsed model output.
 * @param {Object<string, Array>} [enumFields] - Map of field name to the
 *   list of allowed values; absent fields are not validated.
 * @returns {boolean} True when no present field is out of range (also when
 *   `enumFields` is not provided).
 */
function validateEnumBounds(data, enumFields) {
  if (!enumFields) return true;
  return Object.entries(enumFields).every(
    ([fieldName, allowed]) => !(fieldName in data) || allowed.includes(data[fieldName])
  );
}
|
|
122
|
+
/**
 * Check that every bounded numeric field present in `data` lies within its
 * inclusive [min, max] range. A `null` min or max means unbounded on that
 * side; non-number values and absent fields are skipped, not failed.
 *
 * @param {object} data - Parsed model output.
 * @param {Object<string, [number|null, number|null]>} [numericBounds] -
 *   Map of field name to its [min, max] pair.
 * @returns {boolean} True when no present numeric field violates its bounds.
 */
function validateNumericBounds(data, numericBounds) {
  if (!numericBounds) return true;
  for (const [fieldName, bounds] of Object.entries(numericBounds)) {
    if (!(fieldName in data)) continue;
    const value = data[fieldName];
    if (typeof value !== "number") continue;
    const [lowerBound, upperBound] = bounds;
    if (lowerBound !== null && value < lowerBound) return false;
    if (upperBound !== null && value > upperBound) return false;
  }
  return true;
}
|
|
135
|
+
/**
 * Normalize an OpenAI-shaped response (chat.completions or the Responses
 * API) into a flat record of the fields telemetry cares about. Missing or
 * malformed pieces degrade to the defaults below rather than throwing.
 *
 * @param {any} response - Raw SDK response object (may be null/undefined).
 * @returns {{model: string, content: string, inputTokens: number|null,
 *   outputTokens: number|null, totalTokens: number|null,
 *   finishReason: string|null, toolCalls: Array<{name: string, arguments: string}>,
 *   logprobs: any}} Normalized summary.
 */
function extractOpenAIResponse(response) {
  const result = {
    model: "",
    content: "",
    inputTokens: null,
    outputTokens: null,
    totalTokens: null,
    finishReason: null,
    toolCalls: [],
    logprobs: null
  };
  if (response?.model) {
    result.model = response.model;
  }
  const choice = response?.choices?.[0];
  if (choice) {
    const message = choice.message;
    if (message) {
      result.content = message.content || "";
      const calls = message.tool_calls;
      if (calls) {
        result.toolCalls = calls.map((call) => ({
          name: call.function.name,
          arguments: call.function.arguments
        }));
      }
    }
    if (choice.finish_reason) {
      result.finishReason = choice.finish_reason;
    }
    if (choice.logprobs) {
      result.logprobs = choice.logprobs;
    }
  }
  // Responses-API shape: a top-level `output` string wins over choices.
  if (response?.output) {
    result.content = response.output || "";
  }
  const usage = response?.usage;
  if (usage) {
    result.inputTokens = usage.prompt_tokens ?? null;
    result.outputTokens = usage.completion_tokens ?? null;
    result.totalTokens = usage.total_tokens ?? null;
  }
  return result;
}
|
|
177
|
+
/**
 * Normalize an Anthropic Messages-API response into the same flat record
 * shape as extractOpenAIResponse. Text blocks are concatenated in order;
 * tool_use blocks become {name, arguments} with JSON-stringified input.
 *
 * @param {any} response - Raw SDK response object (may be null/undefined).
 * @returns {{model: string, content: string, inputTokens: number|null,
 *   outputTokens: number|null, totalTokens: number|null,
 *   finishReason: string|null, toolCalls: Array<{name: string, arguments: string}>,
 *   logprobs: any}} Normalized summary (logprobs is always null here).
 */
function extractAnthropicResponse(response) {
  const result = {
    model: "",
    content: "",
    inputTokens: null,
    outputTokens: null,
    totalTokens: null,
    finishReason: null,
    toolCalls: [],
    logprobs: null
  };
  if (response?.model) {
    result.model = response.model;
  }
  const blocks = response?.content;
  if (blocks && Array.isArray(blocks)) {
    const textParts = [];
    const toolUses = [];
    for (const block of blocks) {
      // A block counts as text when typed "text" or carrying a .text field.
      if (block.type === "text" || block.text) {
        textParts.push(block.text);
      }
      if (block.type === "tool_use") {
        toolUses.push({ name: block.name, arguments: JSON.stringify(block.input) });
      }
    }
    result.content = textParts.join("");
    result.toolCalls = toolUses;
  }
  if (response?.stop_reason) {
    result.finishReason = response.stop_reason;
  }
  const usage = response?.usage;
  if (usage) {
    result.inputTokens = usage.input_tokens ?? null;
    result.outputTokens = usage.output_tokens ?? null;
    // Anthropic reports no total; derive it when both halves are present.
    if (result.inputTokens !== null && result.outputTokens !== null) {
      result.totalTokens = result.inputTokens + result.outputTokens;
    }
  }
  return result;
}
|
|
214
|
+
/**
 * Mean of the per-token logprobs in an OpenAI logprobs payload.
 *
 * @param {any} logprobs - Choice-level logprobs object ({content: [...]}).
 * @returns {number|null} Average of numeric `logprob` entries, or null when
 *   there are none or the payload is malformed (errors are swallowed on
 *   purpose — telemetry must never throw).
 */
function calculateLogprobMean(logprobs) {
  if (!logprobs?.content) return null;
  try {
    let sum = 0;
    let count = 0;
    for (const token of logprobs.content) {
      if (typeof token.logprob === "number") {
        sum += token.logprob;
        count += 1;
      }
    }
    if (count > 0) {
      return sum / count;
    }
  } catch {
    // Malformed provider payload: fall through to null.
  }
  return null;
}
|
|
225
|
+
var PromptTracker = class {
|
|
226
|
+
promptId;
|
|
30
227
|
apiKey;
|
|
31
228
|
baseUrl;
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
229
|
+
timeoutMs;
|
|
230
|
+
// Identity
|
|
231
|
+
appId;
|
|
232
|
+
environment;
|
|
233
|
+
versionStr;
|
|
234
|
+
provider;
|
|
235
|
+
// Validation
|
|
236
|
+
schema;
|
|
237
|
+
enumFields;
|
|
238
|
+
numericBounds;
|
|
239
|
+
// Context hashes
|
|
240
|
+
promptHash;
|
|
241
|
+
toolSchemaHash;
|
|
242
|
+
systemPromptHash;
|
|
243
|
+
// Timing
|
|
244
|
+
startTime = null;
|
|
245
|
+
firstTokenTime = null;
|
|
246
|
+
endTime = null;
|
|
247
|
+
// State
|
|
248
|
+
telemetry;
|
|
249
|
+
recorded = false;
|
|
250
|
+
retryCount = 0;
|
|
251
|
+
constructor(promptId, options = {}) {
|
|
252
|
+
this.promptId = promptId;
|
|
253
|
+
this.apiKey = options.apiKey || process.env.DEADPIPE_API_KEY;
|
|
46
254
|
this.baseUrl = (options.baseUrl || "https://www.deadpipe.com/api/v1").replace(/\/$/, "");
|
|
47
|
-
this.
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
255
|
+
this.timeoutMs = options.timeout || 1e4;
|
|
256
|
+
this.appId = options.appId || process.env.DEADPIPE_APP_ID;
|
|
257
|
+
this.environment = options.environment || process.env.DEADPIPE_ENVIRONMENT;
|
|
258
|
+
this.versionStr = options.version || process.env.DEADPIPE_VERSION || process.env.GIT_COMMIT;
|
|
259
|
+
this.provider = options.provider || "openai";
|
|
260
|
+
this.schema = options.schema;
|
|
261
|
+
this.enumFields = options.enumFields;
|
|
262
|
+
this.numericBounds = options.numericBounds;
|
|
263
|
+
this.promptHash = options.messages ? hashMessages(options.messages) : void 0;
|
|
264
|
+
this.toolSchemaHash = hashTools(options.tools);
|
|
265
|
+
this.systemPromptHash = options.systemPrompt ? hashContentSync(options.systemPrompt) : void 0;
|
|
266
|
+
this.telemetry = {
|
|
267
|
+
prompt_id: this.promptId,
|
|
268
|
+
provider: this.provider,
|
|
269
|
+
app_id: this.appId,
|
|
270
|
+
environment: this.environment,
|
|
271
|
+
version: this.versionStr,
|
|
272
|
+
prompt_hash: this.promptHash,
|
|
273
|
+
tool_schema_hash: this.toolSchemaHash,
|
|
274
|
+
system_prompt_hash: this.systemPromptHash,
|
|
275
|
+
status: "success"
|
|
61
276
|
};
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
277
|
+
}
|
|
278
|
+
start() {
|
|
279
|
+
this.startTime = Date.now();
|
|
280
|
+
this.telemetry.request_start = new Date(this.startTime).toISOString();
|
|
281
|
+
}
|
|
282
|
+
markFirstToken() {
|
|
283
|
+
if (this.firstTokenTime === null && this.startTime !== null) {
|
|
284
|
+
this.firstTokenTime = Date.now();
|
|
285
|
+
this.telemetry.first_token_time = this.firstTokenTime - this.startTime;
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
markRetry() {
|
|
289
|
+
this.retryCount++;
|
|
290
|
+
this.telemetry.retry_count = this.retryCount;
|
|
291
|
+
}
|
|
292
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
293
|
+
record(response, parsedOutput) {
|
|
294
|
+
this.endTime = Date.now();
|
|
295
|
+
this.telemetry.end_time = new Date(this.endTime).toISOString();
|
|
296
|
+
this.telemetry.total_latency = this.startTime ? this.endTime - this.startTime : 0;
|
|
297
|
+
const extracted = this.provider === "anthropic" ? extractAnthropicResponse(response) : extractOpenAIResponse(response);
|
|
298
|
+
this.telemetry.model = extracted.model;
|
|
299
|
+
this.telemetry.input_tokens = extracted.inputTokens ?? void 0;
|
|
300
|
+
this.telemetry.output_tokens = extracted.outputTokens ?? void 0;
|
|
301
|
+
this.telemetry.total_tokens = extracted.totalTokens ?? void 0;
|
|
302
|
+
this.telemetry.http_status = 200;
|
|
303
|
+
const content = extracted.content;
|
|
304
|
+
this.telemetry.output_length = content?.length ?? 0;
|
|
305
|
+
this.telemetry.empty_output = !content || content.trim().length === 0;
|
|
306
|
+
this.telemetry.truncated = extracted.finishReason === "length";
|
|
307
|
+
this.telemetry.tool_call_flag = extracted.toolCalls.length > 0;
|
|
308
|
+
this.telemetry.tool_calls_count = extracted.toolCalls.length;
|
|
309
|
+
if (content) {
|
|
310
|
+
this.telemetry.output_hash = hashContentSync(content);
|
|
311
|
+
}
|
|
312
|
+
if (extracted.logprobs) {
|
|
313
|
+
this.telemetry.top_logprob_mean = calculateLogprobMean(extracted.logprobs) ?? void 0;
|
|
314
|
+
}
|
|
315
|
+
if (this.telemetry.input_tokens && this.telemetry.output_tokens && this.telemetry.model) {
|
|
316
|
+
this.telemetry.estimated_cost_usd = estimateCost(
|
|
317
|
+
this.telemetry.model,
|
|
318
|
+
this.telemetry.input_tokens,
|
|
319
|
+
this.telemetry.output_tokens
|
|
320
|
+
) ?? void 0;
|
|
321
|
+
}
|
|
322
|
+
if (content) {
|
|
323
|
+
this.telemetry.refusal_flag = detectRefusal(content);
|
|
324
|
+
if (this.telemetry.refusal_flag) {
|
|
325
|
+
this.telemetry.status = "refusal";
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
if (this.telemetry.empty_output) {
|
|
329
|
+
this.telemetry.status = "empty";
|
|
330
|
+
}
|
|
331
|
+
let parsedData = parsedOutput;
|
|
332
|
+
if (parsedData === void 0 && content) {
|
|
333
|
+
try {
|
|
334
|
+
const contentStripped = content.trim();
|
|
335
|
+
if (contentStripped.startsWith("{") || contentStripped.startsWith("[")) {
|
|
336
|
+
parsedData = JSON.parse(contentStripped);
|
|
337
|
+
this.telemetry.json_parse_success = true;
|
|
338
|
+
} else if (contentStripped.includes("```json")) {
|
|
339
|
+
const start = contentStripped.indexOf("```json") + 7;
|
|
340
|
+
const end = contentStripped.indexOf("```", start);
|
|
341
|
+
if (end > start) {
|
|
342
|
+
parsedData = JSON.parse(contentStripped.slice(start, end).trim());
|
|
343
|
+
this.telemetry.json_parse_success = true;
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
} catch {
|
|
347
|
+
this.telemetry.json_parse_success = false;
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
let validatedResult = null;
|
|
351
|
+
if (this.schema && parsedData !== null) {
|
|
352
|
+
const validation = this.schema.validate(parsedData);
|
|
353
|
+
this.telemetry.schema_validation_pass = validation.success;
|
|
354
|
+
if (!validation.success) {
|
|
355
|
+
this.telemetry.status = "schema_violation";
|
|
356
|
+
if (validation.errors) {
|
|
357
|
+
this.telemetry.missing_required_fields = JSON.stringify(validation.errors);
|
|
358
|
+
}
|
|
359
|
+
}
|
|
360
|
+
validatedResult = validation.data;
|
|
361
|
+
}
|
|
362
|
+
if (this.enumFields && parsedData !== null) {
|
|
363
|
+
if (!validateEnumBounds(parsedData, this.enumFields)) {
|
|
364
|
+
this.telemetry.enum_out_of_range = true;
|
|
365
|
+
this.telemetry.status = "schema_violation";
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
if (this.numericBounds && parsedData !== null) {
|
|
369
|
+
if (!validateNumericBounds(parsedData, this.numericBounds)) {
|
|
370
|
+
this.telemetry.numeric_out_of_bounds = true;
|
|
371
|
+
this.telemetry.status = "schema_violation";
|
|
372
|
+
}
|
|
373
|
+
}
|
|
374
|
+
this.send();
|
|
375
|
+
this.recorded = true;
|
|
376
|
+
if (this.schema) {
|
|
377
|
+
return validatedResult;
|
|
378
|
+
}
|
|
379
|
+
return response;
|
|
380
|
+
}
|
|
381
|
+
recordError(error) {
|
|
382
|
+
this.endTime = Date.now();
|
|
383
|
+
this.telemetry.end_time = new Date(this.endTime).toISOString();
|
|
384
|
+
this.telemetry.total_latency = this.startTime ? this.endTime - this.startTime : 0;
|
|
385
|
+
this.telemetry.status = "error";
|
|
386
|
+
this.telemetry.error_message = error.message;
|
|
387
|
+
const err = error;
|
|
388
|
+
if (err.status) {
|
|
389
|
+
this.telemetry.http_status = err.status;
|
|
390
|
+
}
|
|
391
|
+
if (err.code) {
|
|
392
|
+
this.telemetry.provider_error_code = String(err.code);
|
|
393
|
+
}
|
|
394
|
+
if (error.message.toLowerCase().includes("timeout")) {
|
|
395
|
+
this.telemetry.status = "timeout";
|
|
396
|
+
this.telemetry.timeout = true;
|
|
397
|
+
}
|
|
398
|
+
this.send();
|
|
399
|
+
this.recorded = true;
|
|
400
|
+
}
|
|
401
|
+
async send() {
|
|
402
|
+
if (!this.apiKey) return;
|
|
65
403
|
try {
|
|
66
404
|
const controller = new AbortController();
|
|
67
|
-
const timeoutId = setTimeout(() => controller.abort(), this.
|
|
68
|
-
const
|
|
405
|
+
const timeoutId = setTimeout(() => controller.abort(), this.timeoutMs);
|
|
406
|
+
const payload = {};
|
|
407
|
+
for (const [key, value] of Object.entries(this.telemetry)) {
|
|
408
|
+
if (value !== void 0) {
|
|
409
|
+
payload[key] = value;
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
await fetch(`${this.baseUrl}/prompt`, {
|
|
69
413
|
method: "POST",
|
|
70
414
|
headers: {
|
|
71
415
|
"Content-Type": "application/json",
|
|
@@ -75,90 +419,103 @@ var Deadpipe = class {
|
|
|
75
419
|
signal: controller.signal
|
|
76
420
|
});
|
|
77
421
|
clearTimeout(timeoutId);
|
|
78
|
-
if (!response.ok) {
|
|
79
|
-
return null;
|
|
80
|
-
}
|
|
81
|
-
return await response.json();
|
|
82
422
|
} catch {
|
|
83
|
-
return null;
|
|
84
|
-
}
|
|
85
|
-
}
|
|
86
|
-
/**
|
|
87
|
-
* Run a function with automatic heartbeat on completion.
|
|
88
|
-
*
|
|
89
|
-
* @param pipelineId - Unique identifier for this pipeline.
|
|
90
|
-
* @param fn - The function to run.
|
|
91
|
-
* @param options - Additional options.
|
|
92
|
-
* @returns The result of the function.
|
|
93
|
-
*
|
|
94
|
-
* @example
|
|
95
|
-
* const result = await dp.run('daily-etl', async () => {
|
|
96
|
-
* const records = await processData();
|
|
97
|
-
* return { recordsProcessed: records.length };
|
|
98
|
-
* });
|
|
99
|
-
*/
|
|
100
|
-
async run(pipelineId, fn, options = {}) {
|
|
101
|
-
const startTime = Date.now();
|
|
102
|
-
let status = "success";
|
|
103
|
-
let recordsProcessed;
|
|
104
|
-
try {
|
|
105
|
-
const result = await fn();
|
|
106
|
-
if (result && typeof result === "object" && "recordsProcessed" in result) {
|
|
107
|
-
recordsProcessed = result.recordsProcessed;
|
|
108
|
-
}
|
|
109
|
-
return result;
|
|
110
|
-
} catch (error) {
|
|
111
|
-
status = "failed";
|
|
112
|
-
throw error;
|
|
113
|
-
} finally {
|
|
114
|
-
const durationMs = Date.now() - startTime;
|
|
115
|
-
await this.ping(pipelineId, {
|
|
116
|
-
status,
|
|
117
|
-
durationMs,
|
|
118
|
-
recordsProcessed,
|
|
119
|
-
appName: options.appName
|
|
120
|
-
});
|
|
121
423
|
}
|
|
122
424
|
}
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
* @param options - Additional options.
|
|
129
|
-
* @returns A wrapped function.
|
|
130
|
-
*
|
|
131
|
-
* @example
|
|
132
|
-
* const myPipeline = dp.wrap('daily-etl', async () => {
|
|
133
|
-
* await processData();
|
|
134
|
-
* });
|
|
135
|
-
*
|
|
136
|
-
* // Later...
|
|
137
|
-
* await myPipeline();
|
|
138
|
-
*/
|
|
139
|
-
wrap(pipelineId, fn, options = {}) {
|
|
140
|
-
return async (...args) => {
|
|
141
|
-
return this.run(pipelineId, () => fn(...args), options);
|
|
142
|
-
};
|
|
425
|
+
isRecorded() {
|
|
426
|
+
return this.recorded;
|
|
427
|
+
}
|
|
428
|
+
getTelemetry() {
|
|
429
|
+
return { ...this.telemetry };
|
|
143
430
|
}
|
|
144
431
|
};
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
432
|
+
/**
 * Run `fn` under a fresh PromptTracker, guaranteeing telemetry is emitted
 * exactly once. The tracker is passed to `fn` as its only argument; if `fn`
 * finishes (or throws) without recording, a fallback error record is sent.
 *
 * @param {string} promptId - Logical prompt identifier for telemetry.
 * @param {(tracker: PromptTracker) => Promise<any>} fn - Work to run.
 * @param {object} [options] - PromptTracker constructor options.
 * @returns {Promise<any>} Whatever `fn` resolves to.
 * @throws Rethrows any error from `fn` after recording it.
 */
async function track(promptId, fn, options = {}) {
  const tracker = new PromptTracker(promptId, options);
  tracker.start();
  try {
    const result = await fn(tracker);
    if (!tracker.isRecorded()) {
      tracker.recordError(new Error("No response recorded"));
    }
    return result;
  } catch (error) {
    if (!tracker.isRecorded()) {
      const normalized = error instanceof Error ? error : new Error(String(error));
      tracker.recordError(normalized);
    }
    throw error;
  }
}
|
|
155
|
-
|
|
156
|
-
|
|
448
|
+
/**
 * Wrap an OpenAI SDK client so chat.completions.create and responses.create
 * calls are automatically instrumented via `track`. The original client is
 * left untouched; a prototype-linked copy with overridden namespaces is
 * returned, so every other method still resolves to the real client.
 *
 * @param {any} client - OpenAI SDK client instance.
 * @param {{promptId: string} & object} options - promptId plus any
 *   PromptTracker options forwarded to `track`.
 * @returns {any} Instrumented client.
 */
function wrapOpenAI(client, options) {
  const { promptId, ...trackOptions } = options;
  const instrumented = Object.create(client);
  if (client.chat?.completions) {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const createChatCompletion = async (params) => {
      const messages = params.messages || [];
      // The first system message (if any) is hashed separately for telemetry.
      let systemPrompt;
      for (const msg of messages) {
        if (msg.role === "system") {
          systemPrompt = msg.content || "";
          break;
        }
      }
      return track(
        promptId,
        async (t) => {
          const response = await client.chat.completions.create(params);
          t.record(response);
          return response;
        },
        { ...trackOptions, messages, tools: params.tools, systemPrompt }
      );
    };
    instrumented.chat = {
      ...client.chat,
      completions: { ...client.chat.completions, create: createChatCompletion }
    };
  }
  if (client.responses) {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const createResponse = async (params) => {
      const inputContent = params.input || "";
      // Responses API accepts either a plain string or a message array;
      // normalize the string form into a single user message for hashing.
      const messages = typeof inputContent === "string" ? [{ role: "user", content: inputContent }] : inputContent;
      return track(
        promptId,
        async (t) => {
          const response = await client.responses.create(params);
          t.record(response);
          return response;
        },
        { ...trackOptions, messages }
      );
    };
    instrumented.responses = { ...client.responses, create: createResponse };
  }
  return instrumented;
}
|
|
158
|
-
var index_default = Deadpipe;
|
|
159
509
|
// Annotate the CommonJS export names for ESM import in node:
|
|
160
510
|
0 && (module.exports = {
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
511
|
+
PromptTracker,
|
|
512
|
+
VERSION,
|
|
513
|
+
detectRefusal,
|
|
514
|
+
estimateCost,
|
|
515
|
+
extractAnthropicResponse,
|
|
516
|
+
extractOpenAIResponse,
|
|
517
|
+
track,
|
|
518
|
+
validateEnumBounds,
|
|
519
|
+
validateNumericBounds,
|
|
520
|
+
wrapOpenAI
|
|
164
521
|
});
|