@agentuity/runtime 1.0.23 → 1.0.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dev-patches/aisdk.d.ts +17 -0
- package/dist/dev-patches/aisdk.d.ts.map +1 -0
- package/dist/dev-patches/aisdk.js +154 -0
- package/dist/dev-patches/aisdk.js.map +1 -0
- package/dist/dev-patches/gateway.d.ts +16 -0
- package/dist/dev-patches/gateway.d.ts.map +1 -0
- package/dist/dev-patches/gateway.js +55 -0
- package/dist/dev-patches/gateway.js.map +1 -0
- package/dist/dev-patches/index.d.ts +21 -0
- package/dist/dev-patches/index.d.ts.map +1 -0
- package/dist/dev-patches/index.js +33 -0
- package/dist/dev-patches/index.js.map +1 -0
- package/dist/dev-patches/otel-llm.d.ts +12 -0
- package/dist/dev-patches/otel-llm.d.ts.map +1 -0
- package/dist/dev-patches/otel-llm.js +345 -0
- package/dist/dev-patches/otel-llm.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/services/local/_db.d.ts.map +1 -1
- package/dist/services/local/_db.js +78 -1
- package/dist/services/local/_db.js.map +1 -1
- package/dist/services/local/email.d.ts +2 -0
- package/dist/services/local/email.d.ts.map +1 -1
- package/dist/services/local/email.js +6 -0
- package/dist/services/local/email.js.map +1 -1
- package/dist/services/local/task.d.ts +26 -1
- package/dist/services/local/task.d.ts.map +1 -1
- package/dist/services/local/task.js +304 -4
- package/dist/services/local/task.js.map +1 -1
- package/package.json +7 -7
- package/src/dev-patches/aisdk.ts +172 -0
- package/src/dev-patches/gateway.ts +70 -0
- package/src/dev-patches/index.ts +37 -0
- package/src/dev-patches/otel-llm.ts +408 -0
- package/src/index.ts +3 -0
- package/src/services/local/_db.ts +98 -5
- package/src/services/local/email.ts +9 -4
- package/src/services/local/task.ts +448 -4
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Runtime LLM Gateway patches for dev mode.
|
|
3
|
+
*
|
|
4
|
+
* Replaces the build-time patches from cli/src/cmd/build/patch/llm.ts.
|
|
5
|
+
* Sets environment variables to route LLM SDK calls through the Agentuity AI Gateway
|
|
6
|
+
* when the user hasn't provided their own API keys.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
interface GatewayConfig {
|
|
10
|
+
apiKeyEnv: string;
|
|
11
|
+
baseUrlEnv: string;
|
|
12
|
+
provider: string;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
const GATEWAY_CONFIGS: GatewayConfig[] = [
|
|
16
|
+
{ apiKeyEnv: 'ANTHROPIC_API_KEY', baseUrlEnv: 'ANTHROPIC_BASE_URL', provider: 'anthropic' },
|
|
17
|
+
{ apiKeyEnv: 'GROQ_API_KEY', baseUrlEnv: 'GROQ_BASE_URL', provider: 'groq' },
|
|
18
|
+
{ apiKeyEnv: 'OPENAI_API_KEY', baseUrlEnv: 'OPENAI_BASE_URL', provider: 'openai' },
|
|
19
|
+
];
|
|
20
|
+
|
|
21
|
+
function warnMissingKey(envKey: string): void {
|
|
22
|
+
const isDev =
|
|
23
|
+
process.env.AGENTUITY_ENVIRONMENT === 'development' || process.env.NODE_ENV !== 'production';
|
|
24
|
+
if (isDev) {
|
|
25
|
+
console.error('[ERROR] No credentials found for this AI provider. To fix this, either:');
|
|
26
|
+
console.error(
|
|
27
|
+
' 1. Login to Agentuity Cloud (agentuity auth login) to use the AI Gateway (recommended)'
|
|
28
|
+
);
|
|
29
|
+
console.error(` 2. Set ${envKey} in your .env file to use the provider directly`);
|
|
30
|
+
} else {
|
|
31
|
+
console.error(`[ERROR] The environment variable ${envKey} is required. Either:`);
|
|
32
|
+
console.error(
|
|
33
|
+
' 1. Use Agentuity Cloud AI Gateway by ensuring AGENTUITY_SDK_KEY is configured'
|
|
34
|
+
);
|
|
35
|
+
console.error(` 2. Set ${envKey} using "agentuity env set ${envKey}" and redeploy`);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Set environment variables to route LLM calls through the AI Gateway.
|
|
41
|
+
*
|
|
42
|
+
* For each provider, if the user hasn't set their own API key (or it equals
|
|
43
|
+
* the SDK key), we redirect to the gateway. This matches the behavior of
|
|
44
|
+
* the build-time patches in patch/llm.ts.
|
|
45
|
+
*/
|
|
46
|
+
export function applyGatewayPatches(): void {
|
|
47
|
+
const sdkKey = process.env.AGENTUITY_SDK_KEY;
|
|
48
|
+
const gatewayUrl =
|
|
49
|
+
process.env.AGENTUITY_AIGATEWAY_URL ||
|
|
50
|
+
process.env.AGENTUITY_TRANSPORT_URL ||
|
|
51
|
+
(sdkKey ? 'https://agentuity.ai' : '');
|
|
52
|
+
|
|
53
|
+
for (const config of GATEWAY_CONFIGS) {
|
|
54
|
+
const currentKey = process.env[config.apiKeyEnv];
|
|
55
|
+
|
|
56
|
+
// If the user provided their own key (and it's not the SDK key), leave it alone
|
|
57
|
+
if (currentKey && currentKey !== sdkKey) {
|
|
58
|
+
continue;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// Route through gateway if we have both URL and SDK key
|
|
62
|
+
if (gatewayUrl && sdkKey) {
|
|
63
|
+
process.env[config.apiKeyEnv] = sdkKey;
|
|
64
|
+
process.env[config.baseUrlEnv] = `${gatewayUrl}/gateway/${config.provider}`;
|
|
65
|
+
console.debug(`Enabled Agentuity AI Gateway for ${config.provider}`);
|
|
66
|
+
} else if (!currentKey) {
|
|
67
|
+
warnMissingKey(config.apiKeyEnv);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/**
 * Runtime dev patches — replaces build-time Bun.build patches for dev mode.
 *
 * When --experimental-no-bundle is used, the generated entry file (src/generated/app.ts)
 * is run directly by Bun without bundling. These runtime patches apply the same
 * monkey-patches that would normally be injected by the Bun.build plugin during bundling.
 *
 * Three categories of patches:
 * 1. Gateway: Set env vars to route LLM calls through Agentuity AI Gateway
 * 2. AI SDK: Wrap Vercel AI SDK functions with telemetry + gateway config
 * 3. OTel LLM: Wrap LLM SDK .create() methods with OpenTelemetry spans
 */

import { applyGatewayPatches } from './gateway';
import { applyAISDKCorePatches, applyAISDKProviderPatches } from './aisdk';
import { applyOtelLLMPatches } from './otel-llm';

/**
 * Apply all runtime dev patches.
 *
 * Must be called:
 * - AFTER bootstrapRuntimeEnv() (so env vars like AGENTUITY_SDK_KEY are loaded)
 * - BEFORE any user code imports LLM SDKs
 *
 * The calls below are intentionally sequential: the gateway step mutates
 * process.env, which the later patches read. Do not reorder or parallelize.
 */
export async function applyDevPatches(): Promise<void> {
  // 1. Set gateway env vars first (other patches may read them)
  applyGatewayPatches();

  // 2. Patch AI SDK core functions (telemetry injection)
  await applyAISDKCorePatches();

  // 3. Patch AI SDK provider factories (gateway routing)
  await applyAISDKProviderPatches();

  // 4. Patch LLM SDK prototypes with OTel spans
  await applyOtelLLMPatches();
}
|
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
/**
 * Runtime OpenTelemetry LLM instrumentation patches for dev mode.
 *
 * Replaces the build-time patches from cli/src/cmd/build/patch/otel-llm.ts.
 * Wraps LLM SDK methods (OpenAI, Anthropic, Groq) with OTel spans to capture
 * model, tokens, latency, and streaming support.
 */

/* eslint-disable @typescript-eslint/no-explicit-any */

import { SpanKind, SpanStatusCode, trace } from '@opentelemetry/api';
import type { Span } from '@opentelemetry/api';

// Span attribute names following the OpenTelemetry generative-AI (gen_ai.*)
// semantic conventions.
const ATTR_GEN_AI_SYSTEM = 'gen_ai.system';
const ATTR_GEN_AI_REQUEST_MODEL = 'gen_ai.request.model';
const ATTR_GEN_AI_REQUEST_MAX_TOKENS = 'gen_ai.request.max_tokens';
const ATTR_GEN_AI_REQUEST_TEMPERATURE = 'gen_ai.request.temperature';
const ATTR_GEN_AI_REQUEST_TOP_P = 'gen_ai.request.top_p';
const ATTR_GEN_AI_REQUEST_FREQUENCY_PENALTY = 'gen_ai.request.frequency_penalty';
const ATTR_GEN_AI_REQUEST_PRESENCE_PENALTY = 'gen_ai.request.presence_penalty';
const ATTR_GEN_AI_RESPONSE_MODEL = 'gen_ai.response.model';
const ATTR_GEN_AI_RESPONSE_ID = 'gen_ai.response.id';
const ATTR_GEN_AI_RESPONSE_FINISH_REASONS = 'gen_ai.response.finish_reasons';
const ATTR_GEN_AI_USAGE_INPUT_TOKENS = 'gen_ai.usage.input_tokens';
const ATTR_GEN_AI_USAGE_OUTPUT_TOKENS = 'gen_ai.usage.output_tokens';
const ATTR_GEN_AI_OPERATION_NAME = 'gen_ai.operation.name';
const ATTR_GEN_AI_REQUEST_MESSAGES = 'gen_ai.request.messages';
const ATTR_GEN_AI_RESPONSE_TEXT = 'gen_ai.response.text';

// Single tracer shared by every wrapped SDK call.
const otelTracer = trace.getTracer('@agentuity/otel-llm', '1.0.0');

/**
 * Per-provider description of where request/response/stream data lives in
 * that SDK's payload shapes. The generic wrapper code below is driven
 * entirely by one of these configs, so adding a provider only requires a
 * new entry — no new wrapper logic.
 */
interface OtelPatchConfig {
  provider: string; // value recorded as gen_ai.system (e.g. 'openai')
  inputTokensField: string; // key inside `usage` holding prompt/input token count
  outputTokensField: string; // key inside `usage` holding completion/output token count
  responseIdField: string; // key on the response object carrying the response id
  finishReasonExtractor: (response: any) => string | undefined; // non-streaming finish reason
  responseContentExtractor: (response: any) => string | undefined; // non-streaming text content
  requestMessagesField: string; // key on the request body holding the message list
  streamDeltaContentExtractor: (chunk: any) => string | undefined; // incremental text per chunk
  streamFinishReasonExtractor: (chunk: any) => string | undefined; // finish reason when streamed
  streamUsageExtractor: (chunk: any) => any; // usage object when streamed (shape varies per SDK)
}
|
|
44
|
+
|
|
45
|
+
/**
 * Wrap an SDK stream's async iterator so that consuming the stream records
 * results onto `span` and ends the span when iteration finishes (normally,
 * via early `return`, or via `throw`).
 *
 * Accumulates text deltas, finish reason, usage, model, and response id
 * across chunks; all of it is flushed to the span only when the stream
 * completes. NOTE(review): if the consumer abandons the iterator without
 * calling return()/throw(), the span is never ended — inherent to this
 * wrapping approach.
 */
function wrapAsyncIterator(
  iterator: AsyncIterator<any>,
  span: Span,
  config: OtelPatchConfig
): AsyncIterableIterator<any> {
  // Accumulated state, filled in chunk by chunk as the caller iterates.
  const contentChunks: string[] = [];
  let finishReason: string | null = null;
  let usage: any = null;
  let model: string | null = null;
  let responseId: string | null = null;

  return {
    [Symbol.asyncIterator]() {
      return this;
    },
    async next() {
      try {
        const result = await iterator.next();
        if (result.done) {
          // Stream complete — flush accumulated state and finalize the span.
          if (contentChunks.length > 0) {
            span.setAttribute(ATTR_GEN_AI_RESPONSE_TEXT, contentChunks.join(''));
          }
          if (finishReason) {
            span.setAttribute(
              ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
              JSON.stringify([finishReason])
            );
          }
          if (model) {
            span.setAttribute(ATTR_GEN_AI_RESPONSE_MODEL, model);
          }
          if (responseId) {
            span.setAttribute(ATTR_GEN_AI_RESPONSE_ID, responseId);
          }
          if (usage) {
            // Field names differ per provider; config supplies them.
            if (usage[config.inputTokensField] !== undefined) {
              span.setAttribute(
                ATTR_GEN_AI_USAGE_INPUT_TOKENS,
                usage[config.inputTokensField]
              );
            }
            if (usage[config.outputTokensField] !== undefined) {
              span.setAttribute(
                ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
                usage[config.outputTokensField]
              );
            }
          }
          span.setStatus({ code: SpanStatusCode.OK });
          span.end();
          return result;
        }

        const chunk = result.value;

        // First chunk that carries model/id wins; later chunks don't overwrite.
        if (chunk.model && !model) model = chunk.model;
        if (chunk.id && !responseId) responseId = chunk.id;

        const deltaContent = config.streamDeltaContentExtractor(chunk);
        if (deltaContent) contentChunks.push(deltaContent);

        const chunkFinishReason = config.streamFinishReasonExtractor(chunk);
        if (chunkFinishReason) finishReason = chunkFinishReason;

        // Usage typically arrives on the final chunk; keep the latest seen.
        const chunkUsage = config.streamUsageExtractor(chunk);
        if (chunkUsage) usage = chunkUsage;

        return result;
      } catch (error: any) {
        span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
        span.recordException(error);
        span.end();
        throw error;
      }
    },
    // Early termination (e.g. `break` out of for-await): close the span as OK
    // and forward the return to the underlying iterator so it can clean up.
    async return(value?: any) {
      span.setStatus({ code: SpanStatusCode.OK });
      span.end();
      if (iterator.return) {
        return iterator.return(value);
      }
      return { done: true as const, value };
    },
    // Caller-injected error: record it, end the span, forward to the source.
    async throw(error?: any) {
      span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
      span.recordException(error);
      span.end();
      if (iterator.throw) {
        return iterator.throw(error);
      }
      throw error;
    },
  };
}
|
|
140
|
+
|
|
141
|
+
function wrapStream(stream: any, span: Span, config: OtelPatchConfig): any {
|
|
142
|
+
const originalIterator = stream[Symbol.asyncIterator]();
|
|
143
|
+
const wrappedIterator = wrapAsyncIterator(originalIterator, span, config);
|
|
144
|
+
|
|
145
|
+
return new Proxy(stream, {
|
|
146
|
+
get(target, prop) {
|
|
147
|
+
if (prop === Symbol.asyncIterator) {
|
|
148
|
+
return () => wrappedIterator;
|
|
149
|
+
}
|
|
150
|
+
const value = target[prop];
|
|
151
|
+
if (typeof value === 'function') {
|
|
152
|
+
return value.bind(target);
|
|
153
|
+
}
|
|
154
|
+
return value;
|
|
155
|
+
},
|
|
156
|
+
});
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/**
 * Build a drop-in replacement for an SDK's `create` method that records an
 * OpenTelemetry client span around each call.
 *
 * Request attributes (model, sampling params, messages) are set before the
 * call; response attributes (model, id, usage, finish reason, text) and span
 * status are set when the call settles. Streaming responses are handed to
 * wrapStream so the span stays open until the stream is consumed.
 *
 * @param originalCreate - the unwrapped SDK method; invoked with the same
 *   `this`, body, and options the caller supplied.
 * @param config - provider-specific field locations (see OtelPatchConfig).
 * @returns a function with the same call shape as `create`.
 */
function createOtelWrapper(
  originalCreate: (...args: any[]) => any,
  config: OtelPatchConfig
): (body: any, options?: any) => any {
  return function _agentuityOtelCreate(this: any, body: any, options?: any) {
    const attributes: Record<string, any> = {
      [ATTR_GEN_AI_SYSTEM]: config.provider,
      [ATTR_GEN_AI_OPERATION_NAME]: 'chat',
    };

    // Request-side attributes, recorded only when present on the body.
    if (body.model) attributes[ATTR_GEN_AI_REQUEST_MODEL] = body.model;
    if (body.max_tokens) attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] = body.max_tokens;
    if (body.temperature !== undefined)
      attributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] = body.temperature;
    if (body.top_p !== undefined) attributes[ATTR_GEN_AI_REQUEST_TOP_P] = body.top_p;
    if (body.frequency_penalty !== undefined)
      attributes[ATTR_GEN_AI_REQUEST_FREQUENCY_PENALTY] = body.frequency_penalty;
    if (body.presence_penalty !== undefined)
      attributes[ATTR_GEN_AI_REQUEST_PRESENCE_PENALTY] = body.presence_penalty;

    // Capture request messages (serialized; field name varies per provider)
    const messages = body[config.requestMessagesField];
    if (messages && Array.isArray(messages)) {
      try {
        attributes[ATTR_GEN_AI_REQUEST_MESSAGES] = JSON.stringify(messages);
      } catch {
        // Ignore serialization errors
      }
    }

    const spanName = body.model ? `chat ${body.model}` : 'chat';

    return otelTracer.startActiveSpan(
      spanName,
      { attributes, kind: SpanKind.CLIENT },
      (span: Span) => {
        let result: any;
        try {
          // Preserve the SDK's `this` and argument shape exactly.
          result = originalCreate.call(this, body, options);
        } catch (error: any) {
          span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
          span.recordException(error);
          span.end();
          throw error;
        }

        // Handle streaming responses: the span is NOT ended here — it stays
        // open until the wrapped stream is fully consumed (see wrapStream).
        if (body.stream) {
          if (result && typeof result.then === 'function') {
            return result
              .then((stream: any) => {
                try {
                  return wrapStream(stream, span, config);
                } catch (error: any) {
                  span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: error?.message,
                  });
                  span.recordException(error);
                  span.end();
                  throw error;
                }
              })
              .catch((error: any) => {
                // NOTE(review): this .catch also fires for errors rethrown by
                // the .then handler above, so the span may be ended twice —
                // presumably a no-op in the OTel SDK; confirm.
                span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
                span.recordException(error);
                span.end();
                throw error;
              });
          }
          // Non-promise streaming result: wrap it synchronously.
          try {
            return wrapStream(result, span, config);
          } catch (error: any) {
            span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
            span.recordException(error);
            span.end();
            throw error;
          }
        }

        // Handle non-streaming responses
        if (result && typeof result.then === 'function') {
          return result
            .then((response: any) => {
              if (response) {
                if (response.model) {
                  span.setAttribute(ATTR_GEN_AI_RESPONSE_MODEL, response.model);
                }
                if (response[config.responseIdField]) {
                  span.setAttribute(
                    ATTR_GEN_AI_RESPONSE_ID,
                    response[config.responseIdField]
                  );
                }
                if (response.usage) {
                  // Token-count field names differ per provider.
                  if (response.usage[config.inputTokensField] !== undefined) {
                    span.setAttribute(
                      ATTR_GEN_AI_USAGE_INPUT_TOKENS,
                      response.usage[config.inputTokensField]
                    );
                  }
                  if (response.usage[config.outputTokensField] !== undefined) {
                    span.setAttribute(
                      ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
                      response.usage[config.outputTokensField]
                    );
                  }
                }
                const finishReason = config.finishReasonExtractor(response);
                if (finishReason) {
                  span.setAttribute(
                    ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
                    JSON.stringify([finishReason])
                  );
                }
                const responseContent = config.responseContentExtractor(response);
                if (responseContent) {
                  span.setAttribute(ATTR_GEN_AI_RESPONSE_TEXT, responseContent);
                }
              }
              span.setStatus({ code: SpanStatusCode.OK });
              span.end();
              return response;
            })
            .catch((error: any) => {
              span.setStatus({ code: SpanStatusCode.ERROR, message: error?.message });
              span.recordException(error);
              span.end();
              throw error;
            });
        }

        // Synchronous, non-streaming result: nothing more to record;
        // no explicit status is set (left UNSET).
        span.end();
        return result;
      }
    );
  };
}
|
|
297
|
+
|
|
298
|
+
/** Provider-specific configurations matching the build-time patches */
const OTEL_CONFIGS: Array<{
  module: string; // npm module to patch; silently skipped when not installed
  className: string; // class whose prototype.create gets wrapped
  config: OtelPatchConfig;
}> = [
  {
    module: 'openai',
    // In the openai SDK, the Completions class is in resources/chat/completions
    // We need to import the main module and access the prototype
    className: 'Completions',
    config: {
      provider: 'openai',
      inputTokensField: 'prompt_tokens',
      outputTokensField: 'completion_tokens',
      responseIdField: 'id',
      finishReasonExtractor: (r) => r?.choices?.[0]?.finish_reason,
      responseContentExtractor: (r) => r?.choices?.[0]?.message?.content,
      requestMessagesField: 'messages',
      streamDeltaContentExtractor: (c) => c?.choices?.[0]?.delta?.content,
      streamFinishReasonExtractor: (c) => c?.choices?.[0]?.finish_reason,
      streamUsageExtractor: (c) => c?.usage,
    },
  },
  {
    module: '@anthropic-ai/sdk',
    className: 'Messages',
    config: {
      provider: 'anthropic',
      inputTokensField: 'input_tokens',
      outputTokensField: 'output_tokens',
      responseIdField: 'id',
      // Anthropic reports the finish reason as stop_reason.
      finishReasonExtractor: (r) => r?.stop_reason,
      responseContentExtractor: (r) => r?.content?.[0]?.text,
      requestMessagesField: 'messages',
      streamDeltaContentExtractor: (c) => c?.delta?.text,
      streamFinishReasonExtractor: (c) => c?.delta?.stop_reason,
      streamUsageExtractor: (c) => c?.usage,
    },
  },
  {
    module: 'groq-sdk',
    className: 'Completions',
    config: {
      provider: 'groq',
      inputTokensField: 'prompt_tokens',
      outputTokensField: 'completion_tokens',
      responseIdField: 'id',
      finishReasonExtractor: (r) => r?.choices?.[0]?.finish_reason,
      responseContentExtractor: (r) => r?.choices?.[0]?.message?.content,
      requestMessagesField: 'messages',
      streamDeltaContentExtractor: (c) => c?.choices?.[0]?.delta?.content,
      streamFinishReasonExtractor: (c) => c?.choices?.[0]?.finish_reason,
      // Groq nests streaming usage under the x_groq extension field.
      streamUsageExtractor: (c) => c?.x_groq?.usage,
    },
  },
];
|
|
355
|
+
|
|
356
|
+
/**
|
|
357
|
+
* Find a class prototype by name within a module's exports (recursive search).
|
|
358
|
+
* The SDKs nest classes in sub-objects (e.g., openai.Chat.Completions).
|
|
359
|
+
*/
|
|
360
|
+
function findPrototype(obj: any, className: string, depth = 0): any | null {
|
|
361
|
+
if (depth > 5 || !obj || typeof obj !== 'object') return null;
|
|
362
|
+
|
|
363
|
+
// Check if this object itself is the class we're looking for
|
|
364
|
+
if (typeof obj === 'function' && obj.name === className && obj.prototype?.create) {
|
|
365
|
+
return obj.prototype;
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
// Search properties
|
|
369
|
+
for (const key of Object.keys(obj)) {
|
|
370
|
+
try {
|
|
371
|
+
const val = obj[key];
|
|
372
|
+
if (typeof val === 'function' && val.name === className && val.prototype?.create) {
|
|
373
|
+
return val.prototype;
|
|
374
|
+
}
|
|
375
|
+
if (typeof val === 'object' && val !== null) {
|
|
376
|
+
const found = findPrototype(val, className, depth + 1);
|
|
377
|
+
if (found) return found;
|
|
378
|
+
}
|
|
379
|
+
} catch {
|
|
380
|
+
// Skip inaccessible properties
|
|
381
|
+
}
|
|
382
|
+
}
|
|
383
|
+
return null;
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
/**
|
|
387
|
+
* Apply OTel instrumentation patches to LLM SDK prototype methods.
|
|
388
|
+
*/
|
|
389
|
+
export async function applyOtelLLMPatches(): Promise<void> {
|
|
390
|
+
for (const { module: moduleName, className, config } of OTEL_CONFIGS) {
|
|
391
|
+
try {
|
|
392
|
+
const mod = await import(moduleName);
|
|
393
|
+
const proto = findPrototype(mod, className);
|
|
394
|
+
|
|
395
|
+
if (!proto || typeof proto.create !== 'function') {
|
|
396
|
+
console.debug(
|
|
397
|
+
`[Agentuity OTel] Skipping patch: ${className}.prototype.create not found in ${moduleName}`
|
|
398
|
+
);
|
|
399
|
+
continue;
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
const originalCreate = proto.create;
|
|
403
|
+
proto.create = createOtelWrapper(originalCreate, config);
|
|
404
|
+
} catch {
|
|
405
|
+
// Module not installed — skip
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
}
|
package/src/index.ts
CHANGED
|
@@ -263,3 +263,6 @@ export { bootstrapRuntimeEnv, type RuntimeBootstrapOptions, mimeTypes } from '@a
|
|
|
263
263
|
|
|
264
264
|
// bun-s3-patch.ts exports
|
|
265
265
|
export { patchBunS3ForStorageDev, isAgentuityStorageEndpoint } from './bun-s3-patch';
|
|
266
|
+
|
|
267
|
+
// dev-patches exports (runtime monkey-patches for --experimental-no-bundle dev mode)
|
|
268
|
+
export { applyDevPatches } from './dev-patches';
|
|
@@ -115,12 +115,20 @@ function initializeTables(db: Database): void {
|
|
|
115
115
|
created_id TEXT NOT NULL,
|
|
116
116
|
assigned_id TEXT,
|
|
117
117
|
closed_id TEXT,
|
|
118
|
+
deleted INTEGER NOT NULL DEFAULT 0,
|
|
118
119
|
created_at INTEGER NOT NULL,
|
|
119
120
|
updated_at INTEGER NOT NULL,
|
|
120
121
|
PRIMARY KEY (project_path, id)
|
|
121
122
|
)
|
|
122
123
|
`);
|
|
123
124
|
|
|
125
|
+
// Migration: add deleted column for existing databases
|
|
126
|
+
try {
|
|
127
|
+
db.run('ALTER TABLE task_storage ADD COLUMN deleted INTEGER NOT NULL DEFAULT 0');
|
|
128
|
+
} catch {
|
|
129
|
+
// Column already exists
|
|
130
|
+
}
|
|
131
|
+
|
|
124
132
|
// Task Changelog table
|
|
125
133
|
db.run(`
|
|
126
134
|
CREATE TABLE IF NOT EXISTS task_changelog_storage (
|
|
@@ -139,6 +147,57 @@ function initializeTables(db: Database): void {
|
|
|
139
147
|
CREATE INDEX IF NOT EXISTS idx_task_changelog_lookup
|
|
140
148
|
ON task_changelog_storage(project_path, task_id)
|
|
141
149
|
`);
|
|
150
|
+
|
|
151
|
+
// Task Comment table
|
|
152
|
+
db.run(`
|
|
153
|
+
CREATE TABLE IF NOT EXISTS task_comment_storage (
|
|
154
|
+
project_path TEXT NOT NULL,
|
|
155
|
+
id TEXT NOT NULL,
|
|
156
|
+
task_id TEXT NOT NULL,
|
|
157
|
+
user_id TEXT NOT NULL,
|
|
158
|
+
body TEXT NOT NULL,
|
|
159
|
+
created_at INTEGER NOT NULL,
|
|
160
|
+
updated_at INTEGER NOT NULL,
|
|
161
|
+
PRIMARY KEY (project_path, id)
|
|
162
|
+
)
|
|
163
|
+
`);
|
|
164
|
+
|
|
165
|
+
db.run(`
|
|
166
|
+
CREATE INDEX IF NOT EXISTS idx_task_comment_lookup
|
|
167
|
+
ON task_comment_storage(project_path, task_id)
|
|
168
|
+
`);
|
|
169
|
+
|
|
170
|
+
// Task Tag table
|
|
171
|
+
db.run(`
|
|
172
|
+
CREATE TABLE IF NOT EXISTS task_tag_storage (
|
|
173
|
+
project_path TEXT NOT NULL,
|
|
174
|
+
id TEXT NOT NULL,
|
|
175
|
+
name TEXT NOT NULL,
|
|
176
|
+
color TEXT,
|
|
177
|
+
created_at INTEGER NOT NULL,
|
|
178
|
+
PRIMARY KEY (project_path, id)
|
|
179
|
+
)
|
|
180
|
+
`);
|
|
181
|
+
|
|
182
|
+
// Task-Tag association table
|
|
183
|
+
db.run(`
|
|
184
|
+
CREATE TABLE IF NOT EXISTS task_tag_association_storage (
|
|
185
|
+
project_path TEXT NOT NULL,
|
|
186
|
+
task_id TEXT NOT NULL,
|
|
187
|
+
tag_id TEXT NOT NULL,
|
|
188
|
+
PRIMARY KEY (project_path, task_id, tag_id)
|
|
189
|
+
)
|
|
190
|
+
`);
|
|
191
|
+
|
|
192
|
+
db.run(`
|
|
193
|
+
CREATE INDEX IF NOT EXISTS idx_task_tag_assoc_task
|
|
194
|
+
ON task_tag_association_storage(project_path, task_id)
|
|
195
|
+
`);
|
|
196
|
+
|
|
197
|
+
db.run(`
|
|
198
|
+
CREATE INDEX IF NOT EXISTS idx_task_tag_assoc_tag
|
|
199
|
+
ON task_tag_association_storage(project_path, tag_id)
|
|
200
|
+
`);
|
|
142
201
|
}
|
|
143
202
|
|
|
144
203
|
function cleanupOrphanedProjects(db: Database): void {
|
|
@@ -163,14 +222,36 @@ function cleanupOrphanedProjects(db: Database): void {
|
|
|
163
222
|
.all() as Array<{
|
|
164
223
|
project_path: string;
|
|
165
224
|
}>;
|
|
225
|
+
const taskCommentPaths = db
|
|
226
|
+
.query('SELECT DISTINCT project_path FROM task_comment_storage')
|
|
227
|
+
.all() as Array<{
|
|
228
|
+
project_path: string;
|
|
229
|
+
}>;
|
|
230
|
+
const taskTagPaths = db
|
|
231
|
+
.query('SELECT DISTINCT project_path FROM task_tag_storage')
|
|
232
|
+
.all() as Array<{
|
|
233
|
+
project_path: string;
|
|
234
|
+
}>;
|
|
235
|
+
const taskTagAssocPaths = db
|
|
236
|
+
.query('SELECT DISTINCT project_path FROM task_tag_association_storage')
|
|
237
|
+
.all() as Array<{
|
|
238
|
+
project_path: string;
|
|
239
|
+
}>;
|
|
166
240
|
|
|
167
241
|
// Combine and deduplicate all project paths
|
|
168
242
|
const allPaths = new Set<string>();
|
|
169
|
-
[
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
243
|
+
[
|
|
244
|
+
...kvPaths,
|
|
245
|
+
...streamPaths,
|
|
246
|
+
...vectorPaths,
|
|
247
|
+
...taskPaths,
|
|
248
|
+
...taskChangelogPaths,
|
|
249
|
+
...taskCommentPaths,
|
|
250
|
+
...taskTagPaths,
|
|
251
|
+
...taskTagAssocPaths,
|
|
252
|
+
].forEach((row) => {
|
|
253
|
+
allPaths.add(row.project_path);
|
|
254
|
+
});
|
|
174
255
|
|
|
175
256
|
// Check which paths no longer exist and are not the current project
|
|
176
257
|
const pathsToDelete: string[] = [];
|
|
@@ -198,12 +279,24 @@ function cleanupOrphanedProjects(db: Database): void {
|
|
|
198
279
|
const deleteTaskChangelog = db.prepare(
|
|
199
280
|
`DELETE FROM task_changelog_storage WHERE project_path IN (${placeholders})`
|
|
200
281
|
);
|
|
282
|
+
const deleteTaskComments = db.prepare(
|
|
283
|
+
`DELETE FROM task_comment_storage WHERE project_path IN (${placeholders})`
|
|
284
|
+
);
|
|
285
|
+
const deleteTaskTags = db.prepare(
|
|
286
|
+
`DELETE FROM task_tag_storage WHERE project_path IN (${placeholders})`
|
|
287
|
+
);
|
|
288
|
+
const deleteTaskTagAssoc = db.prepare(
|
|
289
|
+
`DELETE FROM task_tag_association_storage WHERE project_path IN (${placeholders})`
|
|
290
|
+
);
|
|
201
291
|
|
|
202
292
|
deleteKv.run(...pathsToDelete);
|
|
203
293
|
deleteStream.run(...pathsToDelete);
|
|
204
294
|
deleteVector.run(...pathsToDelete);
|
|
205
295
|
deleteTasks.run(...pathsToDelete);
|
|
206
296
|
deleteTaskChangelog.run(...pathsToDelete);
|
|
297
|
+
deleteTaskComments.run(...pathsToDelete);
|
|
298
|
+
deleteTaskTags.run(...pathsToDelete);
|
|
299
|
+
deleteTaskTagAssoc.run(...pathsToDelete);
|
|
207
300
|
|
|
208
301
|
console.log(`[LocalDB] Cleaned up data for ${pathsToDelete.length} orphaned project(s)`);
|
|
209
302
|
}
|
|
@@ -11,10 +11,7 @@ import {
|
|
|
11
11
|
const ERROR_MESSAGE =
|
|
12
12
|
'Email service is not available in local development mode. Deploy to Agentuity Cloud to use email.';
|
|
13
13
|
|
|
14
|
-
const LocalEmailNotAvailableError = StructuredError(
|
|
15
|
-
'LocalEmailNotAvailableError',
|
|
16
|
-
ERROR_MESSAGE
|
|
17
|
-
);
|
|
14
|
+
const LocalEmailNotAvailableError = StructuredError('LocalEmailNotAvailableError', ERROR_MESSAGE);
|
|
18
15
|
|
|
19
16
|
/**
|
|
20
17
|
* Local development stub for the email service.
|
|
@@ -65,6 +62,10 @@ export class LocalEmailStorage implements EmailService {
|
|
|
65
62
|
throw new LocalEmailNotAvailableError();
|
|
66
63
|
}
|
|
67
64
|
|
|
65
|
+
/**
 * Not supported in local development — always throws.
 * Deploy to Agentuity Cloud to use email (see ERROR_MESSAGE).
 * @throws LocalEmailNotAvailableError unconditionally.
 */
async deleteInbound(_id: string): Promise<void> {
  throw new LocalEmailNotAvailableError();
}
|
|
68
|
+
|
|
68
69
|
async listOutbound(_addressId?: string): Promise<EmailOutbound[]> {
|
|
69
70
|
throw new LocalEmailNotAvailableError();
|
|
70
71
|
}
|
|
@@ -72,4 +73,8 @@ export class LocalEmailStorage implements EmailService {
|
|
|
72
73
|
async getOutbound(_id: string): Promise<EmailOutbound | null> {
|
|
73
74
|
throw new LocalEmailNotAvailableError();
|
|
74
75
|
}
|
|
76
|
+
|
|
77
|
+
/**
 * Not supported in local development — always throws.
 * Deploy to Agentuity Cloud to use email (see ERROR_MESSAGE).
 * @throws LocalEmailNotAvailableError unconditionally.
 */
async deleteOutbound(_id: string): Promise<void> {
  throw new LocalEmailNotAvailableError();
}
|
|
75
80
|
}
|