@sentro/sdk 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# @sentro/sdk
|
|
2
|
+
|
|
3
|
+
**Error tracking and agent observability for AI agents.** TypeScript SDK for [Sentro](https://github.com/yzzztech/sentro).
|
|
4
|
+
|
|
5
|
+
Sentro is an open-source Sentry alternative built specifically for AI agents. This SDK gives you full observability into every run, step, tool call, and LLM call your agents make — plus traditional error tracking.
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @sentro/sdk
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Quick Start
|
|
14
|
+
|
|
15
|
+
### Error tracking
|
|
16
|
+
|
|
17
|
+
```typescript
|
|
18
|
+
import { Sentro } from '@sentro/sdk';
|
|
19
|
+
|
|
20
|
+
const sentro = new Sentro({ dsn: 'http://token@localhost:3000/api/ingest/proj_1' });
|
|
21
|
+
|
|
22
|
+
sentro.captureException(new Error('Payment failed'));
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
### Agent observability
|
|
26
|
+
|
|
27
|
+
```typescript
|
|
28
|
+
const result = await sentro.trace('order-processor', {
|
|
29
|
+
goal: 'Process refund for order #456',
|
|
30
|
+
model: 'claude-sonnet-4-6',
|
|
31
|
+
}, async (run) => {
|
|
32
|
+
|
|
33
|
+
return await run.trace('Looking up order', async (step) => {
|
|
34
|
+
// Track tool calls
|
|
35
|
+
const order = await step.traceToolCall('database.query',
|
|
36
|
+
{ sql: 'SELECT * FROM orders WHERE id = 456' },
|
|
37
|
+
async () => db.query('SELECT * FROM orders WHERE id = 456')
|
|
38
|
+
);
|
|
39
|
+
|
|
40
|
+
// Track LLM calls
|
|
41
|
+
const llm = step.llmCall({ model: 'claude-sonnet-4-6' });
|
|
42
|
+
const decision = await callLLM('Should we approve this refund?');
|
|
43
|
+
llm.end({ promptTokens: 150, completionTokens: 20, cost: 0.001 });
|
|
44
|
+
|
|
45
|
+
return decision;
|
|
46
|
+
});
|
|
47
|
+
});
|
|
48
|
+
// Automatically captures: duration, tokens, cost, success/failure
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
### Explicit API
|
|
52
|
+
|
|
53
|
+
For more control, use the explicit start/end API:
|
|
54
|
+
|
|
55
|
+
```typescript
|
|
56
|
+
const run = sentro.startRun({
|
|
57
|
+
agent: 'research-agent',
|
|
58
|
+
goal: 'Find recent papers',
|
|
59
|
+
model: 'gpt-4o',
|
|
60
|
+
trigger: 'api',
|
|
61
|
+
});
|
|
62
|
+
|
|
63
|
+
try {
|
|
64
|
+
const step = run.step('Search for papers');
|
|
65
|
+
const llm = step.llmCall({ model: 'gpt-4o', provider: 'openai' });
|
|
66
|
+
const res = await openai.chat.completions.create({ ... });
|
|
67
|
+
await llm.end({
|
|
68
|
+
promptTokens: res.usage?.prompt_tokens,
|
|
69
|
+
completionTokens: res.usage?.completion_tokens,
|
|
70
|
+
});
|
|
71
|
+
await step.end();
|
|
72
|
+
await run.end({ status: 'success' });
|
|
73
|
+
} catch (err) {
|
|
74
|
+
await run.error(err instanceof Error ? err : new Error(String(err)));
|
|
75
|
+
throw err;
|
|
76
|
+
}
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
## Configuration
|
|
80
|
+
|
|
81
|
+
```typescript
|
|
82
|
+
const sentro = new Sentro({
|
|
83
|
+
dsn: 'http://token@localhost:3000/api/ingest/proj_1',
|
|
84
|
+
capturePrompts: false, // set true to store prompt/response bodies
|
|
85
|
+
flushIntervalMs: 1000, // batch send every 1s
|
|
86
|
+
maxBatchSize: 100, // flush when buffer hits 100
|
|
87
|
+
defaultTags: {
|
|
88
|
+
env: 'production',
|
|
89
|
+
version: '1.0.0',
|
|
90
|
+
},
|
|
91
|
+
});
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
| Option | Type | Default | Description |
|
|
95
|
+
|--------|------|---------|-------------|
|
|
96
|
+
| `dsn` | string | required | Full DSN URL from your project settings |
|
|
97
|
+
| `capturePrompts` | boolean | `false` | Store prompt/response bodies verbatim |
|
|
98
|
+
| `flushIntervalMs` | number | `1000` | How often (ms) to flush the buffer |
|
|
99
|
+
| `maxBatchSize` | number | `100` | Max events per batch before immediate flush |
|
|
100
|
+
| `defaultTags` | Record\<string, string\> | — | Tags merged into every event |
|
|
101
|
+
|
|
102
|
+
## Security Considerations
|
|
103
|
+
|
|
104
|
+
- Set `capturePrompts: false` (the default) to avoid storing LLM prompt/response bodies that may contain PII
|
|
105
|
+
- DSN tokens are API keys — treat them like passwords, don't commit them to source control
|
|
106
|
+
- Tool call inputs/outputs are stored as-is — avoid passing sensitive data (API keys, passwords) through traced tool calls
|
|
107
|
+
|
|
108
|
+
## Graceful Shutdown
|
|
109
|
+
|
|
110
|
+
```typescript
|
|
111
|
+
// Before process exit
|
|
112
|
+
await sentro.shutdown();
|
|
113
|
+
|
|
114
|
+
// In serverless environments
|
|
115
|
+
await sentro.flush();
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## Links
|
|
119
|
+
|
|
120
|
+
- **GitHub:** [github.com/yzzztech/sentro](https://github.com/yzzztech/sentro)
|
|
121
|
+
- **Docs:** Available at `/docs` when running Sentro
|
|
122
|
+
- **Python SDK:** `pip install sentro-sdk`
|
|
123
|
+
|
|
124
|
+
## License
|
|
125
|
+
|
|
126
|
+
MIT
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
 * Sentro integration for Vercel AI SDK.
 *
 * Usage:
 *   import { Sentro } from '@sentro/sdk';
 *   import { sentroMiddleware } from '@sentro/sdk/vercel-ai';
 *
 *   const sentro = new Sentro({ dsn: '...' });
 *
 *   const result = await generateText({
 *     model: openai('gpt-4o'),
 *     prompt: 'Hello!',
 *     experimental_telemetry: sentroMiddleware(sentro),
 *   });
 */
import type { Sentro } from '../client';
// Shape of the `experimental_telemetry` settings object that the Vercel AI
// SDK accepts (see the usage example above); `tracer` supplies the span
// factory the AI SDK calls for each generation.
interface TelemetrySettings {
    isEnabled: boolean;
    functionId?: string;
    metadata?: Record<string, string>;
    tracer?: {
        startSpan: (name: string, options?: Record<string, unknown>) => Span;
    };
}
// Minimal span surface the integration implements: attribute recording,
// a status setter, and an end hook. NOTE(review): mirrors the subset of the
// OpenTelemetry span API the Vercel AI SDK invokes — confirm against the
// AI SDK version in use.
interface Span {
    setAttribute(key: string, value: string | number | boolean): void;
    setStatus(status: {
        code: number;
        message?: string;
    }): void;
    end(): void;
}
// Options for sentroMiddleware; `agentName` becomes the telemetry
// `functionId` (defaults to 'vercel-ai-agent' in the implementation).
interface SentroMiddlewareOptions {
    agentName?: string;
}
export declare function sentroMiddleware(client: Sentro, options?: SentroMiddlewareOptions): TelemetrySettings;
export {};
//# sourceMappingURL=vercel-ai.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vercel-ai.d.ts","sourceRoot":"","sources":["../../src/integrations/vercel-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAExC,UAAU,iBAAiB;IACzB,SAAS,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAClC,MAAM,CAAC,EAAE;QACP,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;KACtE,CAAC;CACH;AAED,UAAU,IAAI;IACZ,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,IAAI,CAAC;IAClE,SAAS,CAAC,MAAM,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC;IAC5D,GAAG,IAAI,IAAI,CAAC;CACb;AAED,UAAU,uBAAuB;IAC/B,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,wBAAgB,gBAAgB,CAC9B,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,uBAA4B,GACpC,iBAAiB,CAoDnB"}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
/**
 * Sentro integration for the Vercel AI SDK.
 *
 * Builds the `experimental_telemetry` settings object for `generateText` /
 * `streamText`. The embedded tracer creates lightweight spans that watch
 * the attributes the AI SDK sets: when `ai.model.id` arrives and the Sentro
 * client has an active step, an LLM call is opened on that step; token
 * usage attributes are buffered and flushed to `llm.end()` when the span
 * ends.
 *
 * Usage:
 *   import { Sentro } from '@sentro/sdk';
 *   import { sentroMiddleware } from '@sentro/sdk/vercel-ai';
 *
 *   const sentro = new Sentro({ dsn: '...' });
 *
 *   const result = await generateText({
 *     model: openai('gpt-4o'),
 *     prompt: 'Hello!',
 *     experimental_telemetry: sentroMiddleware(sentro),
 *   });
 *
 * @param {object} client - Sentro client; `client._currentStep` (if set) receives the LLM call.
 * @param {{ agentName?: string }} [options] - `agentName` becomes the telemetry functionId.
 * @returns {object} Telemetry settings consumable by the Vercel AI SDK.
 */
export function sentroMiddleware(client, options = {}) {
  const functionId = options.agentName ?? 'vercel-ai-agent';

  // Span factory handed to the AI SDK. `spanName`/`spanOptions` are accepted
  // for API compatibility but not used; all signal comes from attributes.
  const startSpan = (spanName, spanOptions) => {
    const attributes = {};

    const span = {
      setAttribute(key, value) {
        attributes[key] = String(value);

        // The model id is the first usable signal that an LLM call is in
        // flight; open one on the current step, if any.
        if (key === 'ai.model.id') {
          const activeStep = client._currentStep;
          if (activeStep) {
            span._llmCall = activeStep.llmCall({ model: String(value) });
          }
        }

        // Token counts only matter once an LLM call exists (i.e. the model
        // id attribute already arrived); otherwise they are dropped.
        if (span._llmCall) {
          if (key === 'ai.usage.promptTokens') {
            span._promptTokens = Number(value);
          }
          if (key === 'ai.usage.completionTokens') {
            span._completionTokens = Number(value);
          }
        }
      },

      setStatus(status) {
        attributes['status.code'] = status.code;
        if (status.message) {
          attributes['status.message'] = status.message;
        }
      },

      end() {
        // Flush buffered usage into the LLM call, if one was opened.
        const pendingCall = span._llmCall;
        if (!pendingCall) {
          return;
        }
        pendingCall.end({
          promptTokens: span._promptTokens,
          completionTokens: span._completionTokens,
        });
      },
    };

    return span;
  };

  return {
    isEnabled: true,
    functionId,
    tracer: { startSpan },
  };
}
//# sourceMappingURL=vercel-ai.js.map
|
|
62
|
+
//# sourceMappingURL=vercel-ai.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vercel-ai.js","sourceRoot":"","sources":["../../src/integrations/vercel-ai.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAuBH,MAAM,UAAU,gBAAgB,CAC9B,MAAc,EACd,UAAmC,EAAE;IAErC,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,iBAAiB,CAAC;IAEzD,OAAO;QACL,SAAS,EAAE,IAAI;QACf,UAAU,EAAE,SAAS;QACrB,MAAM,EAAE;YACN,SAAS,CAAC,IAAY,EAAE,WAAqC;gBAC3D,MAAM,UAAU,GAAoC,EAAE,CAAC;gBAEvD,MAAM,IAAI,GAAS;oBACjB,YAAY,CAAC,GAAW,EAAE,KAAgC;wBACxD,UAAU,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAEhC,uCAAuC;wBACvC,IAAI,GAAG,KAAK,aAAa,EAAE,CAAC;4BAC1B,MAAM,IAAI,GAAI,MAAc,CAAC,YAAY,CAAC;4BAC1C,IAAI,IAAI,EAAE,CAAC;gCACT,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;gCAClD,IAAY,CAAC,QAAQ,GAAG,GAAG,CAAC;4BAC/B,CAAC;wBACH,CAAC;wBAED,IAAI,GAAG,KAAK,uBAAuB,IAAK,IAAY,CAAC,QAAQ,EAAE,CAAC;4BAC7D,IAAY,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAC9C,CAAC;wBAED,IAAI,GAAG,KAAK,2BAA2B,IAAK,IAAY,CAAC,QAAQ,EAAE,CAAC;4BACjE,IAAY,CAAC,iBAAiB,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAClD,CAAC;oBACH,CAAC;oBAED,SAAS,CAAC,MAA0C;wBAClD,UAAU,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC;wBACxC,IAAI,MAAM,CAAC,OAAO;4BAAE,UAAU,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC;oBACpE,CAAC;oBAED,GAAG;wBACD,MAAM,OAAO,GAAI,IAAY,CAAC,QAAQ,CAAC;wBACvC,IAAI,OAAO,EAAE,CAAC;4BACZ,OAAO,CAAC,GAAG,CAAC;gCACV,YAAY,EAAG,IAAY,CAAC,aAAa;gCACzC,gBAAgB,EAAG,IAAY,CAAC,iBAAiB;6BAClD,CAAC,CAAC;wBACL,CAAC;oBACH,CAAC;iBACF,CAAC;gBAEF,OAAO,IAAI,CAAC;YACd,CAAC;SACF;KACF,CAAC;AACJ,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,16 +1,49 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@sentro/sdk",
|
|
3
|
-
"version": "0.1.0",
|
|
3
|
+
"version": "0.1.2",
|
|
4
|
+
"description": "Error tracking and agent observability for AI agents — TypeScript SDK for Sentro",
|
|
4
5
|
"main": "dist/index.js",
|
|
5
6
|
"types": "dist/index.d.ts",
|
|
6
|
-
"
|
|
7
|
+
"exports": {
|
|
8
|
+
".": {
|
|
9
|
+
"types": "./dist/index.d.ts",
|
|
10
|
+
"default": "./dist/index.js"
|
|
11
|
+
},
|
|
12
|
+
"./vercel-ai": {
|
|
13
|
+
"types": "./dist/integrations/vercel-ai.d.ts",
|
|
14
|
+
"default": "./dist/integrations/vercel-ai.js"
|
|
15
|
+
}
|
|
16
|
+
},
|
|
17
|
+
"files": [
|
|
18
|
+
"dist",
|
|
19
|
+
"README.md"
|
|
20
|
+
],
|
|
21
|
+
"keywords": [
|
|
22
|
+
"sentro",
|
|
23
|
+
"observability",
|
|
24
|
+
"error-tracking",
|
|
25
|
+
"ai-agents",
|
|
26
|
+
"llm",
|
|
27
|
+
"tracing",
|
|
28
|
+
"sentry-alternative"
|
|
29
|
+
],
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"repository": {
|
|
32
|
+
"type": "git",
|
|
33
|
+
"url": "https://github.com/yzzztech/sentro.git",
|
|
34
|
+
"directory": "packages/sdk"
|
|
35
|
+
},
|
|
36
|
+
"homepage": "https://github.com/yzzztech/sentro",
|
|
37
|
+
"bugs": "https://github.com/yzzztech/sentro/issues",
|
|
7
38
|
"scripts": {
|
|
8
39
|
"build": "tsc",
|
|
9
40
|
"test": "vitest run",
|
|
41
|
+
"test:coverage": "vitest run --coverage",
|
|
10
42
|
"test:watch": "vitest",
|
|
11
43
|
"dev": "tsc --watch"
|
|
12
44
|
},
|
|
13
45
|
"devDependencies": {
|
|
46
|
+
"@vitest/coverage-v8": "^3.2.4",
|
|
14
47
|
"typescript": "^5.7.0",
|
|
15
48
|
"vitest": "^3.1.0"
|
|
16
49
|
}
|