teckel-ai 0.3.6 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,60 +1,72 @@
1
1
  # teckel-ai
2
2
 
3
- TypeScript/JavaScript SDK for [Teckel AI](https://teckel.ai)- Get insight into your AI systems, track topics of your choosing, and identify and fix knowledge gaps.
3
+ TypeScript SDK for [Teckel AI](https://teckel.ai), an agentic monitoring platform for tracking behavior and improving AI output.
4
4
 
5
- ## Installation
5
+ ## Install
6
6
 
7
7
  ```bash
8
8
  npm install teckel-ai
9
9
  ```
10
10
 
11
- **Requirements:** Node.js 18+ (or Bun, Deno, serverless runtimes)
12
-
13
11
  ## Quick Start
14
12
 
15
13
  ```typescript
16
14
  import { TeckelTracer } from 'teckel-ai';
17
15
 
18
- // Initialize once at startup
19
- const tracer = new TeckelTracer({
20
- apiKey: process.env.TECKEL_API_KEY
16
+ const tracer = new TeckelTracer({ apiKey: process.env.TECKEL_API_KEY });
17
+
18
+ tracer.trace({
19
+ query: "How do I reset my password?",
20
+ response: "Go to Settings > Security...",
21
+ documents: [{ id: "doc-1", name: "Password Guide", text: "To reset..." }]
21
22
  });
22
23
 
23
- // In your API handler
24
- async function handleChat(userQuestion: string, sessionId: string) {
25
- const conversation = tracer.start({ sessionRef: sessionId });
26
-
27
- // Your existing RAG logic
28
- const chunks = await vectorDB.search(userQuestion);
29
- const answer = await llm.generate(userQuestion, chunks);
30
-
31
- // Send trace (non-blocking)
32
- conversation.trace({
33
- query: userQuestion,
34
- response: answer,
35
- documents: chunks.map((chunk, i) => ({
36
- documentRef: chunk.id,
37
- documentName: chunk.title,
38
- documentText: chunk.content,
39
- }))
40
- });
41
-
42
- // For serverless: flush before returning
43
- await conversation.flush();
44
-
45
- return answer;
46
- }
24
+ // Serverless: flush before returning
25
+ await tracer.flush();
47
26
  ```
48
27
 
49
- ## Documentation
28
+ ## OpenTelemetry Integration
29
+
30
+ Capture detailed spans from AI SDK calls:
31
+
32
+ ```typescript
33
+ import { TeckelTracer } from 'teckel-ai';
34
+ import { TeckelSpanCollector } from 'teckel-ai/otel';
35
+
36
+ const tracer = new TeckelTracer({ apiKey: process.env.TECKEL_API_KEY });
37
+ const spanCollector = new TeckelSpanCollector();
38
+
39
+ const result = await generateText({
40
+ model: openai('gpt-5-mini'),
41
+ prompt: userQuery,
42
+ experimental_telemetry: {
43
+ isEnabled: true,
44
+ tracer: spanCollector.getTracer()
45
+ }
46
+ });
50
47
 
51
- - **Full SDK Reference:** [docs.teckel.ai/typescript-sdk-reference](https://docs.teckel.ai/typescript-sdk-reference)
52
- - **Getting Started:** [docs.teckel.ai/getting-started](https://docs.teckel.ai/getting-started)
48
+ const spans = spanCollector.getSpans();
49
+
50
+ // Tokens auto-aggregated from spans!
51
+ tracer.trace({
52
+ query: userQuery,
53
+ response: result.text,
54
+ spans
55
+ });
56
+
57
+ await spanCollector.shutdown();
58
+ ```
59
+
60
+ Requires: `npm install @opentelemetry/api @opentelemetry/sdk-trace-base`
61
+
62
+ ## Documentation
53
63
 
54
- ## Support
64
+ Full documentation at [docs.teckel.ai](https://docs.teckel.ai):
55
65
 
56
- - **Email:** support@teckel.ai
57
- - **Website:** [teckel.ai](https://teckel.ai)
66
+ - [Getting Started](https://docs.teckel.ai/docs/getting_started)
67
+ - [TypeScript SDK Reference](https://docs.teckel.ai/docs/typescript_sdk_reference)
68
+ - [OpenTelemetry Integration](https://docs.teckel.ai/docs/opentelemetry_integration)
69
+ - [HTTP API Reference](https://docs.teckel.ai/docs/http_api_reference)
58
70
 
59
71
  ## License
60
72