@obsai/sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/README.md +211 -0
  2. package/package.json +55 -0
package/README.md ADDED
@@ -0,0 +1,211 @@
1
+ # @obsai/sdk
2
+
3
+ Lightweight observability SDK for API and AI/LLM applications. Track HTTP requests and AI/LLM calls with automatic cost calculation.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm install @obsai/sdk
9
+ # or
10
+ yarn add @obsai/sdk
11
+ ```
12
+
13
+ ## Quick Start
14
+
15
+ ### 1. Initialize the SDK
16
+
17
+ ```typescript
18
+ import { init } from '@obsai/sdk';
19
+
20
+ init({
21
+ apiKey: 'obs_your_api_key', // Get this from your project settings
22
+ service: 'my-app',
23
+ debug: true, // optional - logs events to console
24
+ });
25
+ ```
26
+
27
+ ### 2. HTTP Middleware (Express)
28
+
29
+ Automatically track all HTTP requests:
30
+
31
+ ```typescript
32
+ import express from 'express';
33
+ import { init } from '@obsai/sdk';
34
+ import { httpMiddleware } from '@obsai/sdk/http';
35
+
36
+ init({
37
+ apiKey: 'obs_your_api_key',
38
+ service: 'api-server',
39
+ });
40
+
41
+ const app = express();
42
+
43
+ app.use(httpMiddleware({
44
+ excludePaths: ['/health', '/metrics'],
45
+ excludePatterns: [/^\/static/],
46
+ }));
47
+
48
+ app.get('/api/users', (req, res) => {
49
+ res.json({ users: [] });
50
+ });
51
+ ```
52
+
53
+ ### 3. AI/LLM Wrapper (OpenAI)
54
+
55
+ Automatically track all OpenAI calls with token usage and cost:
56
+
57
+ ```typescript
58
+ import OpenAI from 'openai';
59
+ import { init } from '@obsai/sdk';
60
+ import { wrapOpenAI } from '@obsai/sdk/llm';
61
+
62
+ init({
63
+ apiKey: 'obs_your_api_key',
64
+ service: 'ai-service',
65
+ });
66
+
67
+ const openai = wrapOpenAI(new OpenAI());
68
+
69
+ // All calls are automatically tracked
70
+ const response = await openai.chat.completions.create({
71
+ model: 'gpt-4o-mini',
72
+ messages: [{ role: 'user', content: 'Hello!' }],
73
+ });
74
+ ```
75
+
76
+ ### 4. AI/LLM Wrapper (Anthropic)
77
+
78
+ ```typescript
79
+ import Anthropic from '@anthropic-ai/sdk';
80
+ import { init } from '@obsai/sdk';
81
+ import { wrapAnthropic } from '@obsai/sdk/llm';
82
+
83
+ init({
84
+ apiKey: 'obs_your_api_key',
85
+ });
86
+
87
+ const anthropic = wrapAnthropic(new Anthropic());
88
+
89
+ const response = await anthropic.messages.create({
90
+ model: 'claude-3-sonnet-20240229',
91
+ max_tokens: 1024,
92
+ messages: [{ role: 'user', content: 'Hello!' }],
93
+ });
94
+ ```
95
+
96
+ ### 5. Manual Tracking
97
+
98
+ For custom events or other providers:
99
+
100
+ ```typescript
101
+ import { init, track } from '@obsai/sdk';
102
+ import { trackLLMCall } from '@obsai/sdk/llm';
103
+
104
+ init({ apiKey: 'obs_your_api_key' });
105
+
106
+ // Track custom HTTP events
107
+ await track({
108
+ kind: 'http',
109
+ name: 'GET /api/users',
110
+ status: 'ok',
111
+ durationMs: 45,
112
+ attrs: { userId: '123' },
113
+ });
114
+
115
+ // Track custom AI/LLM calls
116
+ await trackLLMCall({
117
+ provider: 'gemini',
118
+ model: 'gemini-pro',
119
+ tokensIn: 150,
120
+ tokensOut: 500,
121
+ durationMs: 1200,
122
+ status: 'ok',
123
+ });
124
+ ```
125
+
126
+ ## SDK-Free Integration
127
+
128
+ ### Universal AI Gateway
129
+
130
+ Use Observa as an OpenAI-compatible proxy. No SDK needed!
131
+
132
+ ```typescript
133
+ import OpenAI from 'openai';
134
+
135
+ const client = new OpenAI({
136
+ baseURL: 'https://obsai.strateogi.com/api/v1/gateway',
137
+ apiKey: 'obs_your_api_key', // Your Observa API key
138
+ defaultHeaders: {
139
+ 'x-provider-key': process.env.OPENAI_API_KEY, // Your real provider key
140
+ 'x-provider': 'openai', // Optional: auto-detected from model
141
+ },
142
+ });
143
+
144
+ // Works exactly like normal OpenAI
145
+ const response = await client.chat.completions.create({
146
+ model: 'gpt-4o-mini',
147
+ messages: [{ role: 'user', content: 'Hello!' }],
148
+ });
149
+ ```
150
+
151
+ Supported providers: OpenAI, Anthropic (Claude), and Google (Gemini).
152
+
153
+ ### Direct API Ingestion
154
+
155
+ Send events directly via HTTP:
156
+
157
+ ```bash
158
+ curl -X POST https://obsai.strateogi.com/api/ingest \
159
+ -H "Authorization: Bearer obs_your_api_key" \
160
+ -H "Content-Type: application/json" \
161
+ -d '{
162
+ "events": [{
163
+ "kind": "ai",
164
+ "name": "gpt-4o-mini",
165
+ "trace_id": "trace-123",
166
+ "status": "ok",
167
+ "duration_ms": 1500,
168
+ "attrs": {
169
+ "provider": "openai",
170
+ "prompt_tokens": 100,
171
+ "completion_tokens": 250
172
+ }
173
+ }]
174
+ }'
175
+ ```
176
+
177
+ ## Privacy Modes
178
+
179
+ Configure the privacy level for each project in your Observa dashboard:
180
+
181
+ - **Metadata Only** (default): Tracks only latency, token counts, costs, and status. No prompts or responses are stored.
182
+ - **Full Logging**: Tracks everything, including prompts and responses, for debugging.
183
+
184
+ ## Configuration Options
185
+
186
+ | Option | Type | Required | Description |
187
+ |--------|------|----------|-------------|
188
+ | `apiKey` | string | Yes | Your project API key from Observa |
189
+ | `service` | string | No | Service name for grouping events (default: "default") |
190
+ | `baseUrl` | string | No | Custom Observa instance URL (default: https://obsai.strateogi.com) |
191
+ | `debug` | boolean | No | Log events to console (default: false) |
192
+
193
+ ## Supported AI Providers
194
+
195
+ | Provider | Models | Auto Cost Calculation |
196
+ |----------|--------|----------------------|
197
+ | OpenAI | GPT-4o, GPT-4o-mini, GPT-4, GPT-3.5-turbo, etc. | Yes |
198
+ | Anthropic | Claude 3.5, Claude 3, etc. | Yes |
199
+ | Google | Gemini Pro, Gemini Flash, etc. | Yes |
200
+ | Custom | Any | Manual via attrs |
201
+
202
+ ## Get Your API Key
203
+
204
+ 1. Sign up at [obsai.strateogi.com](https://obsai.strateogi.com)
205
+ 2. Create a project
206
+ 3. Go to Integrations tab
207
+ 4. Generate an API key
208
+
209
+ ## License
210
+
211
+ MIT
package/package.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "name": "@obsai/sdk",
3
+ "version": "0.1.0",
4
+ "description": "Lightweight observability SDK for API and AI/LLM applications",
5
+ "main": "dist/index.js",
6
+ "module": "dist/index.mjs",
7
+ "types": "dist/index.d.ts",
8
+ "exports": {
9
+ ".": {
10
+ "require": "./dist/index.js",
11
+ "import": "./dist/index.mjs",
12
+ "types": "./dist/index.d.ts"
13
+ },
14
+ "./http": {
15
+ "require": "./dist/http.js",
16
+ "import": "./dist/http.mjs",
17
+ "types": "./dist/http.d.ts"
18
+ },
19
+ "./llm": {
20
+ "require": "./dist/llm.js",
21
+ "import": "./dist/llm.mjs",
22
+ "types": "./dist/llm.d.ts"
23
+ }
24
+ },
25
+ "scripts": {
26
+ "build": "tsup src/index.ts src/http.ts src/llm.ts --format cjs,esm --dts --clean",
27
+ "dev": "tsup src/index.ts src/http.ts src/llm.ts --format cjs,esm --dts --watch"
28
+ },
29
+ "files": [
30
+ "dist"
31
+ ],
32
+ "keywords": [
33
+ "observability",
34
+ "monitoring",
35
+ "llm",
36
+ "api",
37
+ "openai",
38
+ "claude",
39
+ "gemini",
40
+ "telemetry"
41
+ ],
42
+ "license": "MIT",
43
+ "devDependencies": {
44
+ "tsup": "^8.0.0",
45
+ "typescript": "^5.0.0"
46
+ },
47
+ "peerDependencies": {
48
+ "openai": ">=4.0.0"
49
+ },
50
+ "peerDependenciesMeta": {
51
+ "openai": {
52
+ "optional": true
53
+ }
54
+ }
55
+ }