@costcanary/sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,36 @@
1
+ # @costcanary/sdk
2
+
3
+ Track per-user LLM costs in 1 line of code.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ npm install @costcanary/sdk
9
+ ```
10
+
11
+ ## Usage
12
+
13
+ ```javascript
14
+ const { init, track } = require("@costcanary/sdk");
15
+
16
+ init({ apiKey: "cck_your_api_key" });
17
+
18
+ // call from inside an async function — top-level `await` is not valid in CommonJS
+ const result = await track({
19
+ userId: "user_123", // your end-user's ID
20
+ feature: "chat", // which feature triggered this call
21
+ fn: () => openai.chat.completions.create({ ... }),
22
+ });
23
+ ```
24
+
25
+ That's it. CostCanary automatically extracts tokens, calculates cost, and sends it to your dashboard.
26
+
27
+ ## Supported Models
28
+
29
+ gpt-4o, gpt-4o-mini, gpt-4-turbo, gpt-3.5-turbo, claude-3-5-sonnet, claude-3-haiku, gemini-1.5-pro, gemini-1.5-flash
30
+
31
+ ## Config
32
+
33
+ | Option | Default | Description |
34
+ |--------|---------|-------------|
35
+ | `apiKey` | required | Your CostCanary API key |
36
+ | `endpoint` | `https://costcanary.com/api/ingest` | Override for self-hosted |
package/package.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "name": "@costcanary/sdk",
3
+ "version": "0.1.0",
4
+ "description": "Track per-user LLM costs in 1 line of code",
5
+ "main": "src/index.js",
6
+ "files": ["src/", "README.md"],
7
+ "keywords": ["llm", "cost", "profitability", "openai", "anthropic", "tracking"],
8
+ "license": "MIT",
9
+ "repository": {
10
+ "type": "git",
11
+ "url": "https://github.com/your-org/costcanary"
12
+ },
13
+ "homepage": "https://costcanary.com",
14
+ "engines": {
15
+ "node": ">=18"
16
+ },
17
+ "scripts": {
18
+ "prepublishOnly": "node -e \"require('./src/index.js')\" && echo 'SDK OK'"
19
+ }
20
+ }
package/src/costs.js ADDED
@@ -0,0 +1,14 @@
1
// Per-token costs in USD (input / output per token).
// Frozen (including each rate entry) so shared pricing data cannot be
// mutated by accident anywhere in the process; "default" is the fallback
// used for models not listed here.
const MODEL_COSTS = Object.freeze({
  "gpt-4o": Object.freeze({ input: 0.0000025, output: 0.00001 }),
  "gpt-4o-mini": Object.freeze({ input: 0.00000015, output: 0.0000006 }),
  "gpt-4-turbo": Object.freeze({ input: 0.00001, output: 0.00003 }),
  "gpt-3.5-turbo": Object.freeze({ input: 0.0000005, output: 0.0000015 }),
  "claude-3-5-sonnet": Object.freeze({ input: 0.000003, output: 0.000015 }),
  "claude-3-haiku": Object.freeze({ input: 0.00000025, output: 0.00000125 }),
  "gemini-1.5-pro": Object.freeze({ input: 0.00000125, output: 0.000005 }),
  "gemini-1.5-flash": Object.freeze({ input: 0.000000075, output: 0.0000003 }),
  "default": Object.freeze({ input: 0.000001, output: 0.000004 }),
});
13
+
14
+ module.exports = { MODEL_COSTS };
package/src/index.js ADDED
@@ -0,0 +1,53 @@
1
+ const { MODEL_COSTS } = require("./costs");
2
+ const { enqueue } = require("./queue");
3
+
4
// Module-level SDK configuration; mutated only by init().
let _config = { apiKey: null, endpoint: "https://costcanary.com/api/ingest" };

/**
 * Initialize the SDK with your API key.
 * @param {{ apiKey: string, endpoint?: string }} config - `apiKey` is your
 *   CostCanary key; `endpoint` optionally overrides the default ingest URL
 *   (self-hosted deployments). An omitted endpoint leaves the current one
 *   untouched.
 */
function init(config) {
  const { apiKey, endpoint } = config;
  _config.apiKey = apiKey;
  if (endpoint) {
    _config.endpoint = endpoint;
  }
}
11
+
12
+ /**
13
+ * PROPRIETARY: Wrap any LLM call to automatically track cost per user.
14
+ * @param {{ userId: string, feature: string, fn: () => Promise }} options
15
+ * @returns {Promise} The original LLM result
16
+ */
17
+ async function track({ userId, feature, fn }) {
18
+ if (!_config.apiKey) {
19
+ console.warn("[CostCanary] No API key set. Call init() first.");
20
+ return await fn();
21
+ }
22
+
23
+ const start = Date.now();
24
+ const result = await fn();
25
+ const duration = Date.now() - start;
26
+
27
+ const usage = result?.usage || {};
28
+ const model = result?.model || "unknown";
29
+ const tokens = (usage.prompt_tokens || 0) + (usage.completion_tokens || 0);
30
+ const cost = _calculateCost(model, usage);
31
+
32
+ _sendToIngest({ endUserId: userId, feature, model, tokens, cost, duration,
33
+ timestamp: new Date().toISOString() });
34
+
35
+ return result;
36
+ }
37
+
38
/**
 * Compute the USD cost of one call from its token usage.
 *
 * Rates are resolved by exact model name first, then by longest prefix
 * match — real API responses often carry dated identifiers such as
 * "gpt-4o-2024-08-06", which previously missed the table entirely and
 * were silently billed at "default" rates. Longest-match ordering keeps
 * "gpt-4o-mini-…" from resolving to "gpt-4o". Accepts both OpenAI-style
 * (prompt_tokens/completion_tokens) and Anthropic-style
 * (input_tokens/output_tokens) usage objects.
 *
 * @param {string} model - model identifier reported by the provider
 * @param {object} usage - token usage object from the provider response
 * @returns {number} estimated cost in USD
 */
function _calculateCost(model, usage) {
  let rates = MODEL_COSTS[model];
  if (!rates && typeof model === "string") {
    const prefix = Object.keys(MODEL_COSTS)
      .filter((key) => key !== "default" && model.startsWith(key))
      .sort((a, b) => b.length - a.length)[0];
    if (prefix) rates = MODEL_COSTS[prefix];
  }
  rates = rates || MODEL_COSTS["default"];

  const promptTokens = usage.prompt_tokens ?? usage.input_tokens ?? 0;
  const completionTokens = usage.completion_tokens ?? usage.output_tokens ?? 0;
  return promptTokens * rates.input + completionTokens * rates.output;
}
43
+
44
/**
 * POST one usage record to the configured ingest endpoint.
 *
 * Returns the fetch promise so the retry queue can chain on it: queue.js
 * calls `sendFn(item).catch(...)`, which previously threw a TypeError on
 * every retry because nothing was returned here. On network failure the
 * record is enqueued for a later retry attempt.
 *
 * NOTE(review): fetch only rejects on network-level errors — HTTP 4xx/5xx
 * responses resolve and are currently dropped silently; confirm whether
 * server-side failures (e.g. 503) should also be retried.
 *
 * @param {object} data - usage record to deliver
 * @returns {Promise} resolves once the send attempt has settled
 */
function _sendToIngest(data) {
  return fetch(_config.endpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${_config.apiKey}`,
    },
    body: JSON.stringify(data),
  }).catch(() => enqueue(data, _sendToIngest));
}
52
+
53
+ module.exports = { init, track };
package/src/queue.js ADDED
@@ -0,0 +1,21 @@
1
// In-memory buffer of ingest payloads that failed to send.
const _queue = [];
// Cap so an unreachable endpoint cannot grow memory without bound;
// beyond this, the oldest record is dropped.
const MAX_QUEUE_SIZE = 1000;
let _retryTimer = null;

/**
 * Buffer a failed payload and start the periodic retry loop (one delivery
 * attempt per minute) if it is not already running.
 * @param {object} item - payload to retry
 * @param {(item: object) => Promise|void} sendFn - delivery function
 */
function enqueue(item, sendFn) {
  _queue.push({ item, sendFn });
  if (_queue.length > MAX_QUEUE_SIZE) {
    _queue.shift(); // drop the oldest record rather than grow forever
  }
  if (!_retryTimer) {
    _retryTimer = setInterval(flush, 60_000);
    // Don't let the retry timer keep a Node process alive on its own
    // (no-op in environments without unref, e.g. browsers).
    _retryTimer.unref?.();
  }
}

/**
 * Try to deliver the oldest queued payload; put it back at the front on
 * failure. Stops the timer once the queue has drained.
 */
function flush() {
  if (_queue.length === 0) {
    clearInterval(_retryTimer);
    _retryTimer = null;
    return;
  }
  const { item, sendFn } = _queue.shift();
  // Promise.resolve().then(...) tolerates sendFn implementations that
  // throw synchronously or return undefined (the original _sendToIngest
  // did), which previously made `.catch` throw a TypeError inside the
  // interval callback every minute.
  Promise.resolve()
    .then(() => sendFn(item))
    .catch(() => _queue.unshift({ item, sendFn }));
}
20
+
21
+ module.exports = { enqueue };