@tamyla/clodo-framework 4.3.4 → 4.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/README.md +34 -8
- package/dist/utilities/ai/client.js +276 -0
- package/dist/utilities/ai/index.js +6 -0
- package/dist/utilities/analytics/index.js +6 -0
- package/dist/utilities/analytics/writer.js +226 -0
- package/dist/utilities/bindings/client.js +283 -0
- package/dist/utilities/bindings/index.js +6 -0
- package/dist/utilities/cache/index.js +9 -0
- package/dist/utilities/cache/leaderboard.js +52 -0
- package/dist/utilities/cache/rate-limiter.js +57 -0
- package/dist/utilities/cache/session.js +69 -0
- package/dist/utilities/cache/upstash.js +200 -0
- package/dist/utilities/durable-objects/base.js +200 -0
- package/dist/utilities/durable-objects/counter.js +117 -0
- package/dist/utilities/durable-objects/index.js +10 -0
- package/dist/utilities/durable-objects/rate-limiter.js +80 -0
- package/dist/utilities/durable-objects/session-store.js +126 -0
- package/dist/utilities/durable-objects/websocket-room.js +223 -0
- package/dist/utilities/email/handler.js +359 -0
- package/dist/utilities/email/index.js +6 -0
- package/dist/utilities/index.js +65 -0
- package/dist/utilities/kv/index.js +6 -0
- package/dist/utilities/kv/storage.js +268 -0
- package/dist/utilities/queues/consumer.js +188 -0
- package/dist/utilities/queues/index.js +7 -0
- package/dist/utilities/queues/producer.js +74 -0
- package/dist/utilities/scheduled/handler.js +276 -0
- package/dist/utilities/scheduled/index.js +6 -0
- package/dist/utilities/storage/index.js +6 -0
- package/dist/utilities/storage/r2.js +314 -0
- package/dist/utilities/vectorize/index.js +6 -0
- package/dist/utilities/vectorize/store.js +273 -0
- package/package.json +21 -3
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,18 @@
+# [4.4.0](https://github.com/tamylaa/clodo-framework/compare/v4.3.5...v4.4.0) (2026-02-05)
+
+
+### Features
+
+* add comprehensive Cloudflare Workers utilities ([8c82fd9](https://github.com/tamylaa/clodo-framework/commit/8c82fd97b9f2ac7ea1e579b3a4a37fea9f0aaaab))
+* add comprehensive Cloudflare Workers utilities ([b7fd6e8](https://github.com/tamylaa/clodo-framework/commit/b7fd6e881cc6734606917b1786150abc5022f6ab))
+
+## [4.3.5](https://github.com/tamylaa/clodo-framework/compare/v4.3.4...v4.3.5) (2026-02-04)
+
+
+### Bug Fixes
+
+* **funding:** use string value for open_collective in FUNDING.yml ([680cb5f](https://github.com/tamylaa/clodo-framework/commit/680cb5fa28499f2297b39a556ee2c60c0573ab4a))
+
 ## [4.3.4](https://github.com/tamylaa/clodo-framework/compare/v4.3.3...v4.3.4) (2026-02-04)
 
 
package/README.md
CHANGED
@@ -1,11 +1,13 @@
 # Clodo Framework
 
+[](https://github.com/tamylaa/clodo-framework/actions/workflows/validate-on-pr.yml) [](https://github.com/tamylaa/clodo-framework/actions/workflows/full-tests-nightly.yml) [](https://www.npmjs.com/package/@tamyla/clodo-framework) [](https://www.npmjs.com/package/@tamyla/clodo-framework) [](https://github.com/tamylaa/clodo-framework/blob/main/LICENSE) [](https://github.com/sponsors/tamylaa) [](SUPPORT.md) [](https://www.buymeacoffee.com/yourname) [](https://www.patreon.com/yourname) [](https://paypal.me/yourname)
+
 ## 🚀 Production-Ready: Promise Delivered
 
 **Framework Status: ✅ VALIDATED & PRODUCTION-READY**
 **Validation: 10/10 Phases Passed**
 **Service Generation: 28+ Files Per Service**
-**Test Coverage
+**Test Coverage:** (Latest CI run 2026-02-04) — **115 test suites passed; 4 tests skipped; 2113 passed, 2117 total**
 
 A comprehensive framework for building enterprise-grade software architecture on Cloudflare Workers + D1. This framework enables rapid development of autonomous, domain-specific services while maintaining consistency and reusability across your entire ecosystem.
 
@@ -31,6 +33,21 @@ A comprehensive framework for building enterprise-grade software architecture on
 - **[Security](docs/SECURITY.md)** - Security considerations
 - **[Framework Evolution](docs/FRAMEWORK_EVOLUTION_NARRATIVE.md)** - Development history
 
+## 🧑🤝🧑 Community & Support
+- **Email:** `product@clodo.dev` — product support, feedback, and security reports
+- **Twitter:** [@clodoframework](https://twitter.com/clodoframework) — follow for updates and announcements
+- **Quick ways to help:**
+  - ⭐ Star the repository on GitHub
+  - 🐛 Open issues for bugs or feature ideas
+  - 🔀 Submit PRs or reviews (even small documentation fixes are welcome)
+  - 🗣️ Share your experience on Twitter and tag **@clodoframework**
+
+Your feedback helps prioritize improvements and signals others that this project is useful.
+
+- **Support & SLAs:** See [SUPPORT.md](SUPPORT.md) for response times and escalation procedures.
+
+- **Donate / Sponsor:** If you'd like to support ongoing maintenance and improvements, please see [FUNDING.md](FUNDING.md) for ways to contribute (GitHub Sponsors, Open Collective, PayPal, Buy Me a Coffee, Patreon).
+
 ### 📁 **Documentation Structure**
 ```
 ├── docs/ # 📖 Public documentation (npm package)
@@ -260,7 +277,7 @@ clodo-framework/
 │ ├── analysis/ # Technical analysis
 │ └── licensing/ # License information
 ├── src/ # 💻 Source code
-├── test/ # ✅ Test suites (
+├── test/ # ✅ Test suites (Latest CI: 115 suites; 2113 tests passed, 4 skipped)
 ├── bin/ # 🔧 CLI executables
 ├── dist/ # 📦 Built distribution
 └── templates/ # 📋 Service templates
@@ -268,8 +285,8 @@ clodo-framework/
 ```
 
 **Quality Metrics:**
-- ✅ **
-- ✅ **
+- ✅ **Latest CI (2026-02-04): 115 test suites passed; 4 tests skipped; 2113/2117 tests passed**
+- ✅ **CLI tests:** passing (all CLI-specific tests passed in the latest run)
 - ✅ **Clean architecture** (no temporary or duplicate files)
 - ✅ **Configuration-based** (no hard-coded values in source)
 
@@ -1371,13 +1388,22 @@ npx wrangler login
 # - Cloudflare D1:Edit permissions
 ```
 
-#### **
+#### **Integration tests & DNS: how CI avoids ENOTFOUND**
+- Integration tests mock network calls when `TEST_URL` is not provided to avoid DNS resolution failures (ENOTFOUND) in CI and developer environments.
+- To run integration tests against an actual deployment set `TEST_URL` to your deployment URL, for example:
+
 ```bash
-
-# The system waits 10 seconds but some domains may need longer
-# Tests failures don't prevent deployment success
+TEST_URL='https://your-service.workers.dev' npm run test:integration
 ```
 
+- Database integration tests are disabled by default for CI. Enable them by setting `RUN_DB_TESTS=true` (or `RUN_DB_TESTS=1`) when you have a configured database and credentials available:
+
+```bash
+TEST_URL='https://your-service.workers.dev' RUN_DB_TESTS=true npm run test:integration
+```
+
+This prevents flaky CI failures while still allowing full end-to-end verification when desired.
+
 #### **"npx command not found" on Windows**
 ```bash
 # Problem: Command configuration for cross-platform compatibility
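The integration-test hunk above describes tests that fall back to mocks when `TEST_URL` is unset. A minimal sketch of that guard pattern follows; the `/health` endpoint, helper name, and mocked payload are hypothetical and not taken from the package:

```js
// Hypothetical sketch of the TEST_URL guard described in the README hunk above
// (not part of the package diff). When TEST_URL is unset, the helper returns a
// canned response instead of resolving a real hostname, so CI never hits DNS
// and cannot fail with ENOTFOUND.
const baseUrl = process.env.TEST_URL;

export async function fetchHealth() {
  if (!baseUrl) {
    // No deployment configured: pretend the service answered.
    return { status: 200, body: { status: 'mocked' } };
  }
  const res = await fetch(`${baseUrl}/health`);
  return { status: res.status, body: await res.json() };
}
```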
package/dist/utilities/ai/client.js
ADDED
@@ -0,0 +1,276 @@
+/**
+ * Workers AI Client
+ * Provides convenient methods for working with Cloudflare Workers AI
+ *
+ * @example
+ * import { AIClient, Models } from '@tamyla/clodo-framework/utilities/ai';
+ *
+ * const ai = new AIClient(env.AI);
+ * const response = await ai.chat([
+ *   { role: 'user', content: 'Hello!' }
+ * ]);
+ */
+
+/**
+ * Default models for different tasks
+ */
+export const Models = {
+  // Text Generation
+  TEXT_GENERATION: '@cf/meta/llama-3.1-8b-instruct',
+  TEXT_GENERATION_FAST: '@cf/meta/llama-3.1-8b-instruct-fast',
+  TEXT_GENERATION_LARGE: '@cf/meta/llama-3.1-70b-instruct',
+  // Code Generation
+  CODE_GENERATION: '@hf/thebloke/deepseek-coder-6.7b-instruct-awq',
+  // Chat
+  CHAT: '@cf/meta/llama-3.1-8b-instruct',
+  // Embeddings
+  EMBEDDINGS: '@cf/baai/bge-base-en-v1.5',
+  EMBEDDINGS_LARGE: '@cf/baai/bge-large-en-v1.5',
+  // Image Generation
+  IMAGE_GENERATION: '@cf/stabilityai/stable-diffusion-xl-base-1.0',
+  IMAGE_GENERATION_FAST: '@cf/lykon/dreamshaper-8-lcm',
+  // Image Classification
+  IMAGE_CLASSIFICATION: '@cf/microsoft/resnet-50',
+  // Speech to Text
+  SPEECH_TO_TEXT: '@cf/openai/whisper',
+  SPEECH_TO_TEXT_TINY: '@cf/openai/whisper-tiny-en',
+  // Translation
+  TRANSLATION: '@cf/meta/m2m100-1.2b',
+  // Summarization
+  SUMMARIZATION: '@cf/facebook/bart-large-cnn',
+  // Sentiment Analysis
+  SENTIMENT: '@cf/huggingface/distilbert-sst-2-int8',
+  // Object Detection
+  OBJECT_DETECTION: '@cf/facebook/detr-resnet-50'
+};
+
+/**
+ * AI Client for Workers AI
+ */
+export class AIClient {
+  /**
+   * @param {Object} ai - AI binding from env
+   * @param {Object} options - Client options
+   */
+  constructor(ai, options = {}) {
+    if (!ai) {
+      throw new Error('AI binding is required');
+    }
+    this.ai = ai;
+    this.options = {
+      defaultModel: Models.CHAT,
+      maxTokens: 1024,
+      temperature: 0.7,
+      ...options
+    };
+  }
+
+  /**
+   * Run a model directly
+   */
+  async run(model, inputs) {
+    return this.ai.run(model, inputs);
+  }
+
+  /**
+   * Generate text from a prompt
+   */
+  async generateText(prompt, options = {}) {
+    const model = options.model || this.options.defaultModel;
+    const response = await this.ai.run(model, {
+      prompt,
+      max_tokens: options.maxTokens || this.options.maxTokens,
+      temperature: options.temperature || this.options.temperature,
+      ...(options.stream && {
+        stream: true
+      })
+    });
+    if (options.stream) {
+      return response;
+    }
+    return response.response || response;
+  }
+
+  /**
+   * Chat completion with message history
+   */
+  async chat(messages, options = {}) {
+    const model = options.model || Models.CHAT;
+    const response = await this.ai.run(model, {
+      messages,
+      max_tokens: options.maxTokens || this.options.maxTokens,
+      temperature: options.temperature || this.options.temperature,
+      ...(options.stream && {
+        stream: true
+      })
+    });
+    if (options.stream) {
+      return response;
+    }
+    return response.response || response;
+  }
+
+  /**
+   * Generate embeddings for text
+   */
+  async embed(text, options = {}) {
+    const model = options.model || Models.EMBEDDINGS;
+    const texts = Array.isArray(text) ? text : [text];
+    const response = await this.ai.run(model, {
+      text: texts
+    });
+    return response.data || response;
+  }
+
+  /**
+   * Generate an image from a prompt
+   */
+  async generateImage(prompt, options = {}) {
+    const model = options.model || Models.IMAGE_GENERATION;
+    const response = await this.ai.run(model, {
+      prompt,
+      negative_prompt: options.negativePrompt,
+      height: options.height || 1024,
+      width: options.width || 1024,
+      num_steps: options.steps || 20,
+      guidance: options.guidance || 7.5
+    });
+    return response;
+  }
+
+  /**
+   * Classify an image
+   */
+  async classifyImage(image, options = {}) {
+    const model = options.model || Models.IMAGE_CLASSIFICATION;
+    const response = await this.ai.run(model, {
+      image: Array.isArray(image) ? image : [...new Uint8Array(image)]
+    });
+    return response;
+  }
+
+  /**
+   * Detect objects in an image
+   */
+  async detectObjects(image, options = {}) {
+    const model = options.model || Models.OBJECT_DETECTION;
+    const response = await this.ai.run(model, {
+      image: Array.isArray(image) ? image : [...new Uint8Array(image)]
+    });
+    return response;
+  }
+
+  /**
+   * Transcribe audio to text
+   */
+  async transcribe(audio, options = {}) {
+    const model = options.model || Models.SPEECH_TO_TEXT;
+    const response = await this.ai.run(model, {
+      audio: Array.isArray(audio) ? audio : [...new Uint8Array(audio)]
+    });
+    return response;
+  }
+
+  /**
+   * Translate text
+   */
+  async translate(text, sourceLang, targetLang, options = {}) {
+    const model = options.model || Models.TRANSLATION;
+    const response = await this.ai.run(model, {
+      text,
+      source_lang: sourceLang,
+      target_lang: targetLang
+    });
+    return response.translated_text || response;
+  }
+
+  /**
+   * Summarize text
+   */
+  async summarize(text, options = {}) {
+    const model = options.model || Models.SUMMARIZATION;
+    const response = await this.ai.run(model, {
+      input_text: text,
+      max_length: options.maxLength || 150
+    });
+    return response.summary || response;
+  }
+
+  /**
+   * Analyze sentiment of text
+   */
+  async analyzeSentiment(text, options = {}) {
+    const model = options.model || Models.SENTIMENT;
+    const response = await this.ai.run(model, {
+      text
+    });
+    return response;
+  }
+
+  /**
+   * Calculate similarity between texts using embeddings
+   */
+  async similarity(text1, text2) {
+    const embeddings = await this.embed([text1, text2]);
+    return this.cosineSimilarity(embeddings[0], embeddings[1]);
+  }
+
+  /**
+   * Calculate cosine similarity between two vectors
+   */
+  cosineSimilarity(a, b) {
+    let dotProduct = 0;
+    let normA = 0;
+    let normB = 0;
+    for (let i = 0; i < a.length; i++) {
+      dotProduct += a[i] * b[i];
+      normA += a[i] * a[i];
+      normB += b[i] * b[i];
+    }
+    return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
+  }
+}
+
+/**
+ * Create an SSE stream from AI stream response
+ */
+export function createSSEStream(stream) {
+  const encoder = new TextEncoder();
+  return new ReadableStream({
+    async start(controller) {
+      const reader = stream.getReader();
+      try {
+        // eslint-disable-next-line no-constant-condition
+        while (true) {
+          const {
+            done,
+            value
+          } = await reader.read();
+          if (done) break;
+          const text = typeof value === 'string' ? value : new TextDecoder().decode(value);
+          controller.enqueue(encoder.encode(`data: ${JSON.stringify({
+            text
+          })}\n\n`));
+        }
+        controller.enqueue(encoder.encode('data: [DONE]\n\n'));
+        controller.close();
+      } catch (error) {
+        controller.error(error);
+      }
+    }
+  });
+}
+
+/**
+ * Create a streaming response for AI output
+ */
+export function streamResponse(stream) {
+  return new Response(createSSEStream(stream), {
+    headers: {
+      'Content-Type': 'text/event-stream',
+      'Cache-Control': 'no-cache',
+      'Connection': 'keep-alive'
+    }
+  });
+}
+export default AIClient;
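For orientation, a minimal usage sketch of the new `AIClient` inside a Worker fetch handler; the route shape and the `AI` binding name are assumptions, while the import path and constructor mirror the module's own JSDoc example:

```js
// Minimal sketch, not part of the package diff. Assumes an `AI` binding named `AI`
// is declared in wrangler.toml for the Worker.
import { AIClient } from '@tamyla/clodo-framework/utilities/ai';

export default {
  async fetch(request, env) {
    const ai = new AIClient(env.AI);

    // Take a prompt from the request body and ask the default chat model.
    const { prompt } = await request.json();
    const reply = await ai.chat([{ role: 'user', content: prompt }]);

    return new Response(JSON.stringify({ reply }), {
      headers: { 'Content-Type': 'application/json' }
    });
  }
};
```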
package/dist/utilities/analytics/writer.js
ADDED
@@ -0,0 +1,226 @@
+/**
+ * Analytics Engine Utilities
+ * Write events to Cloudflare Analytics Engine
+ *
+ * @example
+ * import { AnalyticsWriter, EventTracker } from '@tamyla/clodo-framework/utilities/analytics';
+ *
+ * const analytics = new AnalyticsWriter(env.ANALYTICS);
+ *
+ * // Write a data point
+ * analytics.write({
+ *   indexes: ['user-123'],
+ *   blobs: ['page_view', '/home'],
+ *   doubles: [Date.now()],
+ * });
+ *
+ * // Use event tracker for common patterns
+ * const tracker = new EventTracker(env.ANALYTICS);
+ * tracker.pageView('/home', { userId: '123', referrer: 'google.com' });
+ * tracker.event('button_click', { buttonId: 'signup' });
+ */
+
+/**
+ * Analytics Engine Writer
+ */
+export class AnalyticsWriter {
+  /**
+   * @param {AnalyticsEngineDataset} dataset - Analytics Engine binding
+   */
+  constructor(dataset) {
+    if (!dataset) {
+      throw new Error('Analytics Engine dataset binding is required');
+    }
+    this.dataset = dataset;
+  }
+
+  /**
+   * Write a data point to Analytics Engine
+   * @param {Object} dataPoint - Data point to write
+   * @param {string[]} dataPoint.indexes - Index values (up to 1)
+   * @param {string[]} dataPoint.blobs - Blob values (up to 20)
+   * @param {number[]} dataPoint.doubles - Double values (up to 20)
+   */
+  write(dataPoint) {
+    this.dataset.writeDataPoint({
+      indexes: dataPoint.indexes || [],
+      blobs: dataPoint.blobs || [],
+      doubles: dataPoint.doubles || []
+    });
+  }
+
+  /**
+   * Write multiple data points
+   * @param {Array<Object>} dataPoints
+   */
+  writeBatch(dataPoints) {
+    for (const dataPoint of dataPoints) {
+      this.write(dataPoint);
+    }
+  }
+}
+
+/**
+ * High-level event tracker
+ */
+export class EventTracker {
+  constructor(dataset, options = {}) {
+    this.writer = new AnalyticsWriter(dataset);
+    this.defaultIndex = options.defaultIndex || '';
+    this.includeTimestamp = options.includeTimestamp !== false;
+  }
+
+  /**
+   * Track a page view
+   */
+  pageView(path, properties = {}) {
+    this.writer.write({
+      indexes: [properties.userId || this.defaultIndex],
+      blobs: ['page_view', path, properties.referrer || '', properties.userAgent || '', properties.country || ''],
+      doubles: this.includeTimestamp ? [Date.now()] : []
+    });
+  }
+
+  /**
+   * Track a custom event
+   */
+  event(eventName, properties = {}) {
+    const blobs = ['event', eventName, JSON.stringify(properties).slice(0, 1024) // Truncate if too long
+    ];
+    this.writer.write({
+      indexes: [properties.userId || this.defaultIndex],
+      blobs,
+      doubles: this.includeTimestamp ? [Date.now(), properties.value || 0] : [properties.value || 0]
+    });
+  }
+
+  /**
+   * Track an error
+   */
+  error(errorType, message, properties = {}) {
+    this.writer.write({
+      indexes: [properties.userId || this.defaultIndex],
+      blobs: ['error', errorType, message.slice(0, 1024), properties.stack?.slice(0, 1024) || '', properties.path || ''],
+      doubles: this.includeTimestamp ? [Date.now()] : []
+    });
+  }
+
+  /**
+   * Track API request
+   */
+  apiRequest(method, path, properties = {}) {
+    this.writer.write({
+      indexes: [properties.userId || this.defaultIndex],
+      blobs: ['api_request', method, path, properties.status?.toString() || '', properties.error || ''],
+      doubles: [Date.now(), properties.duration || 0, properties.status || 0, properties.responseSize || 0]
+    });
+  }
+
+  /**
+   * Track user action
+   */
+  userAction(action, target, properties = {}) {
+    this.writer.write({
+      indexes: [properties.userId || this.defaultIndex],
+      blobs: ['user_action', action, target, properties.label || '', properties.category || ''],
+      doubles: this.includeTimestamp ? [Date.now(), properties.value || 0] : [properties.value || 0]
+    });
+  }
+}
+
+/**
+ * Metrics collector for performance monitoring
+ */
+export class MetricsCollector {
+  constructor(dataset, options = {}) {
+    this.writer = new AnalyticsWriter(dataset);
+    this.serviceName = options.serviceName || 'worker';
+    this.buffer = [];
+    this.flushInterval = options.flushInterval || 1000;
+    this.maxBufferSize = options.maxBufferSize || 100;
+  }
+
+  /**
+   * Record a timing metric
+   */
+  timing(name, durationMs, tags = {}) {
+    this._addToBuffer({
+      type: 'timing',
+      name,
+      value: durationMs,
+      tags
+    });
+  }
+
+  /**
+   * Record a counter metric
+   */
+  counter(name, value = 1, tags = {}) {
+    this._addToBuffer({
+      type: 'counter',
+      name,
+      value,
+      tags
+    });
+  }
+
+  /**
+   * Record a gauge metric
+   */
+  gauge(name, value, tags = {}) {
+    this._addToBuffer({
+      type: 'gauge',
+      name,
+      value,
+      tags
+    });
+  }
+
+  /**
+   * Record request metrics
+   */
+  request(properties = {}) {
+    this.writer.write({
+      indexes: [this.serviceName],
+      blobs: ['request', properties.method || 'GET', properties.path || '/', properties.status?.toString() || '200', properties.colo || ''],
+      doubles: [Date.now(), properties.duration || 0, properties.status || 200, properties.requestSize || 0, properties.responseSize || 0]
+    });
+  }
+
+  /**
+   * Create a timer that records duration on stop
+   */
+  startTimer(name, tags = {}) {
+    const start = performance.now();
+    return {
+      stop: () => {
+        const duration = performance.now() - start;
+        this.timing(name, duration, tags);
+        return duration;
+      }
+    };
+  }
+  _addToBuffer(metric) {
+    this.buffer.push(metric);
+    if (this.buffer.length >= this.maxBufferSize) {
+      this.flush();
+    }
+  }
+
+  /**
+   * Flush buffered metrics
+   */
+  flush() {
+    if (this.buffer.length === 0) return;
+    const timestamp = Date.now();
+    for (const metric of this.buffer) {
+      this.writer.write({
+        indexes: [this.serviceName],
+        blobs: [metric.type, metric.name, JSON.stringify(metric.tags || {})],
+        doubles: [timestamp, metric.value]
+      });
+    }
+    this.buffer = [];
+  }
+}
+export default AnalyticsWriter;
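Likewise, a minimal sketch of the new analytics utilities in a Worker; the dataset binding name `ANALYTICS` follows the module's JSDoc example, while the service name and route handling are illustrative assumptions:

```js
// Minimal sketch, not part of the package diff. Assumes an Analytics Engine dataset
// binding named `ANALYTICS` is declared in wrangler.toml.
import { EventTracker, MetricsCollector } from '@tamyla/clodo-framework/utilities/analytics';

export default {
  async fetch(request, env) {
    const tracker = new EventTracker(env.ANALYTICS, { defaultIndex: 'anonymous' });
    const metrics = new MetricsCollector(env.ANALYTICS, { serviceName: 'my-worker' });

    const timer = metrics.startTimer('request_duration');
    const url = new URL(request.url);

    // Record the page view, build the response, then record how long it took.
    tracker.pageView(url.pathname, { referrer: request.headers.get('Referer') || '' });
    const response = new Response('ok');

    timer.stop();    // buffers a timing metric
    metrics.flush(); // writes buffered metrics before the Worker returns
    return response;
  }
};
```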