@tamyla/clodo-framework 4.4.1 → 4.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +2 -1851
- package/README.md +44 -18
- package/dist/cli/commands/add.js +325 -0
- package/dist/config/service-schema-config.js +98 -5
- package/dist/index.js +22 -3
- package/dist/middleware/Composer.js +2 -1
- package/dist/middleware/factories.js +445 -0
- package/dist/middleware/index.js +4 -1
- package/dist/modules/ModuleManager.js +6 -2
- package/dist/routing/EnhancedRouter.js +185 -44
- package/dist/routing/RequestContext.js +393 -0
- package/dist/schema/SchemaManager.js +6 -2
- package/dist/service-management/generators/code/ServiceMiddlewareGenerator.js +79 -223
- package/dist/service-management/generators/code/WorkerIndexGenerator.js +241 -98
- package/dist/service-management/generators/config/WranglerTomlGenerator.js +130 -89
- package/dist/simple-api.js +4 -4
- package/dist/utilities/index.js +134 -1
- package/dist/validation/environmentGuard.js +172 -0
- package/package.json +4 -1
- package/scripts/repro-clodo.js +123 -0
- package/templates/ai-worker/package.json +19 -0
- package/templates/ai-worker/src/index.js +160 -0
- package/templates/cron-worker/package.json +19 -0
- package/templates/cron-worker/src/index.js +211 -0
- package/templates/edge-proxy/package.json +18 -0
- package/templates/edge-proxy/src/index.js +150 -0
- package/templates/minimal/package.json +17 -0
- package/templates/minimal/src/index.js +40 -0
- package/templates/queue-processor/package.json +19 -0
- package/templates/queue-processor/src/index.js +213 -0
- package/templates/rest-api/.dev.vars +2 -0
- package/templates/rest-api/package.json +19 -0
- package/templates/rest-api/src/index.js +124 -0
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cron Worker Template — @tamyla/clodo-framework
|
|
3
|
+
*
|
|
4
|
+
* A fully working scheduled/cron worker with:
|
|
5
|
+
* - Scheduled handler with cron trigger
|
|
6
|
+
* - KV state persistence between runs
|
|
7
|
+
* - Job registry for multiple cron schedules
|
|
8
|
+
* - Notification pattern
|
|
9
|
+
* - Health check endpoint
|
|
10
|
+
*
|
|
11
|
+
* Deploy with: npx wrangler deploy
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import {
|
|
15
|
+
createCorsMiddleware,
|
|
16
|
+
createErrorHandler,
|
|
17
|
+
createLogger,
|
|
18
|
+
composeMiddleware,
|
|
19
|
+
createEnvironmentGuard
|
|
20
|
+
} from '@tamyla/clodo-framework';
|
|
21
|
+
|
|
22
|
+
// ── Environment validation ────────────────────────────────────────────
// KV_DATA must be bound (job state persistence); the webhook is optional.
const envGuard = createEnvironmentGuard({
  required: ['KV_DATA'],
  optional: ['NOTIFICATION_WEBHOOK']
});

// ── Middleware ────────────────────────────────────────────────────────
// Order matters: CORS first, then request logging, then error handling.
const middlewareStack = [
  createCorsMiddleware({ origins: ['*'] }),
  createLogger({ prefix: 'cron-worker' }),
  createErrorHandler()
];
const middleware = composeMiddleware(...middlewareStack);
|
|
34
|
+
|
|
35
|
+
// ── Job registry — define your scheduled jobs ─────────────────────────
const jobs = {
  /**
   * Daily cleanup job — runs every day at midnight UTC.
   * Cron: 0 0 * * *
   *
   * @param {object} env - Worker bindings; state is persisted via KV_DATA.
   * @param {object} ctx - Execution context (unused by this job).
   * @returns {Promise<object>} Summary of what the run cleaned up.
   */
  async 'daily-cleanup'(env, ctx) {
    console.log('Running daily cleanup...');

    const state = await getJobState(env, 'daily-cleanup');

    // Your cleanup logic here
    const result = {
      cleanedItems: 0,
      freedSpace: '0 MB',
      timestamp: new Date().toISOString()
    };

    // Persist state so the next run can see the previous result and count.
    await saveJobState(env, 'daily-cleanup', {
      lastRun: new Date().toISOString(),
      lastResult: result,
      runCount: (state.runCount || 0) + 1
    });

    return result;
  },

  /**
   * Periodic health probe — runs every 5 minutes.
   * Cron: *\/5 * * * *
   * (BUG FIX: the slash above is escaped — the original wrote a literal
   * star-slash, which terminated this block comment early and made the
   * whole file a syntax error.)
   *
   * @param {object} env - Worker bindings (KV_DATA, optional NOTIFICATION_WEBHOOK).
   * @param {object} ctx - Execution context; used to deliver the webhook
   *   notification without blocking completion.
   * @returns {Promise<object>} The health-check result.
   */
  async 'health-probe'(env, ctx) {
    console.log('Running health probe...');

    // Example: check external dependencies
    const checks = {
      timestamp: new Date().toISOString(),
      status: 'healthy'
    };

    await saveJobState(env, 'health-probe', {
      lastRun: new Date().toISOString(),
      lastResult: checks
    });

    // Optionally notify on unhealthy status
    if (checks.status !== 'healthy' && env.NOTIFICATION_WEBHOOK) {
      ctx.waitUntil(
        fetch(env.NOTIFICATION_WEBHOOK, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            text: `⚠️ Health probe failed: ${JSON.stringify(checks)}`
          })
        })
      );
    }

    return checks;
  }
};

// ── State helpers ────────────────────────────────────────────────────

/**
 * Load the persisted state for a job from KV.
 *
 * @param {object} env - Worker bindings; reads env.KV_DATA.
 * @param {string} jobName - Registry key, e.g. 'daily-cleanup'.
 * @returns {Promise<object>} Stored state, or {} when missing or on any KV
 *   error (state is best-effort, so read errors are deliberately swallowed).
 */
async function getJobState(env, jobName) {
  try {
    const data = await env.KV_DATA.get(`cron:${jobName}`, { type: 'json' });
    return data || {};
  } catch {
    return {};
  }
}

/**
 * Persist a job's state to KV under the `cron:<jobName>` key.
 *
 * @param {object} env - Worker bindings; writes env.KV_DATA.
 * @param {string} jobName - Registry key.
 * @param {object} state - JSON-serializable state blob.
 */
async function saveJobState(env, jobName, state) {
  await env.KV_DATA.put(`cron:${jobName}`, JSON.stringify(state));
}
|
|
111
|
+
|
|
112
|
+
// ── HTTP Routes (for monitoring) ─────────────────────────────────────

/** Route table keyed by "METHOD /path"; :name segments are path params. */
const routes = {
  'GET /health': async () => jsonResponse({ status: 'healthy', type: 'cron-worker' }),

  'GET /api/jobs': async (req, env) => {
    // Report the persisted state of every registered job.
    const statuses = await Promise.all(
      Object.keys(jobs).map(async (name) => ({
        name,
        state: await getJobState(env, name)
      }))
    );
    return jsonResponse({ jobs: statuses });
  },

  'GET /api/jobs/:name': async (req, env) => {
    const name = req.params?.name;
    if (!jobs[name]) {
      return jsonResponse({ error: `Job '${name}' not found` }, 404);
    }
    return jsonResponse({ name, state: await getJobState(env, name) });
  },

  // Manual trigger endpoint (useful for testing)
  'POST /api/jobs/:name/trigger': async (req, env, ctx) => {
    const name = req.params?.name;
    if (!jobs[name]) {
      return jsonResponse({ error: `Job '${name}' not found` }, 404);
    }
    return jsonResponse({ triggered: true, name, result: await jobs[name](env, ctx) });
  }
};

/** Serialize data as a JSON Response with the given status code. */
function jsonResponse(data, status = 200) {
  const headers = { 'Content-Type': 'application/json' };
  return new Response(JSON.stringify(data), { status, headers });
}
|
|
156
|
+
|
|
157
|
+
// ── Worker entry point ────────────────────────────────────────────────

/**
 * Resolve a request against the route table. Supports exact keys
 * ("GET /health") and single-segment ":param" placeholders
 * ("GET /api/jobs/:name"). BUG FIX: the original did only an exact
 * `"METHOD path"` lookup, so the :name routes were unreachable and
 * req.params was never populated.
 *
 * @param {string} method - HTTP method.
 * @param {string} pathname - Request URL path.
 * @returns {{handler: Function, params: object}|null} Match or null.
 */
function matchRoute(method, pathname) {
  const exact = routes[`${method} ${pathname}`];
  if (exact) {
    return { handler: exact, params: {} };
  }

  const parts = pathname.split('/');
  for (const [key, handler] of Object.entries(routes)) {
    const spaceAt = key.indexOf(' ');
    const routeMethod = key.slice(0, spaceAt);
    const routePath = key.slice(spaceAt + 1);
    if (routeMethod !== method || !routePath.includes(':')) continue;

    const routeParts = routePath.split('/');
    if (routeParts.length !== parts.length) continue;

    const params = {};
    let matched = true;
    for (let i = 0; i < routeParts.length; i += 1) {
      if (routeParts[i].startsWith(':')) {
        params[routeParts[i].slice(1)] = decodeURIComponent(parts[i]);
      } else if (routeParts[i] !== parts[i]) {
        matched = false;
        break;
      }
    }
    if (matched) return { handler, params };
  }
  return null;
}

export default {
  /**
   * HTTP handler (for monitoring/manual triggers). Captured :param values
   * are attached as request.params, which the route handlers already read.
   */
  async fetch(request, env, ctx) {
    envGuard.check(env);
    const url = new URL(request.url);
    const match = matchRoute(request.method, url.pathname);

    if (!match) {
      return jsonResponse({ error: 'Not Found' }, 404);
    }

    // Handlers read req.params?.name — expose the captured path params.
    request.params = match.params;

    return middleware.execute(request, () => match.handler(request, env, ctx));
  },

  /**
   * Scheduled handler (runs on cron triggers). Maps the triggering cron
   * expression to a registered job; falls back to running every job when
   * the expression is unmapped.
   */
  async scheduled(event, env, ctx) {
    console.log(`Cron triggered at ${new Date(event.scheduledTime).toISOString()}, cron: ${event.cron}`);

    // Map cron expressions to job names
    const cronMap = {
      '0 0 * * *': 'daily-cleanup',
      '*/5 * * * *': 'health-probe'
    };

    const jobName = cronMap[event.cron];

    if (jobName && jobs[jobName]) {
      try {
        const result = await jobs[jobName](env, ctx);
        console.log(`Job ${jobName} completed:`, result);
      } catch (error) {
        console.error(`Job ${jobName} failed:`, error.message);

        // Track failure so the monitoring endpoints can surface it.
        await saveJobState(env, jobName, {
          lastRun: new Date().toISOString(),
          lastError: error.message,
          status: 'failed'
        });
      }
    } else {
      console.warn(`No job mapped to cron expression: ${event.cron}`);
      // Run all jobs if no specific mapping
      for (const [name, job] of Object.entries(jobs)) {
        try {
          await job(env, ctx);
        } catch (error) {
          console.error(`Job ${name} failed:`, error.message);
        }
      }
    }
  }
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "{{SERVICE_NAME}}",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"private": true,
|
|
5
|
+
"type": "module",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"dev": "wrangler dev",
|
|
8
|
+
"deploy": "wrangler deploy",
|
|
9
|
+
"test": "vitest"
|
|
10
|
+
},
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"@tamyla/clodo-framework": "^4.4.1"
|
|
13
|
+
},
|
|
14
|
+
"devDependencies": {
|
|
15
|
+
"wrangler": "^3.0.0",
|
|
16
|
+
"vitest": "^2.0.0"
|
|
17
|
+
}
|
|
18
|
+
}
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Edge Proxy Worker Template — @tamyla/clodo-framework
|
|
3
|
+
*
|
|
4
|
+
* An edge proxy/gateway with:
|
|
5
|
+
* - Request rewriting & URL mapping
|
|
6
|
+
* - Response caching (Cache API)
|
|
7
|
+
* - Rate limiting
|
|
8
|
+
* - Header manipulation
|
|
9
|
+
* - A/B routing
|
|
10
|
+
*
|
|
11
|
+
* Deploy with: npx wrangler deploy
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import {
|
|
15
|
+
createCorsMiddleware,
|
|
16
|
+
createErrorHandler,
|
|
17
|
+
createLogger,
|
|
18
|
+
createRateLimitGuard,
|
|
19
|
+
composeMiddleware,
|
|
20
|
+
createEnvironmentGuard
|
|
21
|
+
} from '@tamyla/clodo-framework';
|
|
22
|
+
|
|
23
|
+
// ── Environment ──────────────────────────────────────────────────────
// Nothing is strictly required; UPSTREAM_URL overrides routeMap upstreams.
const envGuard = createEnvironmentGuard({
  required: [],
  optional: ['KV_CONFIG', 'UPSTREAM_URL']
});

// CORS → logging → rate limiting → error handling, in that order.
const middlewareStack = [
  createCorsMiddleware({ origins: ['*'] }),
  createLogger({ prefix: 'edge-proxy' }),
  createRateLimitGuard({ maxRequests: 200, windowMs: 60000 }),
  createErrorHandler({ includeStack: false })
];
const middleware = composeMiddleware(...middlewareStack);
|
|
35
|
+
|
|
36
|
+
// ── Route mapping (path rewrites) ────────────────────────────────────
const routeMap = {
  // Rewrite /api/* to upstream service
  '/api/': {
    upstream: 'https://api.example.com',
    stripPrefix: '/api',
    headers: { 'X-Forwarded-By': 'clodo-proxy' }
  },
  // Serve static assets from R2/another origin
  '/assets/': {
    upstream: 'https://cdn.example.com',
    stripPrefix: '',
    cache: { ttl: 86400 } // Cache for 24h
  }
};

// ── Proxy logic ──────────────────────────────────────────────────────

/**
 * Proxy an incoming request to the upstream configured in routeMap.
 *
 * - /health answers locally; unmatched paths return 404.
 * - GET responses for routes with a cache config are served from and
 *   written to the Cache API (X-Cache: HIT/MISS).
 * - env.UPSTREAM_URL, when set, overrides every route's upstream.
 *
 * @param {Request} request - Incoming request.
 * @param {object} env - Worker bindings (optional UPSTREAM_URL).
 * @param {object} ctx - Execution context; used for async cache writes.
 * @returns {Promise<Response>} The proxied (or locally generated) response.
 */
async function proxyRequest(request, env, ctx) {
  const url = new URL(request.url);

  // Find the first routeMap entry whose prefix matches the path.
  let matchedRoute = null;
  for (const [prefix, config] of Object.entries(routeMap)) {
    if (url.pathname.startsWith(prefix)) {
      matchedRoute = config;
      break;
    }
  }

  if (!matchedRoute) {
    // Health check
    if (url.pathname === '/health') {
      return Response.json({ status: 'healthy', type: 'edge-proxy' });
    }

    // No route matched
    return Response.json({ error: 'No route matched', path: url.pathname }, { status: 404 });
  }

  // Build upstream URL. BUG FIX: the original did
  //   url.pathname.replace(matchedPrefix, stripPrefix || '/')
  // which for '/api/users' produced '/apiusers' — it substituted the
  // matched prefix with the stripPrefix text instead of removing it.
  // We now slice the configured prefix off the front of the path.
  const upstreamBase = env.UPSTREAM_URL || matchedRoute.upstream;
  let strippedPath = url.pathname;
  if (matchedRoute.stripPrefix && url.pathname.startsWith(matchedRoute.stripPrefix)) {
    strippedPath = url.pathname.slice(matchedRoute.stripPrefix.length) || '/';
  }
  const upstreamUrl = new URL(strippedPath + url.search, upstreamBase);

  // Check cache first
  if (matchedRoute.cache && request.method === 'GET') {
    const cache = caches.default;
    const cacheKey = new Request(upstreamUrl.toString(), request);
    const cached = await cache.match(cacheKey);

    if (cached) {
      const response = new Response(cached.body, cached);
      response.headers.set('X-Cache', 'HIT');
      return response;
    }
  }

  // Build upstream request
  const headers = new Headers(request.headers);

  // Add custom headers
  if (matchedRoute.headers) {
    for (const [key, value] of Object.entries(matchedRoute.headers)) {
      headers.set(key, value);
    }
  }

  // Forward the original host
  headers.set('X-Forwarded-Host', url.hostname);
  headers.set('X-Forwarded-Proto', url.protocol.replace(':', ''));

  const upstreamRequest = new Request(upstreamUrl.toString(), {
    method: request.method,
    headers,
    body: request.method !== 'GET' && request.method !== 'HEAD' ? request.body : null,
    redirect: 'follow'
  });

  // Fetch from upstream
  const response = await fetch(upstreamRequest);

  // Create mutable response (upstream response headers are immutable)
  const proxyResponse = new Response(response.body, {
    status: response.status,
    statusText: response.statusText,
    headers: new Headers(response.headers)
  });

  proxyResponse.headers.set('X-Cache', 'MISS');
  proxyResponse.headers.set('X-Proxy', 'clodo-edge-proxy');

  // Cache response if configured; waitUntil lets the write outlive the response.
  if (matchedRoute.cache && request.method === 'GET' && response.ok) {
    const cache = caches.default;
    const cacheKey = new Request(upstreamUrl.toString(), request);
    const toCache = proxyResponse.clone();
    toCache.headers.set('Cache-Control', `public, max-age=${matchedRoute.cache.ttl}`);
    ctx.waitUntil(cache.put(cacheKey, toCache));
  }

  return proxyResponse;
}
|
|
143
|
+
|
|
144
|
+
// ── Worker entry point ────────────────────────────────────────────────
export default {
  /**
   * Validate bindings, then run the proxy pipeline under the shared
   * middleware stack.
   */
  async fetch(request, env, ctx) {
    envGuard.check(env);
    const handle = () => proxyRequest(request, env, ctx);
    return middleware.execute(request, handle);
  }
};
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "{{SERVICE_NAME}}",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"private": true,
|
|
5
|
+
"type": "module",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"dev": "wrangler dev",
|
|
8
|
+
"deploy": "wrangler deploy",
|
|
9
|
+
"test": "vitest"
|
|
10
|
+
},
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"@tamyla/clodo-framework": "^4.4.1"
|
|
13
|
+
},
|
|
14
|
+
"devDependencies": {
|
|
15
|
+
"wrangler": "^3.0.0"
|
|
16
|
+
}
|
|
17
|
+
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Minimal Worker Template — @tamyla/clodo-framework
|
|
3
|
+
*
|
|
4
|
+
* The absolute minimal starting point. Just a fetch handler,
|
|
5
|
+
* CORS, and a health check. Add what you need.
|
|
6
|
+
*
|
|
7
|
+
* Deploy with: npx wrangler deploy
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import {
|
|
11
|
+
createCorsMiddleware,
|
|
12
|
+
createErrorHandler,
|
|
13
|
+
composeMiddleware
|
|
14
|
+
} from '@tamyla/clodo-framework';
|
|
15
|
+
|
|
16
|
+
// CORS plus error handling — the smallest useful middleware stack.
const middleware = composeMiddleware(
  createCorsMiddleware({ origins: ['*'] }),
  createErrorHandler()
);

export default {
  /**
   * Minimal fetch handler: /health, a root landing response, 404 otherwise.
   */
  async fetch(request, env, ctx) {
    const { pathname } = new URL(request.url);

    return middleware.execute(request, async () => {
      switch (pathname) {
        case '/health':
          return Response.json({ status: 'healthy', timestamp: new Date().toISOString() });
        case '/':
          return Response.json({
            name: '{{SERVICE_NAME}}',
            message: 'Worker is running. Edit src/index.js to add your routes.'
          });
        default:
          return Response.json({ error: 'Not Found' }, { status: 404 });
      }
    });
  }
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "{{SERVICE_NAME}}",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"private": true,
|
|
5
|
+
"type": "module",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"dev": "wrangler dev",
|
|
8
|
+
"deploy": "wrangler deploy",
|
|
9
|
+
"test": "vitest"
|
|
10
|
+
},
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"@tamyla/clodo-framework": "^4.4.1"
|
|
13
|
+
},
|
|
14
|
+
"devDependencies": {
|
|
15
|
+
"wrangler": "^3.0.0",
|
|
16
|
+
"vitest": "^2.0.0",
|
|
17
|
+
"@cloudflare/vitest-pool-workers": "^0.5.0"
|
|
18
|
+
}
|
|
19
|
+
}
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Queue Processor Worker Template — @tamyla/clodo-framework
|
|
3
|
+
*
|
|
4
|
+
* A fully working queue consumer/producer with:
|
|
5
|
+
* - Queue message consumption with batch processing
|
|
6
|
+
* - Dead letter queue handling
|
|
7
|
+
* - Retry logic with exponential backoff
|
|
8
|
+
* - KV state tracking
|
|
9
|
+
* - HTTP API for sending messages
|
|
10
|
+
*
|
|
11
|
+
* Deploy with: npx wrangler deploy
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import {
|
|
15
|
+
createCorsMiddleware,
|
|
16
|
+
createErrorHandler,
|
|
17
|
+
createLogger,
|
|
18
|
+
composeMiddleware,
|
|
19
|
+
createEnvironmentGuard
|
|
20
|
+
} from '@tamyla/clodo-framework';
|
|
21
|
+
|
|
22
|
+
import {
|
|
23
|
+
QueueProducer,
|
|
24
|
+
QueueConsumer,
|
|
25
|
+
MessageBuilder,
|
|
26
|
+
MessageTypes
|
|
27
|
+
} from '@tamyla/clodo-framework/utilities';
|
|
28
|
+
|
|
29
|
+
// ── Environment validation ────────────────────────────────────────────
// The queue binding is mandatory; KV tracking and the DLQ are optional.
const envGuard = createEnvironmentGuard({
  required: ['QUEUE'],
  optional: ['KV_DATA', 'DEAD_LETTER_QUEUE']
});

// ── Middleware ────────────────────────────────────────────────────────
const middlewareStack = [
  createCorsMiddleware({ origins: ['*'] }),
  createLogger({ prefix: 'queue-worker' }),
  createErrorHandler()
];
const middleware = composeMiddleware(...middlewareStack);
|
|
41
|
+
|
|
42
|
+
// ── Message handlers (register per message type) ─────────────────────
const handlers = {
  /**
   * Process a generic task message.
   *
   * @param {object} message - Queue message; body carries { taskId, payload }.
   * @param {object} env - Worker bindings (optional KV_DATA for tracking).
   */
  async 'task.process'(message, env) {
    const { taskId, payload } = message.body;
    console.log(`Processing task ${taskId}:`, payload);

    // Your business logic here
    // Example: call an API, transform data, update a database

    // Track completion in KV if available
    if (env.KV_DATA) {
      await env.KV_DATA.put(`task:${taskId}`, JSON.stringify({
        status: 'completed',
        completedAt: new Date().toISOString(),
        result: { processed: true }
      }));
    }
  },

  /**
   * Process a notification message.
   *
   * @param {object} message - Body carries { recipient, subject, body }.
   * @param {object} env - Worker bindings.
   */
  async 'notification.send'(message, env) {
    const { recipient, subject, body } = message.body;
    console.log(`Sending notification to ${recipient}: ${subject}`);
    // Your notification logic here (email, webhook, push, etc.)
  },

  /**
   * Default handler for unknown message types — log and move on.
   */
  async 'default'(message, env) {
    console.warn(`Unknown message type: ${message.body?.type}`, message.body);
  }
};

// ── Queue consumer logic ─────────────────────────────────────────────

/**
 * Dispatch one queue message to its typed handler with retry/DLQ semantics.
 *
 * BUG FIX: the original derived the attempt number from message.body._attempt,
 * but the body is redelivered unchanged by message.retry(), so the counter was
 * stuck at its enqueue-time value (1): maxRetries could never be reached, the
 * dead-letter queue was never used, and failed messages retried forever. We
 * now prefer the platform-maintained message.attempts delivery counter,
 * falling back to the body field for producers/tests that set it.
 *
 * @param {object} message - Queue message (body, ack(), retry(), attempts).
 * @param {object} env - Worker bindings (optional DEAD_LETTER_QUEUE, KV_DATA).
 */
async function processMessage(message, env) {
  const type = message.body?.type || 'default';
  const handler = handlers[type] || handlers['default'];

  const maxRetries = message.body?.maxRetries || 3;
  const attempt = message.attempts ?? message.body?._attempt ?? 1;

  try {
    await handler(message, env);
    message.ack(); // Acknowledge successful processing
  } catch (error) {
    console.error(`Failed to process message (attempt ${attempt}/${maxRetries}):`, error.message);

    if (attempt >= maxRetries) {
      // Max retries exceeded — send to dead letter queue if available
      if (env.DEAD_LETTER_QUEUE) {
        await env.DEAD_LETTER_QUEUE.send({
          ...message.body,
          _error: error.message,
          _failedAt: new Date().toISOString(),
          _attempts: attempt
        });
      }

      // Track failure in KV
      if (env.KV_DATA && message.body?.taskId) {
        await env.KV_DATA.put(`task:${message.body.taskId}`, JSON.stringify({
          status: 'failed',
          error: error.message,
          failedAt: new Date().toISOString(),
          attempts: attempt
        }));
      }

      message.ack(); // Remove from queue after max retries
    } else {
      message.retry({ delaySeconds: Math.pow(2, attempt) * 10 }); // Exponential backoff
    }
  }
}
|
|
122
|
+
|
|
123
|
+
// ── HTTP Routes (for sending messages) ───────────────────────────────

/** Normalize a caller-supplied object into the queue message envelope. */
const toEnvelope = (input) => ({
  type: input.type || 'task.process',
  taskId: input.taskId || crypto.randomUUID(),
  payload: input.payload || input,
  _attempt: 1,
  enqueuedAt: new Date().toISOString()
});

const routes = {
  'GET /health': async () => jsonResponse({ status: 'healthy', type: 'queue-processor' }),

  'POST /api/enqueue': async (req, env) => {
    const message = toEnvelope(await req.json());
    await env.QUEUE.send(message);
    return jsonResponse({ queued: true, taskId: message.taskId, type: message.type }, 202);
  },

  'POST /api/enqueue/batch': async (req, env) => {
    const { messages } = await req.json();
    const batch = messages.map((m) => ({ body: toEnvelope(m) }));
    await env.QUEUE.sendBatch(batch);
    return jsonResponse(
      { queued: true, count: batch.length, taskIds: batch.map((b) => b.body.taskId) },
      202
    );
  },

  'GET /api/task/:taskId': async (req, env) => {
    if (!env.KV_DATA) {
      return jsonResponse({ error: 'KV storage not configured' }, 501);
    }
    const taskId = req.params?.taskId;
    const data = await env.KV_DATA.get(`task:${taskId}`, { type: 'json' });
    if (!data) {
      return jsonResponse({ error: 'Task not found' }, 404);
    }
    return jsonResponse({ taskId, ...data });
  }
};

/** Serialize data as a JSON Response with the given status code. */
function jsonResponse(data, status = 200) {
  const headers = { 'Content-Type': 'application/json' };
  return new Response(JSON.stringify(data), { status, headers });
}
|
|
188
|
+
|
|
189
|
+
// ── Worker entry point ────────────────────────────────────────────────

/**
 * Resolve a request against the route table. Supports exact keys
 * ("GET /health") and single-segment ":param" placeholders
 * ("GET /api/task/:taskId"). BUG FIX: the original did only an exact
 * `"METHOD path"` lookup, so the :taskId route was unreachable and task
 * status lookups always returned 404.
 *
 * @param {string} method - HTTP method.
 * @param {string} pathname - Request URL path.
 * @returns {{handler: Function, params: object}|null} Match or null.
 */
function matchRoute(method, pathname) {
  const exact = routes[`${method} ${pathname}`];
  if (exact) {
    return { handler: exact, params: {} };
  }

  const parts = pathname.split('/');
  for (const [key, handler] of Object.entries(routes)) {
    const spaceAt = key.indexOf(' ');
    const routeMethod = key.slice(0, spaceAt);
    const routePath = key.slice(spaceAt + 1);
    if (routeMethod !== method || !routePath.includes(':')) continue;

    const routeParts = routePath.split('/');
    if (routeParts.length !== parts.length) continue;

    const params = {};
    let matched = true;
    for (let i = 0; i < routeParts.length; i += 1) {
      if (routeParts[i].startsWith(':')) {
        params[routeParts[i].slice(1)] = decodeURIComponent(parts[i]);
      } else if (routeParts[i] !== parts[i]) {
        matched = false;
        break;
      }
    }
    if (matched) return { handler, params };
  }
  return null;
}

export default {
  /**
   * HTTP handler. Captured :param values are attached as request.params,
   * which the route handlers already read.
   */
  async fetch(request, env, ctx) {
    envGuard.check(env);
    const url = new URL(request.url);
    const match = matchRoute(request.method, url.pathname);

    if (!match) {
      return jsonResponse({ error: 'Not Found' }, 404);
    }

    // Handlers read req.params?.taskId — expose the captured path params.
    request.params = match.params;

    return middleware.execute(request, () => match.handler(request, env, ctx));
  },

  /**
   * Queue consumer handler — processes messages sequentially so each
   * message's ack/retry decision is made before the next is handled.
   */
  async queue(batch, env, ctx) {
    console.log(`Processing batch of ${batch.messages.length} messages`);

    for (const message of batch.messages) {
      await processMessage(message, env);
    }
  }
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "{{SERVICE_NAME}}",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"private": true,
|
|
5
|
+
"type": "module",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"dev": "wrangler dev",
|
|
8
|
+
"deploy": "wrangler deploy",
|
|
9
|
+
"test": "vitest"
|
|
10
|
+
},
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"@tamyla/clodo-framework": "^4.4.1"
|
|
13
|
+
},
|
|
14
|
+
"devDependencies": {
|
|
15
|
+
"wrangler": "^3.0.0",
|
|
16
|
+
"vitest": "^2.0.0",
|
|
17
|
+
"@cloudflare/vitest-pool-workers": "^0.5.0"
|
|
18
|
+
}
|
|
19
|
+
}
|