@frontmcp/skills 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +135 -0
- package/catalog/TEMPLATE.md +49 -0
- package/catalog/adapters/create-adapter/SKILL.md +127 -0
- package/catalog/adapters/official-adapters/SKILL.md +136 -0
- package/catalog/auth/configure-auth/SKILL.md +250 -0
- package/catalog/auth/configure-auth/references/auth-modes.md +77 -0
- package/catalog/auth/configure-session/SKILL.md +201 -0
- package/catalog/config/configure-elicitation/SKILL.md +136 -0
- package/catalog/config/configure-http/SKILL.md +167 -0
- package/catalog/config/configure-throttle/SKILL.md +189 -0
- package/catalog/config/configure-throttle/references/guard-config.md +68 -0
- package/catalog/config/configure-transport/SKILL.md +151 -0
- package/catalog/config/configure-transport/references/protocol-presets.md +57 -0
- package/catalog/deployment/build-for-browser/SKILL.md +95 -0
- package/catalog/deployment/build-for-cli/SKILL.md +100 -0
- package/catalog/deployment/build-for-sdk/SKILL.md +218 -0
- package/catalog/deployment/deploy-to-cloudflare/SKILL.md +192 -0
- package/catalog/deployment/deploy-to-lambda/SKILL.md +304 -0
- package/catalog/deployment/deploy-to-node/SKILL.md +229 -0
- package/catalog/deployment/deploy-to-node/references/Dockerfile.example +45 -0
- package/catalog/deployment/deploy-to-vercel/SKILL.md +196 -0
- package/catalog/deployment/deploy-to-vercel/references/vercel.json.example +60 -0
- package/catalog/development/create-agent/SKILL.md +563 -0
- package/catalog/development/create-agent/references/llm-config.md +46 -0
- package/catalog/development/create-job/SKILL.md +566 -0
- package/catalog/development/create-prompt/SKILL.md +400 -0
- package/catalog/development/create-provider/SKILL.md +233 -0
- package/catalog/development/create-resource/SKILL.md +437 -0
- package/catalog/development/create-skill/SKILL.md +526 -0
- package/catalog/development/create-skill-with-tools/SKILL.md +579 -0
- package/catalog/development/create-tool/SKILL.md +418 -0
- package/catalog/development/create-tool/references/output-schema-types.md +56 -0
- package/catalog/development/create-tool/references/tool-annotations.md +34 -0
- package/catalog/development/create-workflow/SKILL.md +709 -0
- package/catalog/development/decorators-guide/SKILL.md +598 -0
- package/catalog/plugins/create-plugin/SKILL.md +336 -0
- package/catalog/plugins/create-plugin-hooks/SKILL.md +282 -0
- package/catalog/plugins/official-plugins/SKILL.md +667 -0
- package/catalog/setup/frontmcp-skills-usage/SKILL.md +200 -0
- package/catalog/setup/multi-app-composition/SKILL.md +358 -0
- package/catalog/setup/nx-workflow/SKILL.md +357 -0
- package/catalog/setup/project-structure-nx/SKILL.md +186 -0
- package/catalog/setup/project-structure-standalone/SKILL.md +153 -0
- package/catalog/setup/setup-project/SKILL.md +493 -0
- package/catalog/setup/setup-redis/SKILL.md +385 -0
- package/catalog/setup/setup-sqlite/SKILL.md +359 -0
- package/catalog/skills-manifest.json +414 -0
- package/catalog/testing/setup-testing/SKILL.md +539 -0
- package/catalog/testing/setup-testing/references/test-auth.md +88 -0
- package/catalog/testing/setup-testing/references/test-browser-build.md +57 -0
- package/catalog/testing/setup-testing/references/test-cli-binary.md +48 -0
- package/catalog/testing/setup-testing/references/test-direct-client.md +62 -0
- package/catalog/testing/setup-testing/references/test-e2e-handler.md +51 -0
- package/catalog/testing/setup-testing/references/test-tool-unit.md +41 -0
- package/package.json +34 -0
- package/src/index.d.ts +3 -0
- package/src/index.js +16 -0
- package/src/index.js.map +1 -0
- package/src/loader.d.ts +46 -0
- package/src/loader.js +75 -0
- package/src/loader.js.map +1 -0
- package/src/manifest.d.ts +81 -0
- package/src/manifest.js +26 -0
- package/src/manifest.js.map +1 -0
|
@@ -0,0 +1,566 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: create-job
|
|
3
|
+
description: Create long-running jobs with retry policies, progress tracking, and permission controls. Use when building background tasks, data processing pipelines, or scheduled operations.
|
|
4
|
+
tags: [job, background, retry, progress, long-running]
|
|
5
|
+
priority: 6
|
|
6
|
+
visibility: both
|
|
7
|
+
license: Apache-2.0
|
|
8
|
+
metadata:
|
|
9
|
+
docs: https://docs.agentfront.dev/frontmcp/servers/jobs
|
|
10
|
+
---
|
|
11
|
+
|
|
12
|
+
# Creating Jobs
|
|
13
|
+
|
|
14
|
+
Jobs are long-running background tasks with built-in retry policies, progress tracking, and permission controls. Unlike tools (which execute synchronously within a request), jobs run asynchronously and persist their state across retries and restarts.
|
|
15
|
+
|
|
16
|
+
## When to Use @Job
|
|
17
|
+
|
|
18
|
+
Use `@Job` when you need to run work that may take longer than a request cycle, needs retry guarantees, or should track progress over time. Examples include:
|
|
19
|
+
|
|
20
|
+
- Data processing and ETL pipelines
|
|
21
|
+
- File imports and exports
|
|
22
|
+
- Report generation
|
|
23
|
+
- Scheduled maintenance tasks
|
|
24
|
+
- External API synchronization
|
|
25
|
+
|
|
26
|
+
If the work completes in under a few seconds and does not need retry or progress tracking, use a `@Tool` instead.
|
|
27
|
+
|
|
28
|
+
## Class-Based Pattern
|
|
29
|
+
|
|
30
|
+
Create a class extending `JobContext<In, Out>` and implement the `execute(input: In): Promise<Out>` method. The `@Job` decorator requires `name`, `inputSchema`, and `outputSchema`.
|
|
31
|
+
|
|
32
|
+
### JobMetadata Fields
|
|
33
|
+
|
|
34
|
+
| Field | Type | Required | Default | Description |
|
|
35
|
+
| -------------- | ------------------------ | -------- | ---------------- | -------------------------------------- |
|
|
36
|
+
| `name` | `string` | Yes | -- | Unique job name |
|
|
37
|
+
| `inputSchema` | `ZodRawShape` | Yes | -- | Zod raw shape for input validation |
|
|
38
|
+
| `outputSchema` | `ZodRawShape \| ZodType` | Yes | -- | Zod schema for output validation |
|
|
39
|
+
| `description` | `string` | No | -- | Human-readable description |
|
|
40
|
+
| `timeout` | `number` | No | `300000` (5 min) | Maximum execution time in milliseconds |
|
|
41
|
+
| `retry` | `RetryPolicy` | No | -- | Retry configuration (see below) |
|
|
42
|
+
| `tags` | `string[]` | No | -- | Categorization tags |
|
|
43
|
+
| `labels` | `Record<string, string>` | No | -- | Key-value labels for filtering |
|
|
44
|
+
| `permissions` | `JobPermissions` | No | -- | Access control configuration |
|
|
45
|
+
|
|
46
|
+
### Basic Example
|
|
47
|
+
|
|
48
|
+
```typescript
|
|
49
|
+
import { Job, JobContext } from '@frontmcp/sdk';
|
|
50
|
+
import { z } from 'zod';
|
|
51
|
+
|
|
52
|
+
@Job({
|
|
53
|
+
name: 'generate-report',
|
|
54
|
+
description: 'Generate a PDF report from data',
|
|
55
|
+
inputSchema: {
|
|
56
|
+
reportType: z.enum(['sales', 'inventory', 'users']).describe('Type of report'),
|
|
57
|
+
dateRange: z.object({
|
|
58
|
+
from: z.string().describe('Start date (ISO 8601)'),
|
|
59
|
+
to: z.string().describe('End date (ISO 8601)'),
|
|
60
|
+
}),
|
|
61
|
+
format: z.enum(['pdf', 'csv']).default('pdf').describe('Output format'),
|
|
62
|
+
},
|
|
63
|
+
outputSchema: {
|
|
64
|
+
url: z.string().url(),
|
|
65
|
+
pageCount: z.number().int(),
|
|
66
|
+
generatedAt: z.string(),
|
|
67
|
+
},
|
|
68
|
+
timeout: 120000,
|
|
69
|
+
})
|
|
70
|
+
class GenerateReportJob extends JobContext {
|
|
71
|
+
async execute(input: {
|
|
72
|
+
reportType: 'sales' | 'inventory' | 'users';
|
|
73
|
+
dateRange: { from: string; to: string };
|
|
74
|
+
format: 'pdf' | 'csv';
|
|
75
|
+
}) {
|
|
76
|
+
this.log(`Starting ${input.reportType} report generation`);
|
|
77
|
+
|
|
78
|
+
this.progress(10, 100, 'Fetching data');
|
|
79
|
+
const data = await this.fetchReportData(input.reportType, input.dateRange);
|
|
80
|
+
|
|
81
|
+
this.progress(50, 100, 'Generating document');
|
|
82
|
+
const document = await this.buildDocument(data, input.format);
|
|
83
|
+
|
|
84
|
+
this.progress(90, 100, 'Uploading');
|
|
85
|
+
const url = await this.uploadDocument(document);
|
|
86
|
+
|
|
87
|
+
this.progress(100, 100, 'Complete');
|
|
88
|
+
return {
|
|
89
|
+
url,
|
|
90
|
+
pageCount: document.pages,
|
|
91
|
+
generatedAt: new Date().toISOString(),
|
|
92
|
+
};
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
private async fetchReportData(type: string, range: { from: string; to: string }) {
|
|
96
|
+
return { rows: [], count: 0 };
|
|
97
|
+
}
|
|
98
|
+
private async buildDocument(data: unknown, format: string) {
|
|
99
|
+
return { pages: 5, buffer: Buffer.alloc(0) };
|
|
100
|
+
}
|
|
101
|
+
private async uploadDocument(doc: { buffer: Buffer }) {
|
|
102
|
+
return 'https://storage.example.com/reports/report-001.pdf';
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
## JobContext Methods and Properties
|
|
108
|
+
|
|
109
|
+
`JobContext` extends `ExecutionContextBase` and adds job-specific capabilities:
|
|
110
|
+
|
|
111
|
+
### Methods
|
|
112
|
+
|
|
113
|
+
- `execute(input: In): Promise<Out>` -- the main method you implement. Receives validated input, must return a value matching `outputSchema`.
|
|
114
|
+
- `this.progress(pct: number, total?: number, msg?: string)` -- report progress. `pct` is the current progress value (not necessarily a percentage), `total` is the maximum it is measured against (default 100), and `msg` is an optional status message.
|
|
115
|
+
- `this.log(message: string)` -- append a log entry to the job's log. Persisted with the job state and retrievable after completion.
|
|
116
|
+
- `this.respond(value: Out)` -- explicitly set the job output. Alternatively, return the value from `execute()`.
|
|
117
|
+
- `this.getLogs(): string[]` -- retrieve all log entries recorded so far.
|
|
118
|
+
- `this.get(token)` -- resolve a dependency from DI (throws if not found).
|
|
119
|
+
- `this.tryGet(token)` -- resolve a dependency from DI (returns `undefined` if not found).
|
|
120
|
+
- `this.fail(err)` -- abort execution, triggers error flow (never returns).
|
|
121
|
+
- `this.mark(stage)` -- set the active execution stage for debugging/tracking.
|
|
122
|
+
- `this.fetch(input, init?)` -- HTTP fetch with context propagation.
|
|
123
|
+
|
|
124
|
+
### Properties
|
|
125
|
+
|
|
126
|
+
- `this.attempt` -- the current attempt number (1-based). On the first run, `this.attempt` is `1`. On the first retry, it is `2`, and so on.
|
|
127
|
+
- `this.input` -- the validated input object.
|
|
128
|
+
- `this.metadata` -- job metadata from the decorator.
|
|
129
|
+
- `this.scope` -- the current scope instance.
|
|
130
|
+
|
|
131
|
+
## Retry Configuration
|
|
132
|
+
|
|
133
|
+
Configure automatic retries with exponential backoff using the `retry` field.
|
|
134
|
+
|
|
135
|
+
### RetryPolicy Fields
|
|
136
|
+
|
|
137
|
+
| Field | Type | Default | Description |
|
|
138
|
+
| ------------------- | -------- | ------- | ---------------------------------------------------- |
|
|
139
|
+
| `maxAttempts` | `number` | `1` | Total number of attempts (including the initial run) |
|
|
140
|
+
| `backoffMs` | `number` | `1000` | Initial delay before the first retry in milliseconds |
|
|
141
|
+
| `backoffMultiplier` | `number` | `2` | Multiplier applied to backoff after each retry |
|
|
142
|
+
| `maxBackoffMs` | `number` | `30000` | Maximum backoff duration in milliseconds |
|
|
143
|
+
|
|
144
|
+
### Example with Retry
|
|
145
|
+
|
|
146
|
+
```typescript
|
|
147
|
+
@Job({
|
|
148
|
+
name: 'sync-external-api',
|
|
149
|
+
description: 'Synchronize data from an external API',
|
|
150
|
+
inputSchema: {
|
|
151
|
+
endpoint: z.string().url().describe('API endpoint to sync from'),
|
|
152
|
+
batchSize: z.number().int().min(1).max(1000).default(100),
|
|
153
|
+
},
|
|
154
|
+
outputSchema: {
|
|
155
|
+
synced: z.number().int(),
|
|
156
|
+
errors: z.number().int(),
|
|
157
|
+
},
|
|
158
|
+
timeout: 600000, // 10 minutes
|
|
159
|
+
retry: {
|
|
160
|
+
maxAttempts: 5,
|
|
161
|
+
backoffMs: 2000,
|
|
162
|
+
backoffMultiplier: 2,
|
|
163
|
+
maxBackoffMs: 60000,
|
|
164
|
+
},
|
|
165
|
+
})
|
|
166
|
+
class SyncExternalApiJob extends JobContext {
|
|
167
|
+
async execute(input: { endpoint: string; batchSize: number }) {
|
|
168
|
+
this.log(`Attempt ${this.attempt}: syncing from ${input.endpoint}`);
|
|
169
|
+
|
|
170
|
+
const response = await this.fetch(input.endpoint);
|
|
171
|
+
if (!response.ok) {
|
|
172
|
+
this.fail(new Error(`API returned ${response.status}`));
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
const data = await response.json();
|
|
176
|
+
let synced = 0;
|
|
177
|
+
let errors = 0;
|
|
178
|
+
|
|
179
|
+
for (let i = 0; i < data.items.length; i += input.batchSize) {
|
|
180
|
+
const batch = data.items.slice(i, i + input.batchSize);
|
|
181
|
+
this.progress(i, data.items.length, `Processing batch ${Math.floor(i / input.batchSize) + 1}`);
|
|
182
|
+
|
|
183
|
+
try {
|
|
184
|
+
await this.processBatch(batch);
|
|
185
|
+
synced += batch.length;
|
|
186
|
+
} catch (err) {
|
|
187
|
+
errors += batch.length;
|
|
188
|
+
this.log(`Batch error: ${err}`);
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
return { synced, errors };
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
private async processBatch(batch: unknown[]) {
|
|
196
|
+
// process batch
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
```
|
|
200
|
+
|
|
201
|
+
With this configuration, if the job fails:
|
|
202
|
+
|
|
203
|
+
- Attempt 1: immediate execution
|
|
204
|
+
- Attempt 2: retry after 2000ms
|
|
205
|
+
- Attempt 3: retry after 4000ms
|
|
206
|
+
- Attempt 4: retry after 8000ms
|
|
207
|
+
- Attempt 5: retry after 16000ms
|
|
208
|
+
|
|
209
|
+
The backoff is capped at `maxBackoffMs` (60000ms), so no delay exceeds 60 seconds.
|
|
210
|
+
|
|
211
|
+
## Progress Tracking
|
|
212
|
+
|
|
213
|
+
Use `this.progress(pct, total?, msg?)` to report job progress. The framework persists progress and makes it queryable.
|
|
214
|
+
|
|
215
|
+
```typescript
|
|
216
|
+
@Job({
|
|
217
|
+
name: 'import-csv',
|
|
218
|
+
description: 'Import records from a CSV file',
|
|
219
|
+
inputSchema: {
|
|
220
|
+
fileUrl: z.string().url(),
|
|
221
|
+
tableName: z.string(),
|
|
222
|
+
},
|
|
223
|
+
outputSchema: {
|
|
224
|
+
imported: z.number().int(),
|
|
225
|
+
skipped: z.number().int(),
|
|
226
|
+
},
|
|
227
|
+
})
|
|
228
|
+
class ImportCsvJob extends JobContext {
|
|
229
|
+
async execute(input: { fileUrl: string; tableName: string }) {
|
|
230
|
+
this.progress(0, 100, 'Downloading file');
|
|
231
|
+
const rows = await this.downloadAndParse(input.fileUrl);
|
|
232
|
+
|
|
233
|
+
let imported = 0;
|
|
234
|
+
let skipped = 0;
|
|
235
|
+
|
|
236
|
+
for (let i = 0; i < rows.length; i++) {
|
|
237
|
+
this.progress(i + 1, rows.length, `Importing row ${i + 1} of ${rows.length}`);
|
|
238
|
+
|
|
239
|
+
try {
|
|
240
|
+
await this.insertRow(input.tableName, rows[i]);
|
|
241
|
+
imported++;
|
|
242
|
+
} catch {
|
|
243
|
+
skipped++;
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
this.log(`Import complete: ${imported} imported, ${skipped} skipped`);
|
|
248
|
+
return { imported, skipped };
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
private async downloadAndParse(url: string) {
|
|
252
|
+
return [];
|
|
253
|
+
}
|
|
254
|
+
private async insertRow(table: string, row: unknown) {
|
|
255
|
+
/* insert */
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
## Permissions
|
|
261
|
+
|
|
262
|
+
Control who can interact with jobs using the `permissions` field. Permissions support action-based access, roles, scopes, and custom predicates.
|
|
263
|
+
|
|
264
|
+
### Permission Actions
|
|
265
|
+
|
|
266
|
+
| Action | Description |
|
|
267
|
+
| --------- | -------------------------- |
|
|
268
|
+
| `create` | Submit a new job |
|
|
269
|
+
| `read` | View job status and output |
|
|
270
|
+
| `update` | Modify a running job |
|
|
271
|
+
| `delete` | Cancel or remove a job |
|
|
272
|
+
| `execute` | Trigger job execution |
|
|
273
|
+
| `list` | List jobs matching filters |
|
|
274
|
+
|
|
275
|
+
### Permission Configuration
|
|
276
|
+
|
|
277
|
+
```typescript
|
|
278
|
+
@Job({
|
|
279
|
+
name: 'data-export',
|
|
280
|
+
description: 'Export data to external storage',
|
|
281
|
+
inputSchema: {
|
|
282
|
+
dataset: z.string(),
|
|
283
|
+
destination: z.string().url(),
|
|
284
|
+
},
|
|
285
|
+
outputSchema: {
|
|
286
|
+
exportedRows: z.number().int(),
|
|
287
|
+
location: z.string().url(),
|
|
288
|
+
},
|
|
289
|
+
permissions: {
|
|
290
|
+
actions: ['create', 'read', 'execute', 'list'],
|
|
291
|
+
roles: ['admin', 'data-engineer'],
|
|
292
|
+
scopes: ['jobs:write', 'data:export'],
|
|
293
|
+
predicate: (ctx) => ctx.user?.department === 'engineering',
|
|
294
|
+
},
|
|
295
|
+
})
|
|
296
|
+
class DataExportJob extends JobContext {
|
|
297
|
+
async execute(input: { dataset: string; destination: string }) {
|
|
298
|
+
// Only users with the right roles, scopes, or matching the predicate can run this
|
|
299
|
+
this.log(`Exporting dataset: ${input.dataset}`);
|
|
300
|
+
const rows = await this.exportData(input.dataset, input.destination);
|
|
301
|
+
return { exportedRows: rows, location: input.destination };
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
private async exportData(dataset: string, destination: string) {
|
|
305
|
+
return 1000;
|
|
306
|
+
}
|
|
307
|
+
}
|
|
308
|
+
```
|
|
309
|
+
|
|
310
|
+
### Combining Permission Strategies
|
|
311
|
+
|
|
312
|
+
Permission fields combine with AND semantics -- every condition you specify must be satisfied for access to be granted:
|
|
313
|
+
|
|
314
|
+
```typescript
|
|
315
|
+
permissions: {
|
|
316
|
+
// Actions this job supports
|
|
317
|
+
actions: ['create', 'read', 'execute', 'delete', 'list'],
|
|
318
|
+
|
|
319
|
+
// Role-based: user must have one of these roles
|
|
320
|
+
roles: ['admin', 'operator'],
|
|
321
|
+
|
|
322
|
+
// Scope-based: user token must include these scopes
|
|
323
|
+
scopes: ['jobs:manage'],
|
|
324
|
+
|
|
325
|
+
// Custom predicate: arbitrary logic
|
|
326
|
+
predicate: (ctx) => {
|
|
327
|
+
const user = ctx.user;
|
|
328
|
+
return user?.isActive && user?.emailVerified;
|
|
329
|
+
},
|
|
330
|
+
}
|
|
331
|
+
```
|
|
332
|
+
|
|
333
|
+
## Function Builder
|
|
334
|
+
|
|
335
|
+
For simple jobs that do not need a class, use the `job()` function builder. The callback receives `(input, ctx)` where `ctx` provides all `JobContext` methods.
|
|
336
|
+
|
|
337
|
+
```typescript
|
|
338
|
+
import { job } from '@frontmcp/sdk';
|
|
339
|
+
import { z } from 'zod';
|
|
340
|
+
|
|
341
|
+
const CleanupTempFiles = job({
|
|
342
|
+
name: 'cleanup-temp-files',
|
|
343
|
+
description: 'Remove temporary files older than the specified age',
|
|
344
|
+
inputSchema: {
|
|
345
|
+
directory: z.string().describe('Directory to clean'),
|
|
346
|
+
maxAgeDays: z.number().int().min(1).default(7),
|
|
347
|
+
},
|
|
348
|
+
outputSchema: {
|
|
349
|
+
deleted: z.number().int(),
|
|
350
|
+
freedBytes: z.number().int(),
|
|
351
|
+
},
|
|
352
|
+
})((input, ctx) => {
|
|
353
|
+
ctx.log(`Cleaning ${input.directory}, max age: ${input.maxAgeDays} days`);
|
|
354
|
+
ctx.progress(0, 100, 'Scanning directory');
|
|
355
|
+
|
|
356
|
+
// ... scan and delete logic ...
|
|
357
|
+
|
|
358
|
+
ctx.progress(100, 100, 'Cleanup complete');
|
|
359
|
+
return { deleted: 42, freedBytes: 1024000 };
|
|
360
|
+
});
|
|
361
|
+
```
|
|
362
|
+
|
|
363
|
+
Register it the same way as a class job: `jobs: [CleanupTempFiles]`.
|
|
364
|
+
|
|
365
|
+
## Remote and ESM Loading
|
|
366
|
+
|
|
367
|
+
Load jobs from external modules or remote URLs without importing them directly.
|
|
368
|
+
|
|
369
|
+
**ESM loading** -- load a job from an ES module:
|
|
370
|
+
|
|
371
|
+
```typescript
|
|
372
|
+
const ExternalJob = Job.esm('@my-org/jobs@^1.0.0', 'ExternalJob', {
|
|
373
|
+
description: 'A job loaded from an ES module',
|
|
374
|
+
});
|
|
375
|
+
```
|
|
376
|
+
|
|
377
|
+
**Remote loading** -- load a job from a remote URL:
|
|
378
|
+
|
|
379
|
+
```typescript
|
|
380
|
+
const CloudJob = Job.remote('https://example.com/jobs/cloud-job', 'CloudJob', {
|
|
381
|
+
description: 'A job loaded from a remote server',
|
|
382
|
+
});
|
|
383
|
+
```
|
|
384
|
+
|
|
385
|
+
Both return values that can be registered in `jobs: [ExternalJob, CloudJob]`.
|
|
386
|
+
|
|
387
|
+
## Registration and Configuration
|
|
388
|
+
|
|
389
|
+
### Registering Jobs
|
|
390
|
+
|
|
391
|
+
Add job classes (or function-style jobs) to the `jobs` array in `@App`.
|
|
392
|
+
|
|
393
|
+
```typescript
|
|
394
|
+
import { App } from '@frontmcp/sdk';
|
|
395
|
+
|
|
396
|
+
@App({
|
|
397
|
+
name: 'data-app',
|
|
398
|
+
jobs: [GenerateReportJob, SyncExternalApiJob, ImportCsvJob, CleanupTempFiles],
|
|
399
|
+
})
|
|
400
|
+
class DataApp {}
|
|
401
|
+
```
|
|
402
|
+
|
|
403
|
+
### Enabling the Jobs System
|
|
404
|
+
|
|
405
|
+
Jobs require a persistent store for state management. Enable the jobs system in `@FrontMcp` configuration.
|
|
406
|
+
|
|
407
|
+
```typescript
|
|
408
|
+
import { FrontMcp } from '@frontmcp/sdk';
|
|
409
|
+
|
|
410
|
+
@FrontMcp({
|
|
411
|
+
info: { name: 'my-server', version: '1.0.0' },
|
|
412
|
+
apps: [DataApp],
|
|
413
|
+
jobs: {
|
|
414
|
+
enabled: true,
|
|
415
|
+
store: {
|
|
416
|
+
redis: {
|
|
417
|
+
provider: 'redis',
|
|
418
|
+
host: 'localhost',
|
|
419
|
+
port: 6379,
|
|
420
|
+
keyPrefix: 'mcp:jobs:',
|
|
421
|
+
},
|
|
422
|
+
},
|
|
423
|
+
},
|
|
424
|
+
})
|
|
425
|
+
class MyServer {}
|
|
426
|
+
```
|
|
427
|
+
|
|
428
|
+
The store persists job state, progress, logs, and outputs across retries and server restarts. Redis is recommended for production. Without `jobs.enabled: true`, registered jobs will not be activated.
|
|
429
|
+
|
|
430
|
+
## Nx Generator
|
|
431
|
+
|
|
432
|
+
Scaffold a new job using the Nx generator:
|
|
433
|
+
|
|
434
|
+
```bash
|
|
435
|
+
nx generate @frontmcp/nx:job
|
|
436
|
+
```
|
|
437
|
+
|
|
438
|
+
This creates the job file and its spec file, and updates the barrel exports.
|
|
439
|
+
|
|
440
|
+
## Complete Example: Data Pipeline Job
|
|
441
|
+
|
|
442
|
+
```typescript
|
|
443
|
+
import { Job, JobContext, FrontMcp, App, job } from '@frontmcp/sdk';
|
|
444
|
+
import { z } from 'zod';
|
|
445
|
+
|
|
446
|
+
@Job({
|
|
447
|
+
name: 'etl-pipeline',
|
|
448
|
+
description: 'Extract, transform, and load data from source to warehouse',
|
|
449
|
+
inputSchema: {
|
|
450
|
+
source: z.string().url().describe('Source data URL'),
|
|
451
|
+
destination: z.string().describe('Destination table name'),
|
|
452
|
+
transformations: z
|
|
453
|
+
.array(z.enum(['normalize', 'deduplicate', 'validate', 'enrich']))
|
|
454
|
+
.default(['normalize', 'validate']),
|
|
455
|
+
},
|
|
456
|
+
outputSchema: {
|
|
457
|
+
extracted: z.number().int(),
|
|
458
|
+
transformed: z.number().int(),
|
|
459
|
+
loaded: z.number().int(),
|
|
460
|
+
errors: z.array(
|
|
461
|
+
z.object({
|
|
462
|
+
row: z.number(),
|
|
463
|
+
message: z.string(),
|
|
464
|
+
}),
|
|
465
|
+
),
|
|
466
|
+
duration: z.number(),
|
|
467
|
+
},
|
|
468
|
+
timeout: 900000, // 15 minutes
|
|
469
|
+
retry: {
|
|
470
|
+
maxAttempts: 3,
|
|
471
|
+
backoffMs: 5000,
|
|
472
|
+
backoffMultiplier: 2,
|
|
473
|
+
maxBackoffMs: 30000,
|
|
474
|
+
},
|
|
475
|
+
tags: ['etl', 'data-pipeline'],
|
|
476
|
+
labels: { team: 'data-engineering', priority: 'high' },
|
|
477
|
+
permissions: {
|
|
478
|
+
actions: ['create', 'read', 'execute', 'list'],
|
|
479
|
+
roles: ['admin', 'data-engineer'],
|
|
480
|
+
scopes: ['jobs:execute', 'data:write'],
|
|
481
|
+
},
|
|
482
|
+
})
|
|
483
|
+
class EtlPipelineJob extends JobContext {
|
|
484
|
+
async execute(input: {
|
|
485
|
+
source: string;
|
|
486
|
+
destination: string;
|
|
487
|
+
transformations: ('normalize' | 'deduplicate' | 'validate' | 'enrich')[];
|
|
488
|
+
}) {
|
|
489
|
+
const startTime = Date.now();
|
|
490
|
+
const errors: { row: number; message: string }[] = [];
|
|
491
|
+
|
|
492
|
+
this.log(`Attempt ${this.attempt}: Starting ETL pipeline`);
|
|
493
|
+
this.log(`Source: ${input.source}, Destination: ${input.destination}`);
|
|
494
|
+
|
|
495
|
+
// Extract
|
|
496
|
+
this.progress(0, 100, 'Extracting data');
|
|
497
|
+
this.mark('extract');
|
|
498
|
+
const rawData = await this.extract(input.source);
|
|
499
|
+
this.log(`Extracted ${rawData.length} records`);
|
|
500
|
+
|
|
501
|
+
// Transform
|
|
502
|
+
this.progress(33, 100, 'Applying transformations');
|
|
503
|
+
this.mark('transform');
|
|
504
|
+
let transformed = rawData;
|
|
505
|
+
for (const t of input.transformations) {
|
|
506
|
+
transformed = await this.applyTransformation(transformed, t, errors);
|
|
507
|
+
}
|
|
508
|
+
this.log(`Transformed ${transformed.length} records (${errors.length} errors)`);
|
|
509
|
+
|
|
510
|
+
// Load
|
|
511
|
+
this.progress(66, 100, 'Loading into warehouse');
|
|
512
|
+
this.mark('load');
|
|
513
|
+
const loaded = await this.load(input.destination, transformed);
|
|
514
|
+
this.log(`Loaded ${loaded} records into ${input.destination}`);
|
|
515
|
+
|
|
516
|
+
this.progress(100, 100, 'Pipeline complete');
|
|
517
|
+
const logs = this.getLogs();
|
|
518
|
+
this.log(`Total log entries: ${logs.length}`);
|
|
519
|
+
|
|
520
|
+
return {
|
|
521
|
+
extracted: rawData.length,
|
|
522
|
+
transformed: transformed.length,
|
|
523
|
+
loaded,
|
|
524
|
+
errors,
|
|
525
|
+
duration: Date.now() - startTime,
|
|
526
|
+
};
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
private async extract(source: string): Promise<unknown[]> {
|
|
530
|
+
return [];
|
|
531
|
+
}
|
|
532
|
+
private async applyTransformation(
|
|
533
|
+
data: unknown[],
|
|
534
|
+
type: string,
|
|
535
|
+
errors: { row: number; message: string }[],
|
|
536
|
+
): Promise<unknown[]> {
|
|
537
|
+
return data;
|
|
538
|
+
}
|
|
539
|
+
private async load(table: string, data: unknown[]): Promise<number> {
|
|
540
|
+
return data.length;
|
|
541
|
+
}
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
@App({
|
|
545
|
+
name: 'data-app',
|
|
546
|
+
jobs: [EtlPipelineJob],
|
|
547
|
+
})
|
|
548
|
+
class DataApp {}
|
|
549
|
+
|
|
550
|
+
@FrontMcp({
|
|
551
|
+
info: { name: 'data-server', version: '1.0.0' },
|
|
552
|
+
apps: [DataApp],
|
|
553
|
+
jobs: {
|
|
554
|
+
enabled: true,
|
|
555
|
+
store: {
|
|
556
|
+
redis: {
|
|
557
|
+
provider: 'redis',
|
|
558
|
+
host: 'localhost',
|
|
559
|
+
port: 6379,
|
|
560
|
+
keyPrefix: 'mcp:jobs:',
|
|
561
|
+
},
|
|
562
|
+
},
|
|
563
|
+
},
|
|
564
|
+
})
|
|
565
|
+
class DataServer {}
|
|
566
|
+
```
|