gorsee 0.2.11 → 0.2.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +52 -4
- package/dist-pkg/ai/bundle.d.ts +1 -0
- package/dist-pkg/ai/framework-context.d.ts +2 -0
- package/dist-pkg/ai/framework-context.js +6 -1
- package/dist-pkg/ai/ide.d.ts +1 -0
- package/dist-pkg/ai/ide.js +3 -0
- package/dist-pkg/ai/index.d.ts +10 -1
- package/dist-pkg/ai/index.js +13 -2
- package/dist-pkg/ai/mcp.js +4 -0
- package/dist-pkg/ai/session-pack.d.ts +8 -0
- package/dist-pkg/ai/session-pack.js +51 -1
- package/dist-pkg/ai/store.d.ts +25 -1
- package/dist-pkg/ai/store.js +89 -3
- package/dist-pkg/ai/summary.d.ts +88 -0
- package/dist-pkg/ai/summary.js +310 -1
- package/dist-pkg/build/manifest.d.ts +4 -2
- package/dist-pkg/build/manifest.js +32 -2
- package/dist-pkg/cli/cmd-ai.js +66 -0
- package/dist-pkg/cli/cmd-build.js +72 -26
- package/dist-pkg/cli/cmd-check.js +104 -11
- package/dist-pkg/cli/cmd-create.js +333 -7
- package/dist-pkg/cli/cmd-deploy.js +17 -3
- package/dist-pkg/cli/cmd-docs.d.ts +3 -1
- package/dist-pkg/cli/cmd-docs.js +5 -3
- package/dist-pkg/cli/cmd-start.js +8 -1
- package/dist-pkg/cli/cmd-upgrade.d.ts +3 -0
- package/dist-pkg/cli/cmd-upgrade.js +14 -2
- package/dist-pkg/cli/cmd-worker.d.ts +9 -0
- package/dist-pkg/cli/cmd-worker.js +78 -0
- package/dist-pkg/cli/framework-md.js +16 -4
- package/dist-pkg/cli/index.js +5 -0
- package/dist-pkg/runtime/app-config.d.ts +5 -0
- package/dist-pkg/runtime/app-config.js +26 -5
- package/dist-pkg/server/index.d.ts +2 -1
- package/dist-pkg/server/index.js +1 -0
- package/dist-pkg/server/jobs.d.ts +35 -1
- package/dist-pkg/server/jobs.js +226 -3
- package/dist-pkg/server/manifest.d.ts +30 -0
- package/dist-pkg/server/manifest.js +30 -1
- package/dist-pkg/server/redis-client.d.ts +9 -0
- package/dist-pkg/server/redis-client.js +4 -1
- package/dist-pkg/server/redis-job-queue.d.ts +2 -0
- package/dist-pkg/server/redis-job-queue.js +434 -16
- package/dist-pkg/server/worker-service.d.ts +33 -0
- package/dist-pkg/server/worker-service.js +135 -0
- package/dist-pkg/server-entry.d.ts +2 -1
- package/dist-pkg/server-entry.js +4 -0
- package/package.json +1 -1
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { stat } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { pathToFileURL } from "node:url";
|
|
4
|
+
import { configureAIObservability, emitAIEvent } from "../ai/index.js";
|
|
5
|
+
import { loadEnv } from "../env/index.js";
|
|
6
|
+
import { loadAppConfig, resolveAIConfig, resolveAppMode } from "../runtime/app-config.js";
|
|
7
|
+
import { createProjectContext } from "../runtime/project.js";
|
|
8
|
+
/**
 * Parse CLI flags for `gorsee worker`.
 *
 * Supported forms:
 *   --entry <path>   (two tokens; the value token must be non-empty)
 *   --entry=<path>   (single token)
 *
 * @param args Raw argv slice after the `worker` command.
 * @returns Flags object; `entry` defaults to "workers/main.ts" when no
 *          usable --entry flag is present.
 */
export function parseWorkerFlags(args) {
  const flags = {
    entry: "workers/main.ts"
  };
  let index = 0;
  while (index < args.length) {
    const arg = args[index];
    if (arg) {
      if (arg === "--entry" && args[index + 1]) {
        // Consume the following token as the entry value.
        index += 1;
        flags.entry = args[index];
      } else if (arg.startsWith("--entry=")) {
        flags.entry = arg.slice("--entry=".length);
      }
    }
    index += 1;
  }
  return flags;
}
|
|
23
|
+
/**
 * Run the canonical server-mode worker entry.
 *
 * Flow: parse flags, load env, load app config, enforce `app.mode === "server"`,
 * wire up AI observability, verify the entry file exists, then dynamically
 * import it while emitting worker.command.start/finish/error AI events.
 *
 * Exits the process with code 1 when the app is not in server mode or the
 * worker entry file cannot be found. Rethrows any error raised by the entry
 * module after reporting it.
 *
 * @param args    Raw argv slice after the `worker` command (see parseWorkerFlags).
 * @param options Project-context options forwarded to createProjectContext.
 */
export async function runWorker(args, options = {}) {
  const workerFlags = parseWorkerFlags(args);
  const { cwd } = createProjectContext(options);
  await loadEnv(cwd);
  const appConfig = await loadAppConfig(cwd);
  const appMode = resolveAppMode(appConfig);
  if (appMode !== "server") {
    console.error(`
Error: \`gorsee worker\` is only available for server-mode apps. Current app.mode is "${appMode}".
`);
    process.exit(1);
  }
  configureAIObservability(resolveAIConfig(cwd, appConfig));
  const entryPath = join(cwd, workerFlags.entry);
  // Probe for the entry file before importing so the failure message is actionable.
  let entryMissing = false;
  try {
    await stat(entryPath);
  } catch {
    entryMissing = true;
  }
  if (entryMissing) {
    console.error(`
Error: Worker entry not found: ${workerFlags.entry}
`);
    process.exit(1);
  }
  await emitAIEvent({
    kind: "worker.command.start",
    severity: "info",
    source: "cli",
    message: "starting worker entry",
    data: { entry: workerFlags.entry, appMode }
  });
  try {
    // Import by file URL so absolute paths resolve correctly on all platforms.
    await import(pathToFileURL(entryPath).href);
    await emitAIEvent({
      kind: "worker.command.finish",
      severity: "info",
      source: "cli",
      message: "worker entry finished",
      data: { entry: workerFlags.entry, appMode }
    });
  } catch (error) {
    await emitAIEvent({
      kind: "worker.command.error",
      severity: "error",
      source: "cli",
      message: error instanceof Error ? error.message : String(error),
      data: { entry: workerFlags.entry, appMode }
    });
    throw error;
  }
}
|
|
@@ -6,7 +6,7 @@ export function generateFrameworkMD(projectName) {
|
|
|
6
6
|
|
|
7
7
|
## Product Identity
|
|
8
8
|
|
|
9
|
-
Gorsee is an AI-first
|
|
9
|
+
Gorsee is an AI-first application platform designed for deterministic collaboration between humans and coding agents across frontend, fullstack, and server systems.
|
|
10
10
|
|
|
11
11
|
Treat it as a mature product framework:
|
|
12
12
|
|
|
@@ -16,6 +16,12 @@ Treat it as a mature product framework:
|
|
|
16
16
|
|
|
17
17
|
The framework prefers one clear path, strict contracts, and product-grade discipline over flexibility for its own sake.
|
|
18
18
|
|
|
19
|
+
Canonical modes:
|
|
20
|
+
|
|
21
|
+
- \`frontend\` for browser-first prerendered apps
|
|
22
|
+
- \`fullstack\` for the canonical UI + server path
|
|
23
|
+
- \`server\` for API-first and service-oriented systems
|
|
24
|
+
|
|
19
25
|
## Quick Reference
|
|
20
26
|
|
|
21
27
|
\`\`\`
|
|
@@ -110,7 +116,11 @@ export const cache = routeCache({
|
|
|
110
116
|
- SQLite adapters are the default persistent single-node path
|
|
111
117
|
- Redis adapters are the default multi-instance path
|
|
112
118
|
- Set \`runtime.topology = "multi-instance"\` in \`app.config.ts\` when replicas share traffic; production then requires an explicit distributed \`security.rateLimit.limiter\`
|
|
113
|
-
- Use \`createRedisJobQueue()\` for durable cross-replica job execution; \`createMemoryJobQueue()\`
|
|
119
|
+
- Use \`createRedisJobQueue()\` for durable cross-replica job execution; when the Redis client exposes sorted-set primitives Gorsee uses indexed due scans and renews long-running execution leases, while \`createMemoryJobQueue()\` remains single-node only
|
|
120
|
+
- Queue instances expose \`get(id)\`, \`peek(limit)\`, and \`cancel(id)\` for operator-facing inspection and pre-run cancellation of queued jobs
|
|
121
|
+
- Queue instances also expose bounded terminal history via \`recent(limit)\`, dead-letter inspection via \`failures(limit)\`, and controlled recovery with \`retryFailed(id)\`
|
|
122
|
+
- Queue lifecycle emits structured AI events: \`job.enqueue\`, \`job.start\`, \`job.retry\`, \`job.complete\`, \`job.fail\`, \`job.cancel\`, and \`job.dead-letter.retry\`
|
|
123
|
+
- Use \`defineWorkerService()\` + \`runWorkerService()\` for long-running server-mode workers with explicit ready/heartbeat/stop semantics
|
|
114
124
|
- \`createNodeRedisLikeClient()\` and \`createIORedisLikeClient()\` normalize real Redis SDK clients to the framework adapter contract
|
|
115
125
|
- \`routeCache()\` defaults to \`mode: "private"\` and varies by \`Cookie\`, \`Authorization\`, \`Accept\`, and \`X-Gorsee-Navigate\`
|
|
116
126
|
- Use \`mode: "public"\` or \`mode: "shared"\` only for intentionally non-personalized cache entries
|
|
@@ -165,7 +175,7 @@ export default {
|
|
|
165
175
|
- \`.gorsee/ai-diagnostics.json\` keeps the latest error/warning snapshot for fast IDE polling
|
|
166
176
|
- \`.gorsee/ide/diagnostics.json\`, \`.gorsee/ide/events.json\`, and \`.gorsee/ide/context.md\` are editor-facing projections produced by \`gorsee ai ide-sync\`
|
|
167
177
|
- \`.gorsee/ide/events.json\` now includes \`artifactRegressions\` plus event-level \`artifact\` / \`version\` metadata for release/deploy/build drift
|
|
168
|
-
- \`.gorsee/agent/latest.json\` and \`.gorsee/agent/latest.md\` are session packs for agents;
|
|
178
|
+
- \`.gorsee/agent/latest.json\` and \`.gorsee/agent/latest.md\` are session packs for agents; \`.gorsee/agent/deploy-summary.json\`, \`.gorsee/agent/release-brief.json\`, \`.gorsee/agent/incident-brief.json\`, and \`.gorsee/agent/incident-snapshot.json\` are grounded operational artifacts written alongside them
|
|
169
179
|
- \`gorsee ai doctor\` clusters repeated failures by trace/request/file/route so agents can spot systemic regressions quickly
|
|
170
180
|
- \`gorsee ai doctor\` and \`gorsee ai export\` also surface artifact regressions, so agents can reason about broken tarballs, VSIX files, build outputs, and deploy configs without separate tooling
|
|
171
181
|
- Bridge delivery is best-effort only; a dead IDE bridge must never fail the request/build/check path
|
|
@@ -200,6 +210,7 @@ export default {
|
|
|
200
210
|
## Product DX References
|
|
201
211
|
|
|
202
212
|
- Use \`docs/STARTER_ONBOARDING.md\` to choose the right app class
|
|
213
|
+
- Use \`docs/APPLICATION_MODES.md\` when choosing or changing \`app.mode\`
|
|
203
214
|
- Use \`docs/MIGRATION_GUIDE.md\` when cleaning up compatibility imports
|
|
204
215
|
- Use \`docs/UPGRADE_PLAYBOOK.md\` before release-channel or contract upgrades
|
|
205
216
|
- Use \`docs/DEPLOY_TARGET_GUIDE.md\` before committing to a deploy target
|
|
@@ -323,7 +334,7 @@ export default {
|
|
|
323
334
|
csp: true,
|
|
324
335
|
hsts: true,
|
|
325
336
|
csrf: true,
|
|
326
|
-
rateLimit: {
|
|
337
|
+
rateLimit: { maxRequests: 100, window: "1m" },
|
|
327
338
|
},
|
|
328
339
|
// RPC is a separate boundary from route _middleware.ts
|
|
329
340
|
// security: { rpc: { middlewares: [auth.protect(), createCSRFMiddleware(process.env.SESSION_SECRET!)] } },
|
|
@@ -358,6 +369,7 @@ export default {
|
|
|
358
369
|
gorsee dev Start dev server
|
|
359
370
|
gorsee build Production build
|
|
360
371
|
gorsee check Type + safety + structure check
|
|
372
|
+
gorsee worker Run canonical server-mode worker entry
|
|
361
373
|
gorsee check --rewrite-imports --rewrite-loaders
|
|
362
374
|
Normalize canonical imports and loader aliases before auditing
|
|
363
375
|
gorsee ai framework Export canonical framework context for cold-start agents
|
package/dist-pkg/cli/index.js
CHANGED
|
@@ -3,6 +3,7 @@ const args = process.argv.slice(2), command = args[0], COMMANDS = {
|
|
|
3
3
|
dev: "Start development server with HMR",
|
|
4
4
|
build: "Production build (client + server)",
|
|
5
5
|
start: "Start production server",
|
|
6
|
+
worker: "Run a server-mode worker entry",
|
|
6
7
|
check: "Check project: types, safety, structure, optional canonical autofix",
|
|
7
8
|
routes: "List all routes with render modes",
|
|
8
9
|
migrate: "Run database migrations",
|
|
@@ -33,6 +34,10 @@ async function main() {
|
|
|
33
34
|
const { runStart } = await import("./cmd-start.js");
|
|
34
35
|
await runStart(args.slice(1));
|
|
35
36
|
break;
|
|
37
|
+
case "worker":
|
|
38
|
+
const { runWorker } = await import("./cmd-worker.js");
|
|
39
|
+
await runWorker(args.slice(1));
|
|
40
|
+
break;
|
|
36
41
|
case "check":
|
|
37
42
|
const { runCheck } = await import("./cmd-check.js");
|
|
38
43
|
await runCheck(args.slice(1));
|
|
@@ -7,7 +7,11 @@ export interface AppSecurityRateLimitConfig {
|
|
|
7
7
|
window?: string;
|
|
8
8
|
limiter?: RateLimiter | AsyncRateLimiter;
|
|
9
9
|
}
|
|
10
|
+
export type AppMode = "frontend" | "fullstack" | "server";
|
|
10
11
|
export interface AppConfig {
|
|
12
|
+
app?: {
|
|
13
|
+
mode?: AppMode;
|
|
14
|
+
};
|
|
11
15
|
ai?: AIObservabilityConfig;
|
|
12
16
|
runtime?: {
|
|
13
17
|
topology?: RuntimeTopology;
|
|
@@ -28,6 +32,7 @@ export interface AppConfig {
|
|
|
28
32
|
}
|
|
29
33
|
export type ProxyPreset = "none" | "reverse-proxy" | "vercel" | "netlify" | "fly" | "cloudflare";
|
|
30
34
|
export type RuntimeTopology = "single-instance" | "multi-instance";
|
|
35
|
+
export declare function resolveAppMode(config: AppConfig): AppMode;
|
|
31
36
|
export declare function loadAppConfig(cwd: string, explicitPath?: string): Promise<AppConfig>;
|
|
32
37
|
export declare function resolveRPCMiddlewares(config: AppConfig, explicitMiddlewares?: MiddlewareFn[]): MiddlewareFn[] | undefined;
|
|
33
38
|
export declare function resolveRuntimeTopology(config: AppConfig): RuntimeTopology;
|
|
@@ -1,7 +1,17 @@
|
|
|
1
|
-
import { stat } from "node:fs/promises";
|
|
2
|
-
import { join } from "node:path";
|
|
1
|
+
import { readFile, rm, stat, writeFile } from "node:fs/promises";
|
|
2
|
+
import { basename, dirname, extname, join } from "node:path";
|
|
3
3
|
import { pathToFileURL } from "node:url";
|
|
4
4
|
import { resolveAIObservabilityConfig } from "../ai/index.js";
|
|
5
|
+
/**
 * Resolve the effective app mode from config.
 *
 * Only "frontend" and "server" are honored as explicit overrides; any other
 * value (including "fullstack", undefined, or an unknown string) falls back
 * to the canonical default "fullstack".
 *
 * @param config App config object (may omit `app` or `app.mode`).
 * @returns "frontend" | "fullstack" | "server"
 */
export function resolveAppMode(config) {
  const requested = config.app?.mode;
  if (requested === "frontend" || requested === "server") {
    return requested;
  }
  return "fullstack";
}
|
|
5
15
|
export async function loadAppConfig(cwd, explicitPath) {
|
|
6
16
|
const candidatePaths = explicitPath ? [explicitPath] : [
|
|
7
17
|
join(cwd, "app.config.ts"),
|
|
@@ -10,8 +20,13 @@ export async function loadAppConfig(cwd, explicitPath) {
|
|
|
10
20
|
];
|
|
11
21
|
for (const configPath of candidatePaths)
|
|
12
22
|
try {
|
|
13
|
-
const configStat = await stat(configPath);
|
|
14
|
-
|
|
23
|
+
const configStat = await stat(configPath), ext = extname(configPath) || ".ts", configDir = dirname(configPath), configBase = basename(configPath, ext), tempConfigPath = join(configDir, `.${configBase}.gorsee-load-${process.pid}-${configStat.mtimeMs}-${Date.now()}-${Math.random().toString(36).slice(2)}${ext}`);
|
|
24
|
+
try {
|
|
25
|
+
await writeFile(tempConfigPath, await readFile(configPath, "utf-8"), "utf-8");
|
|
26
|
+
return (await import(pathToFileURL(tempConfigPath).href)).default ?? {};
|
|
27
|
+
} finally {
|
|
28
|
+
await rm(tempConfigPath, { force: !0 }).catch(() => {});
|
|
29
|
+
}
|
|
15
30
|
} catch {
|
|
16
31
|
continue;
|
|
17
32
|
}
|
|
@@ -30,9 +45,15 @@ export function resolveSecurityRateLimit(config) {
|
|
|
30
45
|
return config.security?.rateLimit;
|
|
31
46
|
}
|
|
32
47
|
export function resolveAIConfig(cwd, config, explicitConfig) {
|
|
48
|
+
if (!config.ai && !explicitConfig)
|
|
49
|
+
return;
|
|
33
50
|
const merged = {
|
|
34
51
|
...config.ai ?? {},
|
|
35
|
-
...explicitConfig ?? {}
|
|
52
|
+
...explicitConfig ?? {},
|
|
53
|
+
app: explicitConfig?.app ?? config.ai?.app ?? {
|
|
54
|
+
mode: resolveAppMode(config),
|
|
55
|
+
runtimeTopology: resolveRuntimeTopology(config)
|
|
56
|
+
}
|
|
36
57
|
};
|
|
37
58
|
if (Object.keys(merged).length === 0)
|
|
38
59
|
return;
|
|
@@ -18,7 +18,8 @@ export { createRedisCacheStore } from "./redis-cache-store.js";
|
|
|
18
18
|
export { createRedisJobQueue, type RedisJobQueueOptions } from "./redis-job-queue.js";
|
|
19
19
|
export { createScopedRPCRegistry } from "./rpc-utils.js";
|
|
20
20
|
export { createSQLiteCacheStore } from "./sqlite-cache-store.js";
|
|
21
|
-
export { createMemoryJobQueue, defineJob, type JobContext, type JobDefinition, type JobEnqueueOptions, type JobQueue, type JobRunResult, type EnqueuedJob } from "./jobs.js";
|
|
21
|
+
export { createMemoryJobQueue, defineJob, type JobContext, type JobDefinition, type JobEnqueueOptions, type JobRetryOptions, type JobQueue, type JobRunResult, type EnqueuedJob, type QueuedJobRecord, type TerminalJobRecord, type MemoryJobQueueOptions } from "./jobs.js";
|
|
22
|
+
export { defineWorkerService, runWorkerService, type WorkerServiceContext, type WorkerServiceDefinition, type WorkerServiceStartHandle, type RunWorkerServiceOptions, type RunningWorkerService } from "./worker-service.js";
|
|
22
23
|
export { setupI18n, loadLocale, getLocale, getLocales, getDefaultLocale, getFallbackLocales, setLocale, t, plural, negotiateLocale, resolveLocaleFromPath, stripLocalePrefix, withLocalePath, buildHreflangLinks, formatNumber, formatDate, formatRelativeTime, type I18nConfig, type LocaleNegotiationInput, type LocaleNegotiationResult, } from "../i18n/index.js";
|
|
23
24
|
export { loadContentCollection, parseFrontmatter, extractExcerpt, queryContent, getContentEntryBySlug, type ContentCollectionOptions, type ContentEntry, type ContentQueryOptions, } from "../content/index.js";
|
|
24
25
|
export { type RedisLikeClient, type NodeRedisClientLike, type IORedisClientLike, createNodeRedisLikeClient, createIORedisLikeClient, deleteExpiredRedisKeys, } from "./redis-client.js";
|
package/dist-pkg/server/index.js
CHANGED
|
@@ -46,6 +46,7 @@ export { createRedisJobQueue } from "./redis-job-queue.js";
|
|
|
46
46
|
export { createScopedRPCRegistry } from "./rpc-utils.js";
|
|
47
47
|
export { createSQLiteCacheStore } from "./sqlite-cache-store.js";
|
|
48
48
|
export { createMemoryJobQueue, defineJob } from "./jobs.js";
|
|
49
|
+
export { defineWorkerService, runWorkerService } from "./worker-service.js";
|
|
49
50
|
export {
|
|
50
51
|
setupI18n,
|
|
51
52
|
loadLocale,
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
type Awaitable<T> = T | Promise<T>;
|
|
2
|
+
export type JobQueueKind = "memory" | "redis";
|
|
2
3
|
export interface JobContext {
|
|
3
4
|
attempt: number;
|
|
4
5
|
maxAttempts: number;
|
|
@@ -12,6 +13,8 @@ export interface JobEnqueueOptions {
|
|
|
12
13
|
maxAttempts?: number;
|
|
13
14
|
backoffMs?: number;
|
|
14
15
|
}
|
|
16
|
+
export interface JobRetryOptions extends JobEnqueueOptions {
|
|
17
|
+
}
|
|
15
18
|
export interface EnqueuedJob<Payload = unknown> {
|
|
16
19
|
id: string;
|
|
17
20
|
name: string;
|
|
@@ -22,6 +25,10 @@ export interface EnqueuedJob<Payload = unknown> {
|
|
|
22
25
|
backoffMs: number;
|
|
23
26
|
lastError?: string;
|
|
24
27
|
}
|
|
28
|
+
export interface QueuedJobRecord<Payload = unknown> extends EnqueuedJob<Payload> {
|
|
29
|
+
createdAt: number;
|
|
30
|
+
updatedAt: number;
|
|
31
|
+
}
|
|
25
32
|
export interface JobRunResult {
|
|
26
33
|
id: string;
|
|
27
34
|
name: string;
|
|
@@ -30,12 +37,39 @@ export interface JobRunResult {
|
|
|
30
37
|
nextRunAt?: number;
|
|
31
38
|
error?: string;
|
|
32
39
|
}
|
|
40
|
+
export interface TerminalJobRecord<Payload = unknown> extends QueuedJobRecord<Payload> {
|
|
41
|
+
status: "completed" | "failed";
|
|
42
|
+
finishedAt: number;
|
|
43
|
+
}
|
|
44
|
+
export interface MemoryJobQueueOptions {
|
|
45
|
+
historyLimit?: number;
|
|
46
|
+
}
|
|
47
|
+
interface JobLifecycleEventInput {
|
|
48
|
+
kind: "job.enqueue" | "job.start" | "job.retry" | "job.complete" | "job.fail" | "job.cancel" | "job.dead-letter.retry";
|
|
49
|
+
severity: "info" | "warn" | "error";
|
|
50
|
+
queue: JobQueueKind;
|
|
51
|
+
id: string;
|
|
52
|
+
name: string;
|
|
53
|
+
attempts?: number;
|
|
54
|
+
maxAttempts?: number;
|
|
55
|
+
runAt?: number;
|
|
56
|
+
nextRunAt?: number;
|
|
57
|
+
error?: string;
|
|
58
|
+
workerInstanceId?: string;
|
|
59
|
+
}
|
|
33
60
|
export interface JobQueue {
|
|
34
61
|
enqueue<Payload>(job: JobDefinition<Payload>, payload: Payload, options?: JobEnqueueOptions): Promise<EnqueuedJob<Payload>>;
|
|
35
62
|
runNext(now?: number): Promise<JobRunResult | null>;
|
|
36
63
|
drain(now?: number): Promise<JobRunResult[]>;
|
|
37
64
|
size(): Promise<number>;
|
|
65
|
+
get(id: string): Promise<QueuedJobRecord | null>;
|
|
66
|
+
peek(limit?: number): Promise<QueuedJobRecord[]>;
|
|
67
|
+
cancel(id: string): Promise<boolean>;
|
|
68
|
+
recent(limit?: number): Promise<TerminalJobRecord[]>;
|
|
69
|
+
failures(limit?: number): Promise<TerminalJobRecord[]>;
|
|
70
|
+
retryFailed(id: string, options?: JobRetryOptions): Promise<EnqueuedJob | null>;
|
|
38
71
|
}
|
|
39
72
|
export declare function defineJob<Payload>(name: string, handler: (payload: Payload, context: JobContext) => Awaitable<void>): JobDefinition<Payload>;
|
|
40
|
-
export declare function
|
|
73
|
+
export declare function emitJobLifecycleEvent(input: JobLifecycleEventInput): Promise<void>;
|
|
74
|
+
export declare function createMemoryJobQueue(options?: MemoryJobQueueOptions): JobQueue;
|
|
41
75
|
export {};
|
package/dist-pkg/server/jobs.js
CHANGED
|
@@ -1,15 +1,75 @@
|
|
|
1
|
+
import { emitAIEvent } from "../ai/index.js";
|
|
1
2
|
/**
 * Create a job definition pairing a stable job name with its handler.
 *
 * @param name    Name recorded on queue entries and lifecycle events.
 * @param handler Function invoked as handler(payload, context) per run attempt.
 * @returns A plain { name, handler } definition object.
 */
export function defineJob(name, handler) {
  const definition = {
    name,
    handler
  };
  return definition;
}
|
|
4
|
-
export function
|
|
5
|
-
|
|
5
|
+
/**
 * Emit a structured AI observability event for a job lifecycle transition.
 *
 * The event's `kind`/`severity` come straight from the input; `source` is
 * always "runtime" and the human-readable message is derived via
 * buildJobLifecycleMessage. All queue/job metadata is forwarded under `data`
 * (undefined fields pass through as-is).
 *
 * @param input Lifecycle descriptor: kind, severity, queue, id, name, and
 *              optional attempts/maxAttempts/runAt/nextRunAt/workerInstanceId/error.
 */
export async function emitJobLifecycleEvent(input) {
  const { kind, severity, queue, id, name, attempts, maxAttempts, runAt, nextRunAt, workerInstanceId, error } = input;
  const data = {
    queue,
    jobId: id,
    jobName: name,
    attempts,
    maxAttempts,
    runAt,
    nextRunAt,
    workerInstanceId,
    error
  };
  await emitAIEvent({
    kind,
    severity,
    source: "runtime",
    message: buildJobLifecycleMessage(input),
    data
  });
}
|
|
24
|
+
/**
 * Build the human-readable message for a job lifecycle event.
 * Unknown kinds fall back to a generic "job lifecycle event for <name>".
 *
 * @param input Lifecycle descriptor; only `kind` and `name` are read.
 * @returns Short message string, e.g. "queued job send-email".
 */
function buildJobLifecycleMessage(input) {
  const prefixByKind = {
    "job.enqueue": "queued job",
    "job.start": "started job",
    "job.retry": "retrying job",
    "job.complete": "completed job",
    "job.fail": "failed job",
    "job.cancel": "cancelled job",
    "job.dead-letter.retry": "requeued failed job"
  };
  const prefix = prefixByKind[input.kind] ?? "job lifecycle event for";
  return `${prefix} ${input.name}`;
}
|
|
44
|
+
export function createMemoryJobQueue(options = {}) {
|
|
45
|
+
const jobs = [], history = [];
|
|
6
46
|
let sequence = 0;
|
|
47
|
+
const historyLimit = options.historyLimit ?? 100;
|
|
7
48
|
function sortJobs() {
|
|
8
49
|
jobs.sort((a, b) => a.runAt - b.runAt || a.sequence - b.sequence);
|
|
9
50
|
}
|
|
51
|
+
function pushHistory(entry, status) {
|
|
52
|
+
history.unshift({
|
|
53
|
+
id: entry.id,
|
|
54
|
+
name: entry.name,
|
|
55
|
+
payload: entry.payload,
|
|
56
|
+
runAt: entry.runAt,
|
|
57
|
+
attempts: entry.attempts,
|
|
58
|
+
maxAttempts: entry.maxAttempts,
|
|
59
|
+
backoffMs: entry.backoffMs,
|
|
60
|
+
lastError: entry.lastError,
|
|
61
|
+
createdAt: entry.createdAt,
|
|
62
|
+
updatedAt: entry.updatedAt,
|
|
63
|
+
finishedAt: Date.now(),
|
|
64
|
+
status,
|
|
65
|
+
job: entry.job
|
|
66
|
+
});
|
|
67
|
+
if (history.length > historyLimit)
|
|
68
|
+
history.length = historyLimit;
|
|
69
|
+
}
|
|
10
70
|
return {
|
|
11
71
|
async enqueue(job, payload, options = {}) {
|
|
12
|
-
const enqueued = {
|
|
72
|
+
const now = Date.now(), enqueued = {
|
|
13
73
|
id: crypto.randomUUID(),
|
|
14
74
|
name: job.name,
|
|
15
75
|
payload,
|
|
@@ -17,11 +77,23 @@ export function createMemoryJobQueue() {
|
|
|
17
77
|
attempts: 0,
|
|
18
78
|
maxAttempts: options.maxAttempts ?? 3,
|
|
19
79
|
backoffMs: options.backoffMs ?? 1000,
|
|
80
|
+
createdAt: now,
|
|
81
|
+
updatedAt: now,
|
|
20
82
|
job,
|
|
21
83
|
sequence: sequence++
|
|
22
84
|
};
|
|
23
85
|
jobs.push(enqueued);
|
|
24
86
|
sortJobs();
|
|
87
|
+
await emitJobLifecycleEvent({
|
|
88
|
+
kind: "job.enqueue",
|
|
89
|
+
severity: "info",
|
|
90
|
+
queue: "memory",
|
|
91
|
+
id: enqueued.id,
|
|
92
|
+
name: enqueued.name,
|
|
93
|
+
attempts: enqueued.attempts,
|
|
94
|
+
maxAttempts: enqueued.maxAttempts,
|
|
95
|
+
runAt: enqueued.runAt
|
|
96
|
+
});
|
|
25
97
|
return enqueued;
|
|
26
98
|
},
|
|
27
99
|
async runNext(now = Date.now()) {
|
|
@@ -31,11 +103,33 @@ export function createMemoryJobQueue() {
|
|
|
31
103
|
return null;
|
|
32
104
|
const entry = jobs.splice(nextIndex, 1)[0];
|
|
33
105
|
entry.attempts += 1;
|
|
106
|
+
await emitJobLifecycleEvent({
|
|
107
|
+
kind: "job.start",
|
|
108
|
+
severity: "info",
|
|
109
|
+
queue: "memory",
|
|
110
|
+
id: entry.id,
|
|
111
|
+
name: entry.name,
|
|
112
|
+
attempts: entry.attempts,
|
|
113
|
+
maxAttempts: entry.maxAttempts,
|
|
114
|
+
runAt: entry.runAt
|
|
115
|
+
});
|
|
34
116
|
try {
|
|
35
117
|
await entry.job.handler(entry.payload, {
|
|
36
118
|
attempt: entry.attempts,
|
|
37
119
|
maxAttempts: entry.maxAttempts
|
|
38
120
|
});
|
|
121
|
+
entry.updatedAt = Date.now();
|
|
122
|
+
pushHistory(entry, "completed");
|
|
123
|
+
await emitJobLifecycleEvent({
|
|
124
|
+
kind: "job.complete",
|
|
125
|
+
severity: "info",
|
|
126
|
+
queue: "memory",
|
|
127
|
+
id: entry.id,
|
|
128
|
+
name: entry.name,
|
|
129
|
+
attempts: entry.attempts,
|
|
130
|
+
maxAttempts: entry.maxAttempts,
|
|
131
|
+
runAt: entry.runAt
|
|
132
|
+
});
|
|
39
133
|
return {
|
|
40
134
|
id: entry.id,
|
|
41
135
|
name: entry.name,
|
|
@@ -47,8 +141,21 @@ export function createMemoryJobQueue() {
|
|
|
47
141
|
entry.lastError = message;
|
|
48
142
|
if (entry.attempts < entry.maxAttempts) {
|
|
49
143
|
entry.runAt = now + entry.backoffMs * entry.attempts;
|
|
144
|
+
entry.updatedAt = Date.now();
|
|
50
145
|
jobs.push(entry);
|
|
51
146
|
sortJobs();
|
|
147
|
+
await emitJobLifecycleEvent({
|
|
148
|
+
kind: "job.retry",
|
|
149
|
+
severity: "warn",
|
|
150
|
+
queue: "memory",
|
|
151
|
+
id: entry.id,
|
|
152
|
+
name: entry.name,
|
|
153
|
+
attempts: entry.attempts,
|
|
154
|
+
maxAttempts: entry.maxAttempts,
|
|
155
|
+
runAt: now,
|
|
156
|
+
nextRunAt: entry.runAt,
|
|
157
|
+
error: message
|
|
158
|
+
});
|
|
52
159
|
return {
|
|
53
160
|
id: entry.id,
|
|
54
161
|
name: entry.name,
|
|
@@ -58,6 +165,19 @@ export function createMemoryJobQueue() {
|
|
|
58
165
|
error: message
|
|
59
166
|
};
|
|
60
167
|
}
|
|
168
|
+
entry.updatedAt = Date.now();
|
|
169
|
+
pushHistory(entry, "failed");
|
|
170
|
+
await emitJobLifecycleEvent({
|
|
171
|
+
kind: "job.fail",
|
|
172
|
+
severity: "error",
|
|
173
|
+
queue: "memory",
|
|
174
|
+
id: entry.id,
|
|
175
|
+
name: entry.name,
|
|
176
|
+
attempts: entry.attempts,
|
|
177
|
+
maxAttempts: entry.maxAttempts,
|
|
178
|
+
runAt: entry.runAt,
|
|
179
|
+
error: message
|
|
180
|
+
});
|
|
61
181
|
return {
|
|
62
182
|
id: entry.id,
|
|
63
183
|
name: entry.name,
|
|
@@ -78,6 +198,109 @@ export function createMemoryJobQueue() {
|
|
|
78
198
|
},
|
|
79
199
|
async size() {
|
|
80
200
|
return jobs.length;
|
|
201
|
+
},
|
|
202
|
+
async get(id) {
|
|
203
|
+
const job = jobs.find((entry) => entry.id === id);
|
|
204
|
+
if (!job)
|
|
205
|
+
return null;
|
|
206
|
+
return {
|
|
207
|
+
id: job.id,
|
|
208
|
+
name: job.name,
|
|
209
|
+
payload: job.payload,
|
|
210
|
+
runAt: job.runAt,
|
|
211
|
+
attempts: job.attempts,
|
|
212
|
+
maxAttempts: job.maxAttempts,
|
|
213
|
+
backoffMs: job.backoffMs,
|
|
214
|
+
lastError: job.lastError,
|
|
215
|
+
createdAt: job.createdAt,
|
|
216
|
+
updatedAt: job.updatedAt
|
|
217
|
+
};
|
|
218
|
+
},
|
|
219
|
+
async peek(limit = Number.POSITIVE_INFINITY) {
|
|
220
|
+
sortJobs();
|
|
221
|
+
return jobs.slice(0, limit).map((job) => ({
|
|
222
|
+
id: job.id,
|
|
223
|
+
name: job.name,
|
|
224
|
+
payload: job.payload,
|
|
225
|
+
runAt: job.runAt,
|
|
226
|
+
attempts: job.attempts,
|
|
227
|
+
maxAttempts: job.maxAttempts,
|
|
228
|
+
backoffMs: job.backoffMs,
|
|
229
|
+
lastError: job.lastError,
|
|
230
|
+
createdAt: job.createdAt,
|
|
231
|
+
updatedAt: job.updatedAt
|
|
232
|
+
}));
|
|
233
|
+
},
|
|
234
|
+
async cancel(id) {
|
|
235
|
+
const index = jobs.findIndex((entry) => entry.id === id);
|
|
236
|
+
if (index === -1)
|
|
237
|
+
return !1;
|
|
238
|
+
const [entry] = jobs.splice(index, 1);
|
|
239
|
+
if (entry)
|
|
240
|
+
await emitJobLifecycleEvent({
|
|
241
|
+
kind: "job.cancel",
|
|
242
|
+
severity: "info",
|
|
243
|
+
queue: "memory",
|
|
244
|
+
id: entry.id,
|
|
245
|
+
name: entry.name,
|
|
246
|
+
attempts: entry.attempts,
|
|
247
|
+
maxAttempts: entry.maxAttempts,
|
|
248
|
+
runAt: entry.runAt
|
|
249
|
+
});
|
|
250
|
+
return !0;
|
|
251
|
+
},
|
|
252
|
+
async recent(limit = 50) {
|
|
253
|
+
return history.slice(0, limit).map((entry) => ({
|
|
254
|
+
id: entry.id,
|
|
255
|
+
name: entry.name,
|
|
256
|
+
payload: entry.payload,
|
|
257
|
+
runAt: entry.runAt,
|
|
258
|
+
attempts: entry.attempts,
|
|
259
|
+
maxAttempts: entry.maxAttempts,
|
|
260
|
+
backoffMs: entry.backoffMs,
|
|
261
|
+
lastError: entry.lastError,
|
|
262
|
+
createdAt: entry.createdAt,
|
|
263
|
+
updatedAt: entry.updatedAt,
|
|
264
|
+
finishedAt: entry.finishedAt,
|
|
265
|
+
status: entry.status
|
|
266
|
+
}));
|
|
267
|
+
},
|
|
268
|
+
async failures(limit = 50) {
|
|
269
|
+
return history.filter((entry) => entry.status === "failed").slice(0, limit).map((entry) => ({
|
|
270
|
+
id: entry.id,
|
|
271
|
+
name: entry.name,
|
|
272
|
+
payload: entry.payload,
|
|
273
|
+
runAt: entry.runAt,
|
|
274
|
+
attempts: entry.attempts,
|
|
275
|
+
maxAttempts: entry.maxAttempts,
|
|
276
|
+
backoffMs: entry.backoffMs,
|
|
277
|
+
lastError: entry.lastError,
|
|
278
|
+
createdAt: entry.createdAt,
|
|
279
|
+
updatedAt: entry.updatedAt,
|
|
280
|
+
finishedAt: entry.finishedAt,
|
|
281
|
+
status: entry.status
|
|
282
|
+
}));
|
|
283
|
+
},
|
|
284
|
+
async retryFailed(id, options = {}) {
|
|
285
|
+
const failedEntry = history.find((entry) => entry.id === id && entry.status === "failed");
|
|
286
|
+
if (!failedEntry)
|
|
287
|
+
return null;
|
|
288
|
+
const retried = await this.enqueue(failedEntry.job, failedEntry.payload, {
|
|
289
|
+
runAt: options.runAt ?? Date.now(),
|
|
290
|
+
maxAttempts: options.maxAttempts ?? failedEntry.maxAttempts,
|
|
291
|
+
backoffMs: options.backoffMs ?? failedEntry.backoffMs
|
|
292
|
+
});
|
|
293
|
+
await emitJobLifecycleEvent({
|
|
294
|
+
kind: "job.dead-letter.retry",
|
|
295
|
+
severity: "warn",
|
|
296
|
+
queue: "memory",
|
|
297
|
+
id: retried.id,
|
|
298
|
+
name: retried.name,
|
|
299
|
+
attempts: retried.attempts,
|
|
300
|
+
maxAttempts: retried.maxAttempts,
|
|
301
|
+
runAt: retried.runAt
|
|
302
|
+
});
|
|
303
|
+
return retried;
|
|
81
304
|
}
|
|
82
305
|
};
|
|
83
306
|
}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
export declare const BUILD_MANIFEST_SCHEMA_VERSION: 1;
|
|
2
|
+
export declare const RELEASE_ARTIFACT_SCHEMA_VERSION: 1;
|
|
2
3
|
export interface BuildManifestRoute {
|
|
3
4
|
js?: string;
|
|
4
5
|
hasLoader: boolean;
|
|
@@ -6,13 +7,42 @@ export interface BuildManifestRoute {
|
|
|
6
7
|
}
|
|
7
8
|
export interface BuildManifest {
|
|
8
9
|
schemaVersion: number;
|
|
10
|
+
appMode?: "frontend" | "fullstack" | "server";
|
|
9
11
|
routes: Record<string, BuildManifestRoute>;
|
|
10
12
|
chunks: string[];
|
|
11
13
|
prerendered: string[];
|
|
12
14
|
buildTime: string;
|
|
13
15
|
}
|
|
16
|
+
export interface ReleaseArtifactSummary {
|
|
17
|
+
routeCount: number;
|
|
18
|
+
clientAssetCount: number;
|
|
19
|
+
prerenderedCount: number;
|
|
20
|
+
serverEntryCount: number;
|
|
21
|
+
}
|
|
22
|
+
export interface ReleaseArtifactRuntime {
|
|
23
|
+
kind: "frontend-static" | "fullstack-runtime" | "server-runtime";
|
|
24
|
+
processEntrypoints: string[];
|
|
25
|
+
handlerEntrypoints: string[];
|
|
26
|
+
workerEntrypoint?: string;
|
|
27
|
+
}
|
|
28
|
+
export interface ReleaseArtifactArtifacts {
|
|
29
|
+
buildManifest: string;
|
|
30
|
+
clientAssets: string[];
|
|
31
|
+
serverEntries: string[];
|
|
32
|
+
prerenderedHtml: string[];
|
|
33
|
+
}
|
|
34
|
+
export interface ReleaseArtifact {
|
|
35
|
+
schemaVersion: number;
|
|
36
|
+
appMode: "frontend" | "fullstack" | "server";
|
|
37
|
+
generatedAt: string;
|
|
38
|
+
summary: ReleaseArtifactSummary;
|
|
39
|
+
runtime: ReleaseArtifactRuntime;
|
|
40
|
+
artifacts: ReleaseArtifactArtifacts;
|
|
41
|
+
}
|
|
14
42
|
export declare function loadBuildManifest(distDir: string): Promise<BuildManifest>;
|
|
43
|
+
export declare function loadReleaseArtifact(distDir: string): Promise<ReleaseArtifact>;
|
|
15
44
|
export declare function parseBuildManifest(raw: string): BuildManifest;
|
|
45
|
+
export declare function parseReleaseArtifact(raw: string): ReleaseArtifact;
|
|
16
46
|
export declare function getRouteBuildEntry(manifest: BuildManifest, pathname: string): BuildManifestRoute | undefined;
|
|
17
47
|
export declare function getClientBundleForRoute(manifest: BuildManifest, pathname: string): string | undefined;
|
|
18
48
|
export declare function isPrerenderedRoute(manifest: BuildManifest, pathname: string): boolean;
|