@vibescope/mcp-server 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +98 -0
- package/dist/cli.d.ts +34 -0
- package/dist/cli.js +356 -0
- package/dist/cli.test.d.ts +1 -0
- package/dist/cli.test.js +367 -0
- package/dist/handlers/__test-utils__.d.ts +72 -0
- package/dist/handlers/__test-utils__.js +176 -0
- package/dist/handlers/blockers.d.ts +18 -0
- package/dist/handlers/blockers.js +81 -0
- package/dist/handlers/bodies-of-work.d.ts +34 -0
- package/dist/handlers/bodies-of-work.js +614 -0
- package/dist/handlers/checkouts.d.ts +37 -0
- package/dist/handlers/checkouts.js +377 -0
- package/dist/handlers/cost.d.ts +39 -0
- package/dist/handlers/cost.js +247 -0
- package/dist/handlers/decisions.d.ts +16 -0
- package/dist/handlers/decisions.js +64 -0
- package/dist/handlers/deployment.d.ts +36 -0
- package/dist/handlers/deployment.js +1062 -0
- package/dist/handlers/discovery.d.ts +14 -0
- package/dist/handlers/discovery.js +870 -0
- package/dist/handlers/fallback.d.ts +18 -0
- package/dist/handlers/fallback.js +216 -0
- package/dist/handlers/findings.d.ts +18 -0
- package/dist/handlers/findings.js +110 -0
- package/dist/handlers/git-issues.d.ts +22 -0
- package/dist/handlers/git-issues.js +247 -0
- package/dist/handlers/ideas.d.ts +19 -0
- package/dist/handlers/ideas.js +188 -0
- package/dist/handlers/index.d.ts +29 -0
- package/dist/handlers/index.js +65 -0
- package/dist/handlers/knowledge-query.d.ts +22 -0
- package/dist/handlers/knowledge-query.js +253 -0
- package/dist/handlers/knowledge.d.ts +12 -0
- package/dist/handlers/knowledge.js +108 -0
- package/dist/handlers/milestones.d.ts +20 -0
- package/dist/handlers/milestones.js +179 -0
- package/dist/handlers/organizations.d.ts +36 -0
- package/dist/handlers/organizations.js +428 -0
- package/dist/handlers/progress.d.ts +14 -0
- package/dist/handlers/progress.js +149 -0
- package/dist/handlers/project.d.ts +20 -0
- package/dist/handlers/project.js +278 -0
- package/dist/handlers/requests.d.ts +16 -0
- package/dist/handlers/requests.js +131 -0
- package/dist/handlers/roles.d.ts +30 -0
- package/dist/handlers/roles.js +281 -0
- package/dist/handlers/session.d.ts +20 -0
- package/dist/handlers/session.js +791 -0
- package/dist/handlers/tasks.d.ts +52 -0
- package/dist/handlers/tasks.js +1111 -0
- package/dist/handlers/tasks.test.d.ts +1 -0
- package/dist/handlers/tasks.test.js +431 -0
- package/dist/handlers/types.d.ts +94 -0
- package/dist/handlers/types.js +1 -0
- package/dist/handlers/validation.d.ts +16 -0
- package/dist/handlers/validation.js +188 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2707 -0
- package/dist/knowledge.d.ts +6 -0
- package/dist/knowledge.js +121 -0
- package/dist/tools.d.ts +2 -0
- package/dist/tools.js +2498 -0
- package/dist/utils.d.ts +149 -0
- package/dist/utils.js +317 -0
- package/dist/utils.test.d.ts +1 -0
- package/dist/utils.test.js +532 -0
- package/dist/validators.d.ts +35 -0
- package/dist/validators.js +111 -0
- package/dist/validators.test.d.ts +1 -0
- package/dist/validators.test.js +176 -0
- package/package.json +44 -0
- package/src/cli.test.ts +442 -0
- package/src/cli.ts +439 -0
- package/src/handlers/__test-utils__.ts +217 -0
- package/src/handlers/blockers.test.ts +390 -0
- package/src/handlers/blockers.ts +110 -0
- package/src/handlers/bodies-of-work.test.ts +1276 -0
- package/src/handlers/bodies-of-work.ts +783 -0
- package/src/handlers/cost.test.ts +436 -0
- package/src/handlers/cost.ts +322 -0
- package/src/handlers/decisions.test.ts +401 -0
- package/src/handlers/decisions.ts +86 -0
- package/src/handlers/deployment.test.ts +516 -0
- package/src/handlers/deployment.ts +1289 -0
- package/src/handlers/discovery.test.ts +254 -0
- package/src/handlers/discovery.ts +969 -0
- package/src/handlers/fallback.test.ts +687 -0
- package/src/handlers/fallback.ts +260 -0
- package/src/handlers/findings.test.ts +565 -0
- package/src/handlers/findings.ts +153 -0
- package/src/handlers/ideas.test.ts +753 -0
- package/src/handlers/ideas.ts +247 -0
- package/src/handlers/index.ts +69 -0
- package/src/handlers/milestones.test.ts +584 -0
- package/src/handlers/milestones.ts +217 -0
- package/src/handlers/organizations.test.ts +997 -0
- package/src/handlers/organizations.ts +550 -0
- package/src/handlers/progress.test.ts +369 -0
- package/src/handlers/progress.ts +188 -0
- package/src/handlers/project.test.ts +562 -0
- package/src/handlers/project.ts +352 -0
- package/src/handlers/requests.test.ts +531 -0
- package/src/handlers/requests.ts +150 -0
- package/src/handlers/session.test.ts +459 -0
- package/src/handlers/session.ts +912 -0
- package/src/handlers/tasks.test.ts +602 -0
- package/src/handlers/tasks.ts +1393 -0
- package/src/handlers/types.ts +88 -0
- package/src/handlers/validation.test.ts +880 -0
- package/src/handlers/validation.ts +223 -0
- package/src/index.ts +3205 -0
- package/src/knowledge.ts +132 -0
- package/src/tmpclaude-0078-cwd +1 -0
- package/src/tmpclaude-0ee1-cwd +1 -0
- package/src/tmpclaude-2dd5-cwd +1 -0
- package/src/tmpclaude-344c-cwd +1 -0
- package/src/tmpclaude-3860-cwd +1 -0
- package/src/tmpclaude-4b63-cwd +1 -0
- package/src/tmpclaude-5c73-cwd +1 -0
- package/src/tmpclaude-5ee3-cwd +1 -0
- package/src/tmpclaude-6795-cwd +1 -0
- package/src/tmpclaude-709e-cwd +1 -0
- package/src/tmpclaude-9839-cwd +1 -0
- package/src/tmpclaude-d829-cwd +1 -0
- package/src/tmpclaude-e072-cwd +1 -0
- package/src/tmpclaude-f6ee-cwd +1 -0
- package/src/utils.test.ts +681 -0
- package/src/utils.ts +375 -0
- package/src/validators.test.ts +223 -0
- package/src/validators.ts +122 -0
- package/tmpclaude-0439-cwd +1 -0
- package/tmpclaude-132f-cwd +1 -0
- package/tmpclaude-15bb-cwd +1 -0
- package/tmpclaude-165a-cwd +1 -0
- package/tmpclaude-1ba9-cwd +1 -0
- package/tmpclaude-21a3-cwd +1 -0
- package/tmpclaude-2a38-cwd +1 -0
- package/tmpclaude-2adf-cwd +1 -0
- package/tmpclaude-2f56-cwd +1 -0
- package/tmpclaude-3626-cwd +1 -0
- package/tmpclaude-3727-cwd +1 -0
- package/tmpclaude-40bc-cwd +1 -0
- package/tmpclaude-436f-cwd +1 -0
- package/tmpclaude-4783-cwd +1 -0
- package/tmpclaude-4b6d-cwd +1 -0
- package/tmpclaude-4ba4-cwd +1 -0
- package/tmpclaude-51e6-cwd +1 -0
- package/tmpclaude-5ecf-cwd +1 -0
- package/tmpclaude-6f97-cwd +1 -0
- package/tmpclaude-7fb2-cwd +1 -0
- package/tmpclaude-825c-cwd +1 -0
- package/tmpclaude-8baf-cwd +1 -0
- package/tmpclaude-8d9f-cwd +1 -0
- package/tmpclaude-975c-cwd +1 -0
- package/tmpclaude-9983-cwd +1 -0
- package/tmpclaude-a045-cwd +1 -0
- package/tmpclaude-ac4a-cwd +1 -0
- package/tmpclaude-b593-cwd +1 -0
- package/tmpclaude-b891-cwd +1 -0
- package/tmpclaude-c032-cwd +1 -0
- package/tmpclaude-cf43-cwd +1 -0
- package/tmpclaude-d040-cwd +1 -0
- package/tmpclaude-dcdd-cwd +1 -0
- package/tmpclaude-dcee-cwd +1 -0
- package/tmpclaude-e16b-cwd +1 -0
- package/tmpclaude-ecd2-cwd +1 -0
- package/tmpclaude-f48d-cwd +1 -0
- package/tsconfig.json +16 -0
- package/vitest.config.ts +13 -0
@@ -0,0 +1,1289 @@
/**
 * Deployment Handlers
 *
 * Handles deployment coordination and requirements:
 * - request_deployment
 * - claim_deployment_validation
 * - report_validation
 * - check_deployment_status
 * - start_deployment
 * - complete_deployment
 * - cancel_deployment
 * - add_deployment_requirement
 * - complete_deployment_requirement
 * - get_deployment_requirements
 */

import type { Handler, HandlerRegistry } from './types.js';
import {
  ValidationError,
  validateRequired,
  validateUUID,
  validateEnvironment,
} from '../validators.js';

export const requestDeployment: Handler = async (args, ctx) => {
  const { project_id, environment = 'production', version_bump = 'patch', notes, git_ref } = args as {
    project_id: string;
    environment?: string;
    version_bump?: 'patch' | 'minor' | 'major';
    notes?: string;
    git_ref?: string;
  };

  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');
  validateEnvironment(environment);

  if (version_bump && !['patch', 'minor', 'major'].includes(version_bump)) {
    throw new ValidationError('Invalid version_bump value', {
      field: 'version_bump',
      validValues: ['patch', 'minor', 'major'],
      hint: 'Must be one of: patch, minor, major',
    });
  }

  // Check for existing active deployment
  const { data: existingDeployment } = await supabase
    .from('deployments')
    .select('id, status')
    .eq('project_id', project_id)
    .not('status', 'in', '("deployed","failed")')
    .single();

  if (existingDeployment) {
    return {
      result: {
        success: false,
        error: 'A deployment is already in progress',
        existing_deployment_id: existingDeployment.id,
        existing_status: existingDeployment.status,
        hint: 'Wait for the current deployment to complete or cancel it first',
      },
    };
  }

  // Check for unvalidated completed tasks
  const { data: unvalidatedTasks } = await supabase
    .from('tasks')
    .select('id, title, completed_at, completed_by_session_id')
    .eq('project_id', project_id)
    .eq('status', 'completed')
    .is('validated_at', null)
    .order('completed_at', { ascending: true });

  if (unvalidatedTasks && unvalidatedTasks.length > 0) {
    return {
      result: {
        success: false,
        error: 'Cannot deploy: There are unvalidated completed tasks',
        unvalidated_tasks: unvalidatedTasks.map(t => ({
          id: t.id,
          title: t.title,
          completed_at: t.completed_at,
        })),
        unvalidated_count: unvalidatedTasks.length,
        hint: 'All completed tasks must be validated before deployment. Use validate_task to review each task.',
        action: `Call validate_task(task_id: "${unvalidatedTasks[0].id}", approved: true/false, validation_notes: "...")`,
      },
    };
  }

  // Get current version from project
  const { data: project } = await supabase
    .from('projects')
    .select('current_version')
    .eq('id', project_id)
    .single();

  const currentVersion = project?.current_version || '0.0.0';

  // Create new deployment
  const { data: deployment, error } = await supabase
    .from('deployments')
    .insert({
      project_id,
      environment,
      version_bump,
      notes,
      git_ref,
      requested_by: 'agent',
      requesting_agent_session_id: currentSessionId,
    })
    .select()
    .single();

  if (error) throw error;

  // Auto-convert pending deployment requirements to tasks
  const { data: pendingRequirements } = await supabase
    .from('deployment_requirements')
    .select('id, type, title, description, stage, blocking')
    .eq('project_id', project_id)
    .eq('status', 'pending')
    .is('converted_task_id', null);

  const convertedTasks: Array<{ task_id: string; requirement_id: string; title: string }> = [];

  if (pendingRequirements && pendingRequirements.length > 0) {
    for (const req of pendingRequirements) {
      const isDeployStage = req.stage === 'deployment';
      const isBlocking = req.blocking ?? isDeployStage;
      const titlePrefix = isBlocking
        ? 'DEPLOY:'
        : isDeployStage
          ? 'DEPLOY:'
          : req.stage === 'verification'
            ? 'VERIFY:'
            : 'PREP:';

      // Create linked task
      const { data: newTask } = await supabase
        .from('tasks')
        .insert({
          project_id,
          title: `${titlePrefix} ${req.title}`,
          description: `[${req.type}] ${req.description || req.title}`,
          priority: 1,
          status: 'pending',
          blocking: isBlocking,
          created_by: 'agent',
          created_by_session_id: currentSessionId,
        })
        .select('id')
        .single();

      if (newTask) {
        // Link task to requirement WITHOUT changing status
        // This keeps the requirement visible in the deployment steps list (permanent)
        await supabase
          .from('deployment_requirements')
          .update({
            converted_task_id: newTask.id,
          })
          .eq('id', req.id);

        convertedTasks.push({
          task_id: newTask.id,
          requirement_id: req.id,
          title: `${titlePrefix} ${req.title}`,
        });
      }
    }
  }

  // Log progress
  const convertedMsg = convertedTasks.length > 0
    ? ` (${convertedTasks.length} requirements converted to tasks)`
    : '';
  await supabase.from('progress_logs').insert({
    project_id,
    summary: `Deployment requested for ${environment} (${version_bump} bump from ${currentVersion})${convertedMsg}`,
    details: notes || undefined,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  return {
    result: {
      success: true,
      deployment_id: deployment.id,
      status: deployment.status,
      environment: deployment.environment,
      version_bump,
      current_version: currentVersion,
      converted_requirements: convertedTasks.length,
      converted_tasks: convertedTasks.length > 0 ? convertedTasks : undefined,
      message: convertedTasks.length > 0
        ? `Deployment created. ${convertedTasks.length} requirements converted to tasks. Run build/tests then call claim_deployment_validation.`
        : 'Deployment created. Run build/tests then call claim_deployment_validation.',
    },
  };
};

export const claimDeploymentValidation: Handler = async (args, ctx) => {
  const { project_id } = args as { project_id: string };
  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  // Find pending deployment
  const { data: deployment, error: fetchError } = await supabase
    .from('deployments')
    .select('*')
    .eq('project_id', project_id)
    .eq('status', 'pending')
    .single();

  if (fetchError || !deployment) {
    return {
      result: {
        success: false,
        error: 'No pending deployment found',
        hint: 'Use request_deployment to create a deployment first, or check_deployment_status to see current state',
      },
    };
  }

  // Claim validation
  const { data: updated, error: updateError } = await supabase
    .from('deployments')
    .update({
      status: 'validating',
      validation_agent_session_id: currentSessionId,
      validation_started_at: new Date().toISOString(),
    })
    .eq('id', deployment.id)
    .eq('status', 'pending')
    .select()
    .single();

  if (updateError || !updated) {
    return {
      result: {
        success: false,
        error: 'Failed to claim validation - deployment may have been claimed by another agent',
      },
    };
  }

  return {
    result: {
      success: true,
      deployment_id: deployment.id,
      status: 'validating',
      message: 'Validation claimed. Run build and tests, then call report_validation with results.',
    },
  };
};

export const reportValidation: Handler = async (args, ctx) => {
  const { project_id, build_passed, tests_passed, error_message } = args as {
    project_id: string;
    build_passed: boolean;
    tests_passed?: boolean;
    error_message?: string;
  };

  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');
  if (build_passed === undefined) {
    throw new ValidationError('build_passed is required', {
      field: 'build_passed',
      hint: 'Set to true if the build succeeded, false otherwise',
    });
  }

  // Find validating deployment
  const { data: deployment, error: fetchError } = await supabase
    .from('deployments')
    .select('id')
    .eq('project_id', project_id)
    .eq('status', 'validating')
    .single();

  if (fetchError || !deployment) {
    return {
      result: {
        success: false,
        error: 'No deployment being validated. Use claim_deployment_validation first.',
      },
    };
  }

  const validationPassed = build_passed && (tests_passed !== false);
  const newStatus = validationPassed ? 'ready' : 'failed';

  const { error: updateError } = await supabase
    .from('deployments')
    .update({
      status: newStatus,
      build_passed,
      tests_passed: tests_passed ?? null,
      validation_completed_at: new Date().toISOString(),
      validation_error: error_message || null,
    })
    .eq('id', deployment.id);

  if (updateError) throw updateError;

  // Log result
  await supabase.from('progress_logs').insert({
    project_id,
    summary: validationPassed
      ? `Deployment validation passed - ready to deploy`
      : `Deployment validation failed: ${error_message || 'build/tests failed'}`,
    details: `Build: ${build_passed ? 'passed' : 'failed'}, Tests: ${tests_passed === undefined ? 'skipped' : tests_passed ? 'passed' : 'failed'}`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  // Auto-create task for failed validation
  let createdTaskId: string | null = null;
  if (!validationPassed) {
    const failureType = !build_passed ? 'build' : 'test';
    const { data: newTask } = await supabase
      .from('tasks')
      .insert({
        project_id,
        title: `Fix ${failureType} failure`,
        description: error_message || `${failureType} failed during deployment validation`,
        priority: 1,
        status: 'pending',
        created_by: 'agent',
        created_by_session_id: currentSessionId,
        estimated_minutes: 30,
      })
      .select('id')
      .single();

    createdTaskId = newTask?.id || null;
  }

  return {
    result: {
      success: true,
      status: newStatus,
      passed: validationPassed,
      ...(createdTaskId && { fix_task_id: createdTaskId }),
    },
  };
};

export const checkDeploymentStatus: Handler = async (args, ctx) => {
  const { project_id } = args as { project_id: string };
  const { supabase } = ctx;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  // Get most recent deployment
  const { data: deployment, error } = await supabase
    .from('deployments')
    .select('*')
    .eq('project_id', project_id)
    .order('created_at', { ascending: false })
    .limit(1)
    .single();

  if (error || !deployment) {
    return {
      result: {
        has_deployment: false,
        message: 'No deployments found for this project',
      },
    };
  }

  // Auto-timeout stale deployments
  const DEPLOYMENT_TIMEOUT_MS: Record<string, number> = {
    pending: 30 * 60 * 1000,
    validating: 15 * 60 * 1000,
    ready: 30 * 60 * 1000,
    deploying: 10 * 60 * 1000,
  };

  if (!['deployed', 'failed'].includes(deployment.status)) {
    const timeout = DEPLOYMENT_TIMEOUT_MS[deployment.status];
    if (timeout) {
      const startTime =
        deployment.status === 'deploying'
          ? deployment.deployment_started_at
          : deployment.status === 'validating'
            ? deployment.validation_started_at
            : deployment.created_at;

      if (startTime && Date.now() - new Date(startTime).getTime() > timeout) {
        const timeoutError = `Timed out: deployment was stuck in '${deployment.status}' state for too long`;
        await supabase
          .from('deployments')
          .update({
            status: 'failed',
            deployment_error: timeoutError,
            deployment_completed_at: new Date().toISOString(),
          })
          .eq('id', deployment.id);

        deployment.status = 'failed';
        deployment.deployment_error = timeoutError;
      }
    }
  }

  return {
    result: {
      has_deployment: true,
      deployment: {
        id: deployment.id,
        status: deployment.status,
        environment: deployment.environment,
        requested_by: deployment.requested_by,
        build_passed: deployment.build_passed,
        tests_passed: deployment.tests_passed,
        validation_error: deployment.validation_error,
        deployment_error: deployment.deployment_error,
        deployment_summary: deployment.deployment_summary,
        notes: deployment.notes,
        git_ref: deployment.git_ref,
        created_at: deployment.created_at,
        validation_started_at: deployment.validation_started_at,
        validation_completed_at: deployment.validation_completed_at,
        deployment_started_at: deployment.deployment_started_at,
        deployment_completed_at: deployment.deployment_completed_at,
      },
    },
  };
};

export const startDeployment: Handler = async (args, ctx) => {
  const { project_id } = args as { project_id: string };
  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  // Find ready deployment and project deployment instructions
  const [deploymentResult, projectResult] = await Promise.all([
    supabase
      .from('deployments')
      .select('id, environment')
      .eq('project_id', project_id)
      .eq('status', 'ready')
      .single(),
    supabase
      .from('projects')
      .select('deployment_instructions, git_main_branch')
      .eq('id', project_id)
      .single(),
  ]);

  if (deploymentResult.error || !deploymentResult.data) {
    return {
      result: {
        success: false,
        error: 'No deployment ready. Must pass validation first.',
      },
    };
  }

  const deployment = deploymentResult.data;
  const project = projectResult.data;

  const { error: updateError } = await supabase
    .from('deployments')
    .update({
      status: 'deploying',
      deployment_started_at: new Date().toISOString(),
    })
    .eq('id', deployment.id);

  if (updateError) throw updateError;

  await supabase.from('progress_logs').insert({
    project_id,
    summary: `Deployment to ${deployment.environment} started`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  const result: Record<string, unknown> = {
    success: true,
    status: 'deploying',
    env: deployment.environment,
  };

  if (project?.deployment_instructions) {
    result.instructions = project.deployment_instructions;
  } else {
    result.instructions = `No deployment instructions configured. Common steps:\n1. Push to ${project?.git_main_branch || 'main'} branch\n2. Or run your deploy command (e.g., fly deploy, vercel deploy)\n3. Call complete_deployment when done`;
  }

  return { result };
};

export const completeDeployment: Handler = async (args, ctx) => {
  const { project_id, success, summary } = args as {
    project_id: string;
    success: boolean;
    summary?: string;
  };

  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');
  if (success === undefined) {
    throw new ValidationError('success is required', {
      field: 'success',
      hint: 'Set to true if deployment succeeded, false otherwise',
    });
  }

  // Find deploying deployment
  const { data: deployment, error: fetchError } = await supabase
    .from('deployments')
    .select('id, environment, version_bump')
    .eq('project_id', project_id)
    .eq('status', 'deploying')
    .single();

  if (fetchError || !deployment) {
    return {
      result: {
        success: false,
        error: 'No deployment in progress. Use start_deployment first.',
      },
    };
  }

  const newStatus = success ? 'deployed' : 'failed';
  let newVersion: string | null = null;

  // If successful, calculate and store new version
  if (success) {
    const { data: project } = await supabase
      .from('projects')
      .select('current_version')
      .eq('id', project_id)
      .single();

    const currentVersion = project?.current_version || '0.0.0';
    const versionBump = deployment.version_bump || 'patch';
    const parts = currentVersion.split('.').map((p: string) => parseInt(p, 10) || 0);
    let [major, minor, patch] = [parts[0] || 0, parts[1] || 0, parts[2] || 0];

    switch (versionBump) {
      case 'major': major += 1; minor = 0; patch = 0; break;
      case 'minor': minor += 1; patch = 0; break;
      default: patch += 1;
    }

    newVersion = `${major}.${minor}.${patch}`;

    await supabase
      .from('projects')
      .update({ current_version: newVersion })
      .eq('id', project_id);
  }

  const { error: updateError } = await supabase
    .from('deployments')
    .update({
      status: newStatus,
      version: newVersion,
      deployment_completed_at: new Date().toISOString(),
      deployment_summary: summary || null,
    })
    .eq('id', deployment.id);

  if (updateError) throw updateError;

  await supabase.from('progress_logs').insert({
    project_id,
    summary: success
      ? `Deployed to ${deployment.environment}${newVersion ? ` v${newVersion}` : ''}`
      : `Deployment failed`,
    details: summary || undefined,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  return {
    result: {
      success: true,
      status: newStatus,
      ...(newVersion && { version: newVersion }),
    },
  };
};

export const cancelDeployment: Handler = async (args, ctx) => {
  const { project_id, reason } = args as { project_id: string; reason?: string };
  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  const { data: deployment, error: fetchError } = await supabase
    .from('deployments')
    .select('id')
    .eq('project_id', project_id)
    .not('status', 'in', '("deployed","failed")')
    .single();

  if (fetchError || !deployment) {
    return { result: { success: false, error: 'No active deployment' } };
  }

  const { error: updateError } = await supabase
    .from('deployments')
    .update({
      status: 'failed',
      deployment_error: `Cancelled: ${reason || 'unspecified'}`,
      deployment_completed_at: new Date().toISOString(),
    })
    .eq('id', deployment.id);

  if (updateError) throw updateError;

  await supabase.from('progress_logs').insert({
    project_id,
    summary: `Deployment cancelled${reason ? `: ${reason}` : ''}`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  return { result: { success: true } };
};

export const addDeploymentRequirement: Handler = async (args, ctx) => {
  const { project_id, type, title, description, file_path, stage = 'preparation', blocking = false } = args as {
    project_id: string;
    type: string;
    title: string;
    description?: string;
    file_path?: string;
    stage?: string;
    blocking?: boolean;
  };

  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');
  validateRequired(type, 'type');
  validateRequired(title, 'title');

  const validTypes = ['migration', 'env_var', 'config', 'manual', 'breaking_change', 'agent_task'];
  if (!validTypes.includes(type)) {
    throw new ValidationError(`type must be one of: ${validTypes.join(', ')}`);
  }

  const validStages = ['preparation', 'deployment', 'verification'];
  if (!validStages.includes(stage)) {
    throw new ValidationError(`stage must be one of: ${validStages.join(', ')}`);
  }

  const { data: requirement, error } = await supabase
    .from('deployment_requirements')
    .insert({
      project_id,
      type,
      title,
      description: description || null,
      file_path: file_path || null,
      stage,
      blocking,
      created_by_session_id: currentSessionId,
    })
    .select('id, type, title, stage, blocking')
    .single();

  if (error) throw new Error(`Failed to add requirement: ${error.message}`);

  const blockingText = blocking ? ' (BLOCKING)' : '';
  await supabase.from('progress_logs').insert({
    project_id,
    summary: `Added ${stage} deployment requirement${blockingText}: ${title}`,
    details: `Type: ${type}, Stage: ${stage}${blocking ? ', Blocking: true' : ''}${file_path ? `, File: ${file_path}` : ''}`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  const stageMessage = blocking
    ? 'Will block all other work when converted to task.'
    : stage === 'deployment'
      ? 'Will run during deployment.'
      : stage === 'verification'
        ? 'Will run after deployment for verification.'
        : 'Will run during preparation phase.';

  return {
    result: {
      success: true,
      requirement_id: requirement.id,
      stage: requirement.stage,
      message: `Added ${type} requirement. ${stageMessage}`,
    },
  };
};

export const completeDeploymentRequirement: Handler = async (args, ctx) => {
  const { requirement_id } = args as { requirement_id: string };
  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(requirement_id, 'requirement_id');
  validateUUID(requirement_id, 'requirement_id');

  const { data: requirement, error: fetchError } = await supabase
    .from('deployment_requirements')
    .select('id, title, status')
    .eq('id', requirement_id)
    .single();

  if (fetchError || !requirement) {
    throw new Error('Requirement not found');
  }

  if (requirement.status !== 'pending') {
    return {
      result: {
        success: false,
        error: `Requirement is already ${requirement.status}`,
      },
    };
  }

  const { error: updateError } = await supabase
    .from('deployment_requirements')
    .update({
      status: 'completed',
      completed_at: new Date().toISOString(),
      completed_by: currentSessionId || 'agent',
    })
    .eq('id', requirement_id);

  if (updateError) throw updateError;

  return {
    result: {
      success: true,
      requirement_id,
      title: requirement.title,
    },
  };
};

export const getDeploymentRequirements: Handler = async (args, ctx) => {
  const { project_id, status = 'pending', stage } = args as {
    project_id: string;
    status?: string;
    stage?: string;
  };

  const { supabase } = ctx;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  let query = supabase
    .from('deployment_requirements')
    .select('id, type, title, description, file_path, status, stage, blocking, created_at, completed_at')
    .eq('project_id', project_id)
    .order('stage', { ascending: true })
    .order('created_at', { ascending: false });

  if (status !== 'all') {
    query = query.eq('status', status);
  }

  if (stage && stage !== 'all') {
    query = query.eq('stage', stage);
  }

  const { data: requirements, error } = await query;

  if (error) throw new Error(`Failed to fetch requirements: ${error.message}`);

  const preparationPending = requirements?.filter(r => r.status === 'pending' && r.stage === 'preparation').length || 0;
  const deploymentPending = requirements?.filter(r => r.status === 'pending' && r.stage === 'deployment').length || 0;

  return {
    result: {
      requirements: requirements || [],
      preparation_pending: preparationPending,
      deployment_pending: deploymentPending,
      deployment_blocked: preparationPending > 0 || deploymentPending > 0,
    },
  };
};

// ============================================================================
// Scheduled Deployments
// ============================================================================

export const scheduleDeployment: Handler = async (args, ctx) => {
  const {
    project_id,
    environment = 'production',
    version_bump = 'patch',
    schedule_type = 'once',
    scheduled_at,
    auto_trigger = true,
    notes,
    git_ref,
  } = args as {
    project_id: string;
    environment?: string;
    version_bump?: string;
    schedule_type?: string;
    scheduled_at: string;
    auto_trigger?: boolean;
    notes?: string;
    git_ref?: string;
  };

  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');
  validateRequired(scheduled_at, 'scheduled_at');
  validateEnvironment(environment);

  if (!['patch', 'minor', 'major'].includes(version_bump)) {
    throw new ValidationError('Invalid version_bump value', {
      field: 'version_bump',
      validValues: ['patch', 'minor', 'major'],
    });
  }

  if (!['once', 'daily', 'weekly', 'monthly'].includes(schedule_type)) {
    throw new ValidationError('Invalid schedule_type value', {
      field: 'schedule_type',
      validValues: ['once', 'daily', 'weekly', 'monthly'],
    });
  }

  // Parse and validate scheduled_at
  const scheduledDate = new Date(scheduled_at);
  if (isNaN(scheduledDate.getTime())) {
    throw new ValidationError('Invalid scheduled_at date format', {
      field: 'scheduled_at',
      hint: 'Use ISO 8601 format, e.g., 2025-01-15T14:00:00Z',
    });
  }

  if (scheduledDate.getTime() <= Date.now()) {
    throw new ValidationError('scheduled_at must be in the future', {
      field: 'scheduled_at',
    });
  }

  // Create scheduled deployment
  const { data: schedule, error } = await supabase
    .from('scheduled_deployments')
    .insert({
      project_id,
      environment,
      version_bump,
      schedule_type,
      scheduled_at: scheduledDate.toISOString(),
      auto_trigger,
      notes: notes || null,
      git_ref: git_ref || null,
      created_by: 'agent',
      created_by_session_id: currentSessionId,
    })
    .select('id, scheduled_at, schedule_type')
    .single();

  if (error) throw new Error(`Failed to create schedule: ${error.message}`);

  // Log progress
  await supabase.from('progress_logs').insert({
    project_id,
    summary: `Scheduled ${schedule_type} deployment to ${environment} for ${scheduledDate.toISOString()}`,
    details: `Auto-trigger: ${auto_trigger}, Version bump: ${version_bump}`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  return {
    result: {
      success: true,
      schedule_id: schedule.id,
      scheduled_at: schedule.scheduled_at,
      schedule_type: schedule.schedule_type,
      auto_trigger,
      message: auto_trigger
        ? 'Deployment scheduled. Will trigger automatically when time arrives.'
        : 'Deployment scheduled. Manual trigger required from dashboard.',
    },
  };
};

export const getScheduledDeployments: Handler = async (args, ctx) => {
  const { project_id, include_disabled = false } = args as {
    project_id: string;
    include_disabled?: boolean;
  };

  const { supabase } = ctx;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  let query = supabase
    .from('scheduled_deployments')
    .select('*')
    .eq('project_id', project_id)
    .order('scheduled_at', { ascending: true });

  if (!include_disabled) {
    query = query.eq('enabled', true);
  }

  const { data: schedules, error } = await query;

  if (error) throw new Error(`Failed to fetch schedules: ${error.message}`);

  const now = new Date();
  const schedulesWithStatus = (schedules || []).map(s => ({
    ...s,
    is_due: s.enabled && new Date(s.scheduled_at) <= now,
  }));

  const dueCount = schedulesWithStatus.filter(s => s.is_due && s.auto_trigger).length;

  return {
    result: {
      schedules: schedulesWithStatus,
      count: schedulesWithStatus.length,
      due_count: dueCount,
      ...(dueCount > 0 && {
        hint: 'There are due schedules. Call trigger_scheduled_deployment to execute.',
      }),
    },
  };
};

export const updateScheduledDeployment: Handler = async (args, ctx) => {
  const {
    schedule_id,
    environment,
    version_bump,
    schedule_type,
    scheduled_at,
    auto_trigger,
    enabled,
    notes,
    git_ref,
  } = args as {
    schedule_id: string;
    environment?: string;
    version_bump?: string;
    schedule_type?: string;
    scheduled_at?: string;
    auto_trigger?: boolean;
    enabled?: boolean;
    notes?: string;
    git_ref?: string;
  };

  const { supabase } = ctx;

  validateRequired(schedule_id, 'schedule_id');
  validateUUID(schedule_id, 'schedule_id');

  const updates: Record<string, unknown> = {};

  if (environment !== undefined) {
    validateEnvironment(environment);
    updates.environment = environment;
  }

  if (version_bump !== undefined) {
    if (!['patch', 'minor', 'major'].includes(version_bump)) {
      throw new ValidationError('Invalid version_bump value');
    }
    updates.version_bump = version_bump;
  }

  if (schedule_type !== undefined) {
    if (!['once', 'daily', 'weekly', 'monthly'].includes(schedule_type)) {
      throw new ValidationError('Invalid schedule_type value');
    }
    updates.schedule_type = schedule_type;
  }

  if (scheduled_at !== undefined) {
    const scheduledDate = new Date(scheduled_at);
    if (isNaN(scheduledDate.getTime())) {
      throw new ValidationError('Invalid scheduled_at date format');
    }
    updates.scheduled_at = scheduledDate.toISOString();
  }

  if (auto_trigger !== undefined) updates.auto_trigger = auto_trigger;
  if (enabled !== undefined) updates.enabled = enabled;
  if (notes !== undefined) updates.notes = notes;
  if (git_ref !== undefined) updates.git_ref = git_ref;

  if (Object.keys(updates).length === 0) {
    return { result: { success: false, error: 'No updates provided' } };
  }

  const { error } = await supabase
    .from('scheduled_deployments')
    .update(updates)
    .eq('id', schedule_id);

  if (error) throw new Error(`Failed to update schedule: ${error.message}`);

  return { result: { success: true, schedule_id } };
};

export const deleteScheduledDeployment: Handler = async (args, ctx) => {
  const { schedule_id } = args as { schedule_id: string };
  const { supabase } = ctx;

  validateRequired(schedule_id, 'schedule_id');
  validateUUID(schedule_id, 'schedule_id');

  const { error } = await supabase
    .from('scheduled_deployments')
    .delete()
    .eq('id', schedule_id);

  if (error) throw new Error(`Failed to delete schedule: ${error.message}`);

  return { result: { success: true } };
};

export const triggerScheduledDeployment: Handler = async (args, ctx) => {
  const { schedule_id } = args as { schedule_id: string };
  const { supabase, session } = ctx;
  const currentSessionId = session.currentSessionId;

  validateRequired(schedule_id, 'schedule_id');
  validateUUID(schedule_id, 'schedule_id');

  // Get the schedule
  const { data: schedule, error: fetchError } = await supabase
    .from('scheduled_deployments')
    .select('*')
    .eq('id', schedule_id)
    .single();

  if (fetchError || !schedule) {
    return { result: { success: false, error: 'Schedule not found' } };
  }

  if (!schedule.enabled) {
    return { result: { success: false, error: 'Schedule is disabled' } };
  }

  // Check for existing active deployment
  const { data: existingDeployment } = await supabase
    .from('deployments')
    .select('id, status')
    .eq('project_id', schedule.project_id)
    .not('status', 'in', '("deployed","failed")')
    .single();

  if (existingDeployment) {
    return {
      result: {
        success: false,
        error: 'A deployment is already in progress',
        existing_deployment_id: existingDeployment.id,
        hint: 'Wait for current deployment to complete or cancel it first',
      },
    };
  }

  // Create the deployment (similar to request_deployment)
  const { data: deployment, error: deployError } = await supabase
    .from('deployments')
    .insert({
      project_id: schedule.project_id,
      environment: schedule.environment,
      version_bump: schedule.version_bump,
      notes: schedule.notes,
      git_ref: schedule.git_ref,
      requested_by: 'agent',
      requesting_agent_session_id: currentSessionId,
    })
    .select('id, status')
    .single();

  if (deployError) throw new Error(`Failed to create deployment: ${deployError.message}`);

  // Auto-convert pending deployment requirements to tasks
  const { data: pendingRequirements } = await supabase
    .from('deployment_requirements')
    .select('id, type, title, description, stage, blocking')
    .eq('project_id', schedule.project_id)
    .eq('status', 'pending')
    .is('converted_task_id', null);

  const convertedTasks: Array<{ task_id: string; requirement_id: string; title: string }> = [];

  if (pendingRequirements && pendingRequirements.length > 0) {
    for (const req of pendingRequirements) {
      const isDeployStage = req.stage === 'deployment';
      const isBlocking = req.blocking ?? isDeployStage;
      const titlePrefix = isBlocking
        ? 'DEPLOY:'
        : isDeployStage
          ? 'DEPLOY:'
          : req.stage === 'verification'
            ? 'VERIFY:'
            : 'PREP:';

      // Create linked task
      const { data: newTask } = await supabase
        .from('tasks')
        .insert({
          project_id: schedule.project_id,
          title: `${titlePrefix} ${req.title}`,
          description: `[${req.type}] ${req.description || req.title}`,
          priority: 1,
          status: 'pending',
          blocking: isBlocking,
          created_by: 'agent',
          created_by_session_id: currentSessionId,
        })
        .select('id')
        .single();

      if (newTask) {
        // Link task to requirement WITHOUT changing status
        // This keeps the requirement visible in the deployment steps list (permanent)
        await supabase
          .from('deployment_requirements')
          .update({
            converted_task_id: newTask.id,
          })
          .eq('id', req.id);

        convertedTasks.push({
          task_id: newTask.id,
          requirement_id: req.id,
          title: `${titlePrefix} ${req.title}`,
        });
      }
    }
  }

  // Update the schedule
  const scheduleUpdates: Record<string, unknown> = {
    last_triggered_at: new Date().toISOString(),
    last_deployment_id: deployment.id,
    trigger_count: schedule.trigger_count + 1,
  };

  // For recurring schedules, calculate next run time
  if (schedule.schedule_type !== 'once') {
    const currentScheduledAt = new Date(schedule.scheduled_at);
    let nextScheduledAt: Date;

    switch (schedule.schedule_type) {
      case 'daily':
        nextScheduledAt = new Date(currentScheduledAt.getTime() + 24 * 60 * 60 * 1000);
        break;
      case 'weekly':
        nextScheduledAt = new Date(currentScheduledAt.getTime() + 7 * 24 * 60 * 60 * 1000);
        break;
      case 'monthly':
        nextScheduledAt = new Date(currentScheduledAt.getTime() + 30 * 24 * 60 * 60 * 1000);
        break;
      default:
        nextScheduledAt = currentScheduledAt;
    }

    scheduleUpdates.scheduled_at = nextScheduledAt.toISOString();
  } else {
    // One-time schedule, disable it
    scheduleUpdates.enabled = false;
  }

  await supabase
    .from('scheduled_deployments')
    .update(scheduleUpdates)
    .eq('id', schedule_id);

  // Log progress
  const convertedMsg = convertedTasks.length > 0
    ? `, ${convertedTasks.length} requirements converted to tasks`
    : '';
  await supabase.from('progress_logs').insert({
    project_id: schedule.project_id,
    summary: `Triggered scheduled deployment to ${schedule.environment}${convertedMsg}`,
    details: `Schedule: ${schedule.schedule_type}, Trigger #${schedule.trigger_count + 1}`,
    created_by: 'agent',
    created_by_session_id: currentSessionId,
  });

  return {
    result: {
      success: true,
      deployment_id: deployment.id,
      schedule_id,
      schedule_type: schedule.schedule_type,
      next_scheduled_at: schedule.schedule_type !== 'once' ? scheduleUpdates.scheduled_at : null,
      converted_requirements: convertedTasks.length,
      converted_tasks: convertedTasks.length > 0 ? convertedTasks : undefined,
      message: convertedTasks.length > 0
        ? `Deployment created from schedule. ${convertedTasks.length} requirements converted to tasks. Run validation then deploy.`
        : 'Deployment created from schedule. Run validation then deploy.',
    },
  };
};

export const checkDueDeployments: Handler = async (args, ctx) => {
  const { project_id } = args as { project_id: string };
  const { supabase } = ctx;

  validateRequired(project_id, 'project_id');
  validateUUID(project_id, 'project_id');

  // Find schedules that are due (enabled, auto_trigger, and scheduled_at <= now)
  const { data: dueSchedules, error } = await supabase
    .from('scheduled_deployments')
    .select('id, environment, version_bump, schedule_type, scheduled_at')
    .eq('project_id', project_id)
    .eq('enabled', true)
    .eq('auto_trigger', true)
    .lte('scheduled_at', new Date().toISOString())
    .order('scheduled_at', { ascending: true });

  if (error) throw new Error(`Failed to check schedules: ${error.message}`);

  return {
    result: {
      due_schedules: dueSchedules || [],
      count: dueSchedules?.length || 0,
      ...(dueSchedules && dueSchedules.length > 0 && {
        hint: `Call trigger_scheduled_deployment(schedule_id: "${dueSchedules[0].id}") to trigger the first due deployment`,
      }),
    },
  };
};

/**
 * Deployment handlers registry
 */
export const deploymentHandlers: HandlerRegistry = {
  request_deployment: requestDeployment,
  claim_deployment_validation: claimDeploymentValidation,
  report_validation: reportValidation,
  check_deployment_status: checkDeploymentStatus,
  start_deployment: startDeployment,
  complete_deployment: completeDeployment,
  cancel_deployment: cancelDeployment,
  add_deployment_requirement: addDeploymentRequirement,
  complete_deployment_requirement: completeDeploymentRequirement,
  get_deployment_requirements: getDeploymentRequirements,
  // Scheduled deployments
  schedule_deployment: scheduleDeployment,
  get_scheduled_deployments: getScheduledDeployments,
  update_scheduled_deployment: updateScheduledDeployment,
  delete_scheduled_deployment: deleteScheduledDeployment,
  trigger_scheduled_deployment: triggerScheduledDeployment,
  check_due_deployments: checkDueDeployments,
};