claude-autopm 1.18.0 → 1.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/README.md +159 -0
  2. package/autopm/.claude/agents/core/mcp-manager.md +1 -1
  3. package/autopm/.claude/commands/pm/context.md +11 -0
  4. package/autopm/.claude/commands/pm/epic-decompose.md +25 -2
  5. package/autopm/.claude/commands/pm/epic-oneshot.md +13 -0
  6. package/autopm/.claude/commands/pm/epic-start.md +19 -0
  7. package/autopm/.claude/commands/pm/epic-sync-modular.md +10 -10
  8. package/autopm/.claude/commands/pm/epic-sync.md +14 -14
  9. package/autopm/.claude/commands/pm/issue-start.md +50 -5
  10. package/autopm/.claude/commands/pm/issue-sync.md +15 -15
  11. package/autopm/.claude/commands/pm/what-next.md +11 -0
  12. package/autopm/.claude/mcp/MCP-REGISTRY.md +1 -1
  13. package/autopm/.claude/scripts/azure/active-work.js +2 -2
  14. package/autopm/.claude/scripts/azure/blocked.js +13 -13
  15. package/autopm/.claude/scripts/azure/daily.js +1 -1
  16. package/autopm/.claude/scripts/azure/dashboard.js +1 -1
  17. package/autopm/.claude/scripts/azure/feature-list.js +2 -2
  18. package/autopm/.claude/scripts/azure/feature-status.js +1 -1
  19. package/autopm/.claude/scripts/azure/next-task.js +1 -1
  20. package/autopm/.claude/scripts/azure/search.js +1 -1
  21. package/autopm/.claude/scripts/azure/setup.js +15 -15
  22. package/autopm/.claude/scripts/azure/sprint-report.js +2 -2
  23. package/autopm/.claude/scripts/azure/sync.js +1 -1
  24. package/autopm/.claude/scripts/azure/us-list.js +1 -1
  25. package/autopm/.claude/scripts/azure/us-status.js +1 -1
  26. package/autopm/.claude/scripts/azure/validate.js +13 -13
  27. package/autopm/.claude/scripts/lib/frontmatter-utils.sh +42 -7
  28. package/autopm/.claude/scripts/lib/logging-utils.sh +20 -16
  29. package/autopm/.claude/scripts/lib/validation-utils.sh +1 -1
  30. package/autopm/.claude/scripts/pm/context.js +338 -0
  31. package/autopm/.claude/scripts/pm/issue-sync/format-comment.sh +3 -3
  32. package/autopm/.claude/scripts/pm/lib/README.md +85 -0
  33. package/autopm/.claude/scripts/pm/lib/logger.js +78 -0
  34. package/autopm/.claude/scripts/pm/next.js +25 -1
  35. package/autopm/.claude/scripts/pm/what-next.js +660 -0
  36. package/bin/autopm.js +25 -0
  37. package/package.json +1 -1
  38. package/lib/agentExecutor.js.deprecated +0 -101
  39. package/lib/azure/cache.js +0 -80
  40. package/lib/azure/client.js +0 -77
  41. package/lib/azure/formatter.js +0 -177
  42. package/lib/commandHelpers.js +0 -177
  43. package/lib/context/manager.js +0 -290
  44. package/lib/documentation/manager.js +0 -528
  45. package/lib/github/workflow-manager.js +0 -546
  46. package/lib/helpers/azure-batch-api.js +0 -133
  47. package/lib/helpers/azure-cache-manager.js +0 -287
  48. package/lib/helpers/azure-parallel-processor.js +0 -158
  49. package/lib/helpers/azure-work-item-create.js +0 -278
  50. package/lib/helpers/gh-issue-create.js +0 -250
  51. package/lib/helpers/interactive-prompt.js +0 -336
  52. package/lib/helpers/output-manager.js +0 -335
  53. package/lib/helpers/progress-indicator.js +0 -258
  54. package/lib/performance/benchmarker.js +0 -429
  55. package/lib/pm/epic-decomposer.js +0 -273
  56. package/lib/pm/epic-syncer.js +0 -221
  57. package/lib/prdMetadata.js +0 -270
  58. package/lib/providers/azure/index.js +0 -234
  59. package/lib/providers/factory.js +0 -87
  60. package/lib/providers/github/index.js +0 -204
  61. package/lib/providers/interface.js +0 -73
  62. package/lib/python/scaffold-manager.js +0 -576
  63. package/lib/react/scaffold-manager.js +0 -745
  64. package/lib/regression/analyzer.js +0 -578
  65. package/lib/release/manager.js +0 -324
  66. package/lib/tailwind/manager.js +0 -486
  67. package/lib/traefik/manager.js +0 -484
  68. package/lib/utils/colors.js +0 -126
  69. package/lib/utils/config.js +0 -317
  70. package/lib/utils/filesystem.js +0 -316
  71. package/lib/utils/logger.js +0 -135
  72. package/lib/utils/prompts.js +0 -294
  73. package/lib/utils/shell.js +0 -237
  74. package/lib/validators/email-validator.js +0 -337
  75. package/lib/workflow/manager.js +0 -449

package/lib/helpers/azure-cache-manager.js (deleted)
@@ -1,287 +0,0 @@
- #!/usr/bin/env node
- /**
-  * Azure DevOps Intelligent Cache Manager
-  * Implements smart caching with preloading and predictive fetching
-  */
-
- const fs = require('fs').promises;
- const path = require('path');
- const crypto = require('crypto');
-
- class CacheManager {
-   constructor(options = {}) {
-     this.cacheDir = options.cacheDir || path.join(process.cwd(), '.claude', 'azure', 'cache');
-     this.maxAge = options.maxAge || 3600000; // 1 hour default
-     this.maxSize = options.maxSize || 100 * 1024 * 1024; // 100MB default
-     this.preloadPatterns = new Map();
-     this.accessLog = new Map();
-   }
-
-   /**
-    * Initialize cache directory
-    */
-   async init() {
-     await fs.mkdir(this.cacheDir, { recursive: true });
-     await fs.mkdir(path.join(this.cacheDir, 'workitems'), { recursive: true });
-     await fs.mkdir(path.join(this.cacheDir, 'queries'), { recursive: true });
-     await fs.mkdir(path.join(this.cacheDir, 'metadata'), { recursive: true });
-     await this.loadAccessLog();
-   }
-
-   /**
-    * Get cached item with intelligent preloading
-    */
-   async get(key, category = 'general') {
-     const filePath = this.getCachePath(key, category);
-
-     try {
-       const stats = await fs.stat(filePath);
-       const age = Date.now() - stats.mtime.getTime();
-
-       if (age > this.maxAge) {
-         await fs.unlink(filePath).catch(() => {});
-         return null;
-       }
-
-       const data = await fs.readFile(filePath, 'utf8');
-
-       // Track access for intelligent preloading
-       this.trackAccess(key, category);
-
-       // Trigger preloading if patterns detected
-       this.triggerPreload(key, category);
-
-       return JSON.parse(data);
-     } catch {
-       return null;
-     }
-   }
-
-   /**
-    * Set cache item
-    */
-   async set(key, value, category = 'general', ttl = null) {
-     const filePath = this.getCachePath(key, category);
-
-     await fs.mkdir(path.dirname(filePath), { recursive: true });
-     await fs.writeFile(filePath, JSON.stringify(value, null, 2));
-
-     if (ttl) {
-       // Store TTL metadata
-       const metaPath = path.join(this.cacheDir, 'metadata', `${key}.meta`);
-       await fs.writeFile(metaPath, JSON.stringify({ ttl, created: Date.now() }));
-     }
-
-     // Cleanup if cache size exceeded
-     await this.enforceSize();
-   }
-
-   /**
-    * Intelligent preloading based on access patterns
-    */
-   async preload(items, fetcher, category = 'workitems') {
-     const uncached = [];
-     const cached = [];
-
-     // Check what's already cached
-     for (const item of items) {
-       const key = typeof item === 'object' ? item.id : item;
-       const cachedItem = await this.get(key, category);
-
-       if (cachedItem) {
-         cached.push(cachedItem);
-       } else {
-         uncached.push(item);
-       }
-     }
-
-     // Fetch uncached items in parallel
-     if (uncached.length > 0 && fetcher) {
-       const freshData = await fetcher(uncached);
-
-       // Cache the fresh data
-       for (let i = 0; i < freshData.length; i++) {
-         if (freshData[i]) {
-           const key = typeof uncached[i] === 'object' ? uncached[i].id : uncached[i];
-           await this.set(key, freshData[i], category);
-           cached.push(freshData[i]);
-         }
-       }
-     }
-
-     return cached;
-   }
-
-   /**
-    * Track access patterns for intelligent preloading
-    * @private
-    */
-   trackAccess(key, category) {
-     const accessKey = `${category}:${key}`;
-     const accesses = this.accessLog.get(accessKey) || [];
-     accesses.push(Date.now());
-
-     // Keep only recent accesses (last 24 hours)
-     const cutoff = Date.now() - 86400000;
-     const recentAccesses = accesses.filter(time => time > cutoff);
-
-     this.accessLog.set(accessKey, recentAccesses);
-   }
-
-   /**
-    * Trigger preloading based on patterns
-    * @private
-    */
-   async triggerPreload(key, category) {
-     // Detect sequential access patterns
-     if (category === 'workitems' && /^\d+$/.test(key)) {
-       const id = parseInt(key);
-       const related = [];
-
-       // Preload adjacent items
-       for (let i = id - 2; i <= id + 2; i++) {
-         if (i !== id && i > 0) {
-           related.push(i.toString());
-         }
-       }
-
-       // Preload in background
-       setImmediate(async () => {
-         for (const relatedId of related) {
-           const cached = await this.get(relatedId, category);
-           if (!cached) {
-             // Mark for background fetch
-             this.preloadPatterns.set(relatedId, Date.now());
-           }
-         }
-       });
-     }
-   }
-
-   /**
-    * Get cache path for key
-    * @private
-    */
-   getCachePath(key, category) {
-     const hashedKey = crypto.createHash('md5').update(key).digest('hex');
-     return path.join(this.cacheDir, category, `${hashedKey}.json`);
-   }
-
-   /**
-    * Enforce cache size limit
-    * @private
-    */
-   async enforceSize() {
-     try {
-       const files = await this.getAllCacheFiles();
-       let totalSize = 0;
-       const fileStats = [];
-
-       for (const file of files) {
-         const stats = await fs.stat(file);
-         totalSize += stats.size;
-         fileStats.push({ path: file, size: stats.size, mtime: stats.mtime });
-       }
-
-       if (totalSize > this.maxSize) {
-         // Sort by modification time (oldest first)
-         fileStats.sort((a, b) => a.mtime - b.mtime);
-
-         // Remove oldest files until under limit
-         for (const file of fileStats) {
-           if (totalSize <= this.maxSize) break;
-
-           await fs.unlink(file.path).catch(() => {});
-           totalSize -= file.size;
-         }
-       }
-     } catch (error) {
-       console.error('Cache cleanup error:', error.message);
-     }
-   }
-
-   /**
-    * Get all cache files
-    * @private
-    */
-   async getAllCacheFiles() {
-     const files = [];
-     const categories = ['workitems', 'queries', 'general'];
-
-     for (const category of categories) {
-       const dir = path.join(this.cacheDir, category);
-       try {
-         const entries = await fs.readdir(dir);
-         files.push(...entries.map(f => path.join(dir, f)));
-       } catch {}
-     }
-
-     return files;
-   }
-
-   /**
-    * Load access log from disk
-    * @private
-    */
-   async loadAccessLog() {
-     const logPath = path.join(this.cacheDir, 'metadata', 'access.log');
-     try {
-       const data = await fs.readFile(logPath, 'utf8');
-       this.accessLog = new Map(JSON.parse(data));
-     } catch {}
-   }
-
-   /**
-    * Save access log to disk
-    */
-   async saveAccessLog() {
-     const logPath = path.join(this.cacheDir, 'metadata', 'access.log');
-     await fs.mkdir(path.dirname(logPath), { recursive: true });
-     await fs.writeFile(logPath, JSON.stringify([...this.accessLog]));
-   }
-
-   /**
-    * Clear all cache
-    */
-   async clear() {
-     await fs.rm(this.cacheDir, { recursive: true, force: true });
-     await this.init();
-   }
-
-   /**
-    * Get cache statistics
-    */
-   async getStats() {
-     const files = await this.getAllCacheFiles();
-     let totalSize = 0;
-     let totalFiles = files.length;
-     let oldestFile = null;
-     let newestFile = null;
-
-     for (const file of files) {
-       try {
-         const stats = await fs.stat(file);
-         totalSize += stats.size;
-
-         if (!oldestFile || stats.mtime < oldestFile.mtime) {
-           oldestFile = { path: file, mtime: stats.mtime };
-         }
-
-         if (!newestFile || stats.mtime > newestFile.mtime) {
-           newestFile = { path: file, mtime: stats.mtime };
-         }
-       } catch {}
-     }
-
-     return {
-       totalFiles,
-       totalSize,
-       totalSizeMB: (totalSize / (1024 * 1024)).toFixed(2),
-       oldestFile,
-       newestFile,
-       utilizationPercent: Math.round((totalSize / this.maxSize) * 100)
-     };
-   }
- }
-
- module.exports = CacheManager;
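
For reference, a minimal usage sketch of the removed CacheManager, based only on the methods visible in the hunk above (init, get, set, preload, getStats, saveAccessLog); the fetchWorkItems helper and the sample payloads are hypothetical stand-ins for whatever fetcher and data a real caller supplied.

const CacheManager = require('./azure-cache-manager'); // path as it existed under package/lib/helpers/

async function demo() {
  // 10-minute max age instead of the 1-hour default; cacheDir falls back to .claude/azure/cache
  const cache = new CacheManager({ maxAge: 10 * 60 * 1000 });
  await cache.init();

  // A miss (or an expired entry) returns null, so callers fetch and then store.
  let item = await cache.get('1234', 'workitems');
  if (!item) {
    item = { id: 1234, title: 'Example work item' }; // placeholder payload
    await cache.set('1234', item, 'workitems');
  }

  // Warm the cache for a batch of ids; the fetcher only runs for ids that miss.
  const fetchWorkItems = async (ids) => ids.map((id) => ({ id, title: `Work item ${id}` })); // hypothetical fetcher
  const warmed = await cache.preload(['1235', '1236'], fetchWorkItems, 'workitems');
  console.log(`${warmed.length} items available`);

  console.log(await cache.getStats()); // totalFiles, totalSizeMB, utilizationPercent, ...
  await cache.saveAccessLog();         // persist the access history used by the preload heuristics
}

demo().catch(console.error);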

package/lib/helpers/azure-parallel-processor.js (deleted)
@@ -1,158 +0,0 @@
- #!/usr/bin/env node
- /**
-  * Azure DevOps Parallel Processor
-  * Enables parallel processing of work items and API calls
-  */
-
- const { Worker } = require('worker_threads');
- const os = require('os');
-
- class ParallelProcessor {
-   constructor(options = {}) {
-     this.maxWorkers = options.maxWorkers || os.cpus().length;
-     this.taskQueue = [];
-     this.activeWorkers = 0;
-   }
-
-   /**
-    * Process items in parallel
-    * @param {Array} items - Items to process
-    * @param {Function} processor - Processing function
-    * @param {Object} options - Processing options
-    * @returns {Promise<Array>} Processed results
-    */
-   async processParallel(items, processor, options = {}) {
-     const chunkSize = Math.ceil(items.length / this.maxWorkers);
-     const chunks = this.chunkArray(items, chunkSize);
-
-     const promises = chunks.map(chunk =>
-       this.processChunk(chunk, processor, options)
-     );
-
-     const results = await Promise.all(promises);
-     return results.flat();
-   }
-
-   /**
-    * Process a chunk of items
-    * @private
-    */
-   async processChunk(chunk, processor, options) {
-     const results = [];
-
-     // Use Promise.all for concurrent processing within chunk
-     const promises = chunk.map(item =>
-       this.wrapWithTimeout(
-         processor(item, options),
-         options.timeout || 30000
-       )
-     );
-
-     try {
-       const chunkResults = await Promise.allSettled(promises);
-
-       for (const result of chunkResults) {
-         if (result.status === 'fulfilled') {
-           results.push(result.value);
-         } else {
-           console.error(`Processing failed: ${result.reason}`);
-           results.push(null);
-         }
-       }
-     } catch (error) {
-       console.error(`Chunk processing error: ${error.message}`);
-     }
-
-     return results;
-   }
-
-   /**
-    * Wrap promise with timeout
-    * @private
-    */
-   wrapWithTimeout(promise, timeout) {
-     return Promise.race([
-       promise,
-       new Promise((_, reject) =>
-         setTimeout(() => reject(new Error('Operation timed out')), timeout)
-       )
-     ]);
-   }
-
-   /**
-    * Chunk array into smaller arrays
-    * @private
-    */
-   chunkArray(array, size) {
-     const chunks = [];
-     for (let i = 0; i < array.length; i += size) {
-       chunks.push(array.slice(i, i + size));
-     }
-     return chunks;
-   }
-
-   /**
-    * Process WIQL queries in parallel
-    */
-   async parallelWiqlQueries(queries, apiClient) {
-     return this.processParallel(
-       queries,
-       async (query) => {
-         try {
-           return await apiClient.queryByWiql(query);
-         } catch (error) {
-           console.error(`Query failed: ${error.message}`);
-           return null;
-         }
-       },
-       { timeout: 10000 }
-     );
-   }
-
-   /**
-    * Fetch work items in parallel
-    */
-   async parallelFetchWorkItems(ids, apiClient, options = {}) {
-     const showProgress = options.showProgress !== false;
-     let processed = 0;
-
-     return this.processParallel(
-       ids,
-       async (id) => {
-         try {
-           const result = await apiClient.getWorkItem(id);
-
-           if (showProgress) {
-             processed++;
-             this.updateProgress(processed, ids.length);
-           }
-
-           return result;
-         } catch (error) {
-           console.error(`Failed to fetch work item ${id}: ${error.message}`);
-           return null;
-         }
-       },
-       { timeout: 5000 }
-     );
-   }
-
-   /**
-    * Update progress indicator
-    * @private
-    */
-   updateProgress(current, total) {
-     const percent = Math.round((current / total) * 100);
-     const barLength = 30;
-     const filled = Math.round((percent / 100) * barLength);
-     const bar = '█'.repeat(filled) + '░'.repeat(barLength - filled);
-
-     process.stdout.write(`\rProgress: ${bar} ${percent}% (${current}/${total})`);
-
-     if (current === total) {
-       process.stdout.write('\n');
-     }
-   }
- }
-
- module.exports = ParallelProcessor;
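
For reference, a minimal usage sketch of the removed ParallelProcessor, based only on the methods visible in the hunk above; azureClient is a hypothetical stand-in for the real Azure DevOps client and is assumed only to expose getWorkItem(id).

const ParallelProcessor = require('./azure-parallel-processor'); // path as it existed under package/lib/helpers/

// Hypothetical stand-in for the Azure DevOps client the helper expected.
const azureClient = {
  getWorkItem: async (id) => ({ id, fields: { 'System.Title': `Work item ${id}` } })
};

async function demo() {
  const processor = new ParallelProcessor({ maxWorkers: 4 });

  // Generic parallel map with a per-item timeout; items that fail come back as null.
  const doubled = await processor.processParallel([1, 2, 3, 4], async (n) => n * 2, { timeout: 1000 });
  console.log(doubled); // [2, 4, 6, 8]

  // Fetch a batch of work items concurrently, with a progress bar written to stdout.
  const items = await processor.parallelFetchWorkItems([101, 102, 103], azureClient);
  console.log(`${items.filter(Boolean).length} work items fetched`);
}

demo().catch(console.error);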

package/lib/helpers/azure-work-item-create.js (deleted)
@@ -1,278 +0,0 @@
- #!/usr/bin/env node
-
- /**
-  * Azure DevOps Work Item Creation Helper
-  * Provides methods to create work items with JSON responses
-  */
-
- const fs = require('fs-extra');
- const path = require('path');
-
- class AzureWorkItemCreator {
-   constructor() {
-     this.client = null;
-   }
-
-   /**
-    * Set Azure DevOps client
-    */
-   _setClient(client) {
-     this.client = client;
-   }
-
-   /**
-    * Validate that client is configured
-    */
-   _validateClient() {
-     if (!this.client) {
-       throw new Error('Azure DevOps client not configured. Call _setClient() first.');
-     }
-   }
-
-   /**
-    * Validate required options
-    */
-   _validateOptions(options, requiredFields = ['title']) {
-     for (const field of requiredFields) {
-       if (!options[field]) {
-         throw new Error(`${field} is required`);
-       }
-     }
-   }
-
-   /**
-    * Build work item fields from options
-    */
-   async _buildFields(options) {
-     const fields = {};
-
-     // Title
-     if (options.title) {
-       fields['System.Title'] = options.title;
-     }
-
-     // Description
-     if (options.description) {
-       fields['System.Description'] = options.description;
-     } else if (options.descriptionFile) {
-       fields['System.Description'] = await fs.readFile(options.descriptionFile, 'utf8');
-     }
-
-     // Tags
-     if (options.tags && options.tags.length > 0) {
-       fields['System.Tags'] = options.tags.join('; ');
-     }
-
-     // Priority
-     if (options.priority !== undefined) {
-       fields['Microsoft.VSTS.Common.Priority'] = options.priority;
-     }
-
-     // Severity (for bugs)
-     if (options.severity) {
-       fields['Microsoft.VSTS.Common.Severity'] = options.severity;
-     }
-
-     // Iteration Path
-     if (options.iterationPath) {
-       fields['System.IterationPath'] = options.iterationPath;
-     }
-
-     // Area Path
-     if (options.areaPath) {
-       fields['System.AreaPath'] = options.areaPath;
-     }
-
-     // Assigned To
-     if (options.assignedTo) {
-       fields['System.AssignedTo'] = options.assignedTo;
-     }
-
-     // Acceptance Criteria (for stories)
-     if (options.acceptanceCriteria) {
-       if (typeof options.acceptanceCriteria === 'string') {
-         fields['Microsoft.VSTS.Common.AcceptanceCriteria'] = options.acceptanceCriteria;
-       } else if (Array.isArray(options.acceptanceCriteria)) {
-         fields['Microsoft.VSTS.Common.AcceptanceCriteria'] =
-           options.acceptanceCriteria.map(c => `- ${c}`).join('\n');
-       }
-     }
-
-     // Story Points (for stories)
-     if (options.storyPoints !== undefined) {
-       fields['Microsoft.VSTS.Scheduling.StoryPoints'] = options.storyPoints;
-     }
-
-     // Remaining Work (for tasks)
-     if (options.remainingWork !== undefined) {
-       fields['Microsoft.VSTS.Scheduling.RemainingWork'] = options.remainingWork;
-     }
-
-     // Parent link
-     if (options.parentId) {
-       fields['System.Parent'] = options.parentId;
-     }
-
-     return fields;
-   }
-
-   /**
-    * Format work item response
-    */
-   _formatResponse(workItem) {
-     return {
-       id: workItem.id,
-       type: workItem.fields['System.WorkItemType'],
-       title: workItem.fields['System.Title'],
-       state: workItem.fields['System.State'],
-       url: workItem._links?.html?.href || `https://dev.azure.com/_workitems/edit/${workItem.id}`,
-       fields: workItem.fields
-     };
-   }
-
-   /**
-    * Create a work item
-    */
-   async createWorkItem(options) {
-     this._validateClient();
-     this._validateOptions(options, ['title']);
-
-     try {
-       const fields = await this._buildFields(options);
-       const workItem = await this.client.createWorkItem(options.type, fields);
-
-       return this._formatResponse(workItem);
-     } catch (error) {
-       if (error.message && error.message.includes('Unauthorized')) {
-         throw new Error('Azure DevOps API Error: Unauthorized. Check your PAT token.');
-       }
-       throw error;
-     }
-   }
-
-   /**
-    * Create an Epic
-    */
-   async createEpic(options) {
-     const epicName = options.epicName || options.title?.replace(/^Epic:\s*/, '');
-
-     if (!epicName && !options.title) {
-       throw new Error('epicName or title is required for epic creation');
-     }
-
-     // Prepare epic options
-     const epicOptions = {
-       type: 'Epic',
-       title: options.title || `Epic: ${epicName}`,
-       description: options.description || this._generateEpicDescription(options),
-       tags: ['epic', `epic:${epicName}`],
-       priority: options.priority
-     };
-
-     // Add additional fields
-     if (options.acceptanceCriteria) {
-       epicOptions.acceptanceCriteria = options.acceptanceCriteria;
-     }
-
-     return await this.createWorkItem(epicOptions);
-   }
-
-   /**
-    * Generate epic description from options
-    */
-   _generateEpicDescription(options) {
-     let description = '## Epic Description\n\n';
-
-     if (options.description) {
-       description += `${options.description}\n\n`;
-     }
-
-     if (options.acceptanceCriteria && options.acceptanceCriteria.length > 0) {
-       description += '## Acceptance Criteria\n\n';
-       options.acceptanceCriteria.forEach(criteria => {
-         description += `- ${criteria}\n`;
-       });
-       description += '\n';
-     }
-
-     if (options.tasks && options.tasks.length > 0) {
-       description += '## Tasks\n\n';
-       options.tasks.forEach(task => {
-         description += `- ${task}\n`;
-       });
-     }
-
-     return description;
-   }
-
-   /**
-    * Create a User Story
-    */
-   async createUserStory(options) {
-     this._validateOptions(options, ['title']);
-
-     const storyOptions = {
-       type: 'User Story',
-       ...options
-     };
-
-     return await this.createWorkItem(storyOptions);
-   }
-
-   /**
-    * Create a Task
-    */
-   async createTask(options) {
-     this._validateOptions(options, ['title']);
-
-     const taskOptions = {
-       type: 'Task',
-       ...options
-     };
-
-     return await this.createWorkItem(taskOptions);
-   }
-
-   /**
-    * Create multiple work items in batch
-    */
-   async createBatch(items) {
-     const results = [];
-
-     for (const item of items) {
-       try {
-         const result = await this.createWorkItem(item);
-         results.push(result);
-       } catch (error) {
-         results.push({
-           error: error.message,
-           item
-         });
-       }
-     }
-
-     return results;
-   }
-
-   /**
-    * Helper method for PM commands - create epic and return just the ID
-    */
-   async createEpicAndGetId(options) {
-     const result = await this.createEpic(options);
-     return result.id;
-   }
- }
-
- // Create singleton instance
- const creator = new AzureWorkItemCreator();
-
- // Export methods bound to instance
- module.exports = {
-   createWorkItem: creator.createWorkItem.bind(creator),
-   createEpic: creator.createEpic.bind(creator),
-   createUserStory: creator.createUserStory.bind(creator),
-   createTask: creator.createTask.bind(creator),
-   createBatch: creator.createBatch.bind(creator),
-   createEpicAndGetId: creator.createEpicAndGetId.bind(creator),
-   _setClient: creator._setClient.bind(creator)
- };
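
For reference, a minimal usage sketch of the removed work-item creation helpers, based only on the exports visible in the hunk above; fakeClient is a hypothetical stand-in for the real Azure DevOps client and is assumed only to expose createWorkItem(type, fields).

const workItems = require('./azure-work-item-create'); // path as it existed under package/lib/helpers/

// Hypothetical stand-in for the Azure DevOps client; the real one was expected to
// implement createWorkItem(type, fields) against the REST API.
const fakeClient = {
  createWorkItem: async (type, fields) => ({
    id: 42,
    fields: { ...fields, 'System.WorkItemType': type, 'System.State': 'New' }
  })
};

async function demo() {
  workItems._setClient(fakeClient); // required before any create* call

  const epic = await workItems.createEpic({
    epicName: 'user-authentication', // used for the "Epic: ..." title and the epic:* tag
    description: 'Add OAuth2 login',
    acceptanceCriteria: ['Users can log in with Google', 'Sessions expire after 24h']
  });
  console.log(epic.id, epic.title, epic.state);

  const task = await workItems.createTask({
    title: 'Implement token refresh',
    parentId: epic.id,  // mapped to System.Parent
    remainingWork: 4    // mapped to Microsoft.VSTS.Scheduling.RemainingWork
  });
  console.log(task.id, task.type);
}

demo().catch(console.error);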