@alanse/clickup-multi-mcp-server 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +38 -0
- package/LICENSE +21 -0
- package/README.md +470 -0
- package/build/config.js +237 -0
- package/build/index.js +87 -0
- package/build/logger.js +163 -0
- package/build/middleware/security.js +231 -0
- package/build/server.js +288 -0
- package/build/services/clickup/base.js +432 -0
- package/build/services/clickup/bulk.js +180 -0
- package/build/services/clickup/document.js +159 -0
- package/build/services/clickup/folder.js +136 -0
- package/build/services/clickup/index.js +76 -0
- package/build/services/clickup/list.js +191 -0
- package/build/services/clickup/tag.js +239 -0
- package/build/services/clickup/task/index.js +32 -0
- package/build/services/clickup/task/task-attachments.js +105 -0
- package/build/services/clickup/task/task-comments.js +114 -0
- package/build/services/clickup/task/task-core.js +604 -0
- package/build/services/clickup/task/task-custom-fields.js +107 -0
- package/build/services/clickup/task/task-search.js +986 -0
- package/build/services/clickup/task/task-service.js +104 -0
- package/build/services/clickup/task/task-tags.js +113 -0
- package/build/services/clickup/time.js +244 -0
- package/build/services/clickup/types.js +33 -0
- package/build/services/clickup/workspace.js +397 -0
- package/build/services/shared.js +61 -0
- package/build/sse_server.js +277 -0
- package/build/tools/documents.js +489 -0
- package/build/tools/folder.js +331 -0
- package/build/tools/index.js +16 -0
- package/build/tools/list.js +428 -0
- package/build/tools/member.js +106 -0
- package/build/tools/tag.js +833 -0
- package/build/tools/task/attachments.js +357 -0
- package/build/tools/task/attachments.types.js +9 -0
- package/build/tools/task/bulk-operations.js +338 -0
- package/build/tools/task/handlers.js +919 -0
- package/build/tools/task/index.js +30 -0
- package/build/tools/task/main.js +233 -0
- package/build/tools/task/single-operations.js +469 -0
- package/build/tools/task/time-tracking.js +575 -0
- package/build/tools/task/utilities.js +310 -0
- package/build/tools/task/workspace-operations.js +258 -0
- package/build/tools/tool-enhancer.js +37 -0
- package/build/tools/utils.js +12 -0
- package/build/tools/workspace-helper.js +44 -0
- package/build/tools/workspace.js +73 -0
- package/build/utils/color-processor.js +183 -0
- package/build/utils/concurrency-utils.js +248 -0
- package/build/utils/date-utils.js +542 -0
- package/build/utils/resolver-utils.js +135 -0
- package/build/utils/sponsor-service.js +93 -0
- package/build/utils/token-utils.js +49 -0
- package/package.json +77 -0
- package/smithery.yaml +23 -0
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SPDX-FileCopyrightText: © 2025 Talib Kareem <taazkareem@icloud.com>
|
|
3
|
+
* SPDX-License-Identifier: MIT
|
|
4
|
+
*
|
|
5
|
+
* ClickUp MCP Task Attachment Tool
|
|
6
|
+
*
|
|
7
|
+
* This module implements a tool for attaching files to ClickUp tasks
|
|
8
|
+
* with automatic method selection based on file source and size.
|
|
9
|
+
*/
|
|
10
|
+
import { clickUpServices } from '../../services/shared.js';
|
|
11
|
+
import { validateTaskIdentification } from './utilities.js';
|
|
12
|
+
import { sponsorService } from '../../utils/sponsor-service.js';
|
|
13
|
+
import { Logger } from '../../logger.js';
|
|
14
|
+
// Use shared services instance
const { task: taskService } = clickUpServices;
// Create a logger instance for attachments
const logger = new Logger('TaskAttachments');
// Session storage for chunked uploads (in-memory for demonstration).
// Keyed by session token; values hold { taskId, fileName, fileSize, chunks, timestamp }.
const chunkSessions = new Map();
// Clean up expired sessions periodically so abandoned uploads do not leak memory.
const SESSION_EXPIRY_MS = 24 * 60 * 60 * 1000; // 24 hours
const sessionCleanupTimer = setInterval(() => {
  const now = Date.now();
  for (const [token, session] of chunkSessions.entries()) {
    if (now - session.timestamp > SESSION_EXPIRY_MS) {
      chunkSessions.delete(token);
      logger.debug(`Cleaned up expired upload session: ${token}`);
    }
  }
}, 3600 * 1000); // Check every hour
// Fix: without unref() this housekeeping timer keeps the Node.js process
// alive on its own, preventing a clean exit of the MCP server. Optional
// chaining keeps this safe in environments whose timers lack unref().
sessionCleanupTimer.unref?.();
|
|
31
|
+
/**
 * Single unified tool for attaching files to ClickUp tasks.
 *
 * Declares the MCP tool `attach_task_file`. The schema supports three file
 * sources — base64 data (file_data + file_name), a web URL, or an absolute
 * local path (both via file_url) — plus advanced parameters for manually
 * driving chunked uploads of large files.
 *
 * NOTE(review): the handler also reads a `customTaskId` parameter that is
 * not declared here — confirm whether the schema or the handler is the
 * intended contract.
 */
export const attachTaskFileTool = {
  name: "attach_task_file",
  description: `Attaches file to task. Use taskId (preferred) or taskName + optional listName. File sources: 1) base64 + filename (≤10MB), 2) URL (http/https), 3) local path (absolute), 4) chunked for large files. WARNING: taskName without listName may match multiple tasks.`,
  inputSchema: {
    type: "object",
    properties: {
      // --- Task identification (one of taskId or taskName) ---
      taskId: {
        type: "string",
        description: "ID of the task to attach the file to. Works with both regular task IDs (9 characters) and custom IDs with uppercase prefixes (like 'DEV-1234')."
      },
      taskName: {
        type: "string",
        description: "Name of the task to attach the file to. The tool will search for tasks with this name across all lists unless listName is specified."
      },
      listName: {
        type: "string",
        description: "Optional: Name of list containing the task. Providing this narrows the search to a specific list, improving performance and reducing ambiguity."
      },
      // --- File source: base64 payload ---
      file_name: {
        type: "string",
        description: "Name of the file to be attached (include the extension). Required when using file_data."
      },
      file_data: {
        type: "string",
        description: "Base64-encoded content of the file (without the data URL prefix)."
      },
      // --- File source: web URL or absolute local path ---
      file_url: {
        type: "string",
        description: "DUAL PURPOSE PARAMETER: Either (1) a web URL starting with http/https to download a file from, OR (2) an absolute local file path starting with / or drive letter. DO NOT use relative paths."
      },
      auth_header: {
        type: "string",
        description: "Optional authorization header to use when downloading from a web URL (ignored for local files)."
      },
      // Advanced parameters for chunked uploads - usually not needed as chunking is automatic
      chunk_index: {
        type: "number",
        description: "Optional: For advanced usage with large file chunking. The 0-based index of this chunk."
      },
      chunk_session: {
        type: "string",
        description: "Optional: For advanced usage with large file chunking. Session identifier from a previous chunk upload."
      },
      chunk_total: {
        type: "number",
        description: "Optional: For advanced usage with large file chunking. Total number of chunks expected."
      },
      chunk_is_last: {
        type: "boolean",
        description: "Optional: For advanced usage with large file chunking. Whether this is the final chunk."
      }
    }
  }
};
|
|
88
|
+
/**
 * Handler function for the attachTaskFileTool.
 *
 * Routes the request to the appropriate upload strategy:
 *  - chunk_session (or legacy session_id): continue an in-progress chunked upload
 *  - file_url: web URL download, or absolute local file path
 *  - file_data: base64 payload, chunked automatically above 10MB
 *
 * Fix: the published input schema declares `chunk_session` / `chunk_is_last`,
 * but the old code destructured the undeclared names `session_id` / `chunk_size`,
 * so chunked continuation was unreachable via the documented interface. Both
 * names are now accepted, and `chunk_is_last` takes precedence over the
 * index/total arithmetic. Continuation calls also no longer require task
 * identification, since the session already records the target task.
 *
 * @param {object} params - Tool parameters (see attachTaskFileTool.inputSchema).
 * @returns {Promise<object>} Result object from the selected upload helper.
 * @throws {Error} On invalid parameters, unresolved tasks, or upload failures.
 */
async function attachTaskFileHandler(params) {
  // Extract common parameters (accept both documented and legacy chunk names).
  const { taskId, taskName, listName, customTaskId, file_name, file_data, file_url, auth_header, chunk_total, chunk_index, chunk_is_last, chunk_session, session_id } = params;
  const chunkSession = chunk_session ?? session_id;
  // CASE 1: Chunked upload continuation — the session already knows the task,
  // so no task identification or lookup round-trip is needed.
  if (chunkSession) {
    try {
      const session = chunkSessions.get(chunkSession);
      if (!session) {
        throw new Error("Upload session not found or expired");
      }
      // Prefer the explicit flag; fall back to index/total arithmetic.
      const isLastChunk = chunk_is_last ?? (chunk_total !== undefined && chunk_total === chunk_index + 1);
      return await handleChunkUpload(session.taskId, chunkSession, chunk_index, file_data, isLastChunk);
    }
    catch (error) {
      logger.error(`Error attaching file to task:`, error);
      throw error;
    }
  }
  // Validate task identification
  const validationResult = validateTaskIdentification({ taskId, taskName, listName, customTaskId }, { useGlobalLookup: true });
  if (!validationResult.isValid) {
    throw new Error(validationResult.errorMessage);
  }
  // Validate file source - either file_data or file_url must be provided
  if (!file_data && !file_url) {
    throw new Error("Either file_data, file_url, or session_id must be provided");
  }
  // Resolve task ID (single unambiguous match required)
  const result = await taskService.findTasks({
    taskId,
    taskName,
    listName,
    allowMultipleMatches: false,
    useSmartDisambiguation: true,
    includeFullDetails: false
  });
  if (!result || Array.isArray(result)) {
    throw new Error("Task not found");
  }
  const resolvedTaskId = result.id;
  try {
    // CASE 2: URL-based upload or local file path
    if (file_url) {
      logger.debug(`Checking if path is local: ${file_url}`);
      if (file_url.startsWith('/') || /^[A-Za-z]:\\/.test(file_url)) {
        logger.debug(`Detected as local path, proceeding to handle: ${file_url}`);
        return await handleLocalFileUpload(resolvedTaskId, file_url, file_name);
      }
      else if (file_url.startsWith('http://') || file_url.startsWith('https://')) {
        logger.debug(`Detected as URL, proceeding with URL upload: ${file_url}`);
        return await handleUrlUpload(resolvedTaskId, file_url, file_name, auth_header);
      }
      else {
        throw new Error(`Invalid file_url format: "${file_url}". The file_url parameter must be either an absolute file path (starting with / or drive letter) or a web URL (starting with http:// or https://)`);
      }
    }
    // CASE 3: Base64 upload (with automatic chunking for large files)
    if (file_data) {
      if (!file_name) {
        throw new Error("file_name is required when using file_data");
      }
      const fileBuffer = Buffer.from(file_data, 'base64');
      // Files above 10MB go through the chunked path.
      if (fileBuffer.length > 10 * 1024 * 1024) {
        return await startChunkedUpload(resolvedTaskId, file_name, fileBuffer);
      }
      return await handleDirectUpload(resolvedTaskId, file_name, fileBuffer);
    }
    throw new Error("Invalid parameters: Unable to determine upload method");
  }
  catch (error) {
    logger.error(`Error attaching file to task:`, error);
    throw error;
  }
}
|
|
161
|
+
/**
 * Upload a small (≤10MB) in-memory file directly to a task.
 *
 * @param {string} taskId - Resolved ClickUp task id.
 * @param {string} fileName - Attachment file name (with extension).
 * @param {Buffer} fileBuffer - Raw file bytes.
 * @returns {Promise<object>} { success, message, attachment }.
 * @throws {Error} Wrapping any service failure as "Failed to upload file: ...".
 */
async function handleDirectUpload(taskId, fileName, fileBuffer) {
  let attachment;
  try {
    attachment = await taskService.uploadTaskAttachment(taskId, fileBuffer, fileName);
  }
  catch (error) {
    // Surface service errors with a consistent prefix.
    throw new Error(`Failed to upload file: ${error.message}`);
  }
  return {
    success: true,
    message: `File "${fileName}" successfully attached to task ${taskId}`,
    attachment,
  };
}
|
|
178
|
+
/**
 * Attach a file downloaded from a web URL (download performed by the service).
 *
 * @param {string} taskId - Resolved ClickUp task id.
 * @param {string} fileUrl - http(s) URL of the file.
 * @param {string|undefined} fileName - Optional explicit attachment name.
 * @param {string|undefined} authHeader - Optional Authorization header for the download.
 * @returns {Promise<object>} { success, message, attachment }.
 * @throws {Error} With a "Failed to upload file from URL: ..." prefix.
 */
async function handleUrlUpload(taskId, fileUrl, fileName, authHeader) {
  try {
    // Derive the attachment name from the URL path when none was supplied.
    const resolvedName =
      fileName ||
      new URL(fileUrl).pathname.split('/').pop() ||
      'downloaded-file';
    const attachment = await taskService.uploadTaskAttachmentFromUrl(taskId, fileUrl, resolvedName, authHeader);
    return {
      success: true,
      message: `File from "${fileUrl}" successfully attached to task ${taskId}`,
      attachment,
    };
  }
  catch (error) {
    // new URL(...) throws a TypeError whose message is exactly 'Invalid URL'.
    if (error.message === 'Invalid URL') {
      throw new Error(`Failed to upload file from URL: Invalid URL format. The file_url parameter must be a valid web URL starting with http:// or https://`);
    }
    throw new Error(`Failed to upload file from URL: ${error.message}`);
  }
}
|
|
200
|
+
/**
 * Begin a chunked upload session for a large (>10MB) in-memory file.
 *
 * The whole buffer is split into 5MB chunks and kept in the in-memory
 * session store (demonstration implementation); the returned session token
 * lets a follow-up call finalize the upload.
 *
 * @param {string} taskId - Resolved ClickUp task id.
 * @param {string} fileName - Attachment file name.
 * @param {Buffer} fileBuffer - Raw file bytes.
 * @returns {Promise<object>} Session metadata including chunk_session token.
 */
async function startChunkedUpload(taskId, fileName, fileBuffer) {
  // Token is unique enough for an in-memory, single-process session store.
  const sessionToken = `chunk_session_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;
  const CHUNK_BYTES = 5 * 1024 * 1024; // 5MB chunks
  // Split the buffer into consecutively indexed chunks.
  const chunks = new Map();
  let index = 0;
  for (let start = 0; start < fileBuffer.length; start += CHUNK_BYTES) {
    chunks.set(index, fileBuffer.slice(start, start + CHUNK_BYTES));
    index += 1;
  }
  // Register the session for later finalization/cleanup.
  chunkSessions.set(sessionToken, {
    taskId,
    fileName,
    fileSize: fileBuffer.length,
    chunks,
    timestamp: Date.now()
  });
  return {
    success: true,
    message: `Large file detected. Chunked upload initialized for "${fileName}" (${fileBuffer.length} bytes)`,
    chunk_session: sessionToken,
    chunks_total: chunks.size,
    chunk_uploaded: 1,
    attachment: null,
    details: {
      taskId,
      fileName,
      fileSize: fileBuffer.length,
      chunkCount: chunks.size,
      progress: Math.round((1 / chunks.size) * 100)
    }
  };
}
|
|
239
|
+
/**
 * Handle one step of a multi-chunk upload.
 *
 * When `isLastChunk` is set (or the session holds a single chunk and no index
 * was supplied), all stored chunks are stitched together in index order and
 * uploaded as one attachment; otherwise the provided chunk is stored.
 *
 * Fixes: Buffer.allocUnsafe replaced with Buffer.alloc so a missing chunk
 * zero-fills its gap instead of leaking uninitialized process memory into the
 * uploaded file; removed a redundant Map.delete before Map.set and an unused
 * destructured index variable.
 *
 * @param {string} taskId - Informational; the session's own taskId is authoritative.
 * @param {string} sessionToken - Token returned by startChunkedUpload.
 * @param {number|undefined} chunkIndex - 0-based index of the chunk being uploaded.
 * @param {string|undefined} chunkData - Base64-encoded chunk payload.
 * @param {boolean} isLastChunk - Whether this call should finalize the upload.
 * @returns {Promise<object>} Progress info, or the final attachment result.
 * @throws {Error} On missing/expired session, missing chunk params, or upload failure.
 */
async function handleChunkUpload(taskId, sessionToken, chunkIndex, chunkData, isLastChunk) {
  // Verify session exists
  const session = chunkSessions.get(sessionToken);
  if (!session) {
    throw new Error("Upload session not found or expired");
  }
  // If this is the last chunk or all chunks are uploaded, finalize the upload
  if (isLastChunk || (session.chunks.size === 1 && chunkIndex === undefined)) {
    // Buffer.alloc (not allocUnsafe): if any chunk is missing, its gap stays
    // zero-filled rather than exposing uninitialized memory.
    const fileData = Buffer.alloc(session.fileSize);
    let offset = 0;
    // Stitch chunks together in ascending index order.
    const sortedChunks = Array.from(session.chunks.entries())
      .sort((a, b) => a[0] - b[0]);
    for (const [, chunk] of sortedChunks) {
      chunk.copy(fileData, offset);
      offset += chunk.length;
    }
    try {
      const result = await taskService.uploadTaskAttachment(session.taskId, fileData, session.fileName);
      // Clean up the session only after a successful upload.
      chunkSessions.delete(sessionToken);
      return {
        success: true,
        message: `File "${session.fileName}" successfully attached to task ${session.taskId}`,
        attachment: result
      };
    }
    catch (error) {
      throw new Error(`Failed to upload file: ${error.message}`);
    }
  }
  // Otherwise handle the current chunk
  if (chunkIndex === undefined || chunkData === undefined) {
    throw new Error("chunk_index and chunk_data are required for chunk uploads");
  }
  // Map.set overwrites an existing entry, so no prior delete is required.
  session.chunks.set(chunkIndex, Buffer.from(chunkData, 'base64'));
  return {
    success: true,
    message: `Chunk ${chunkIndex + 1}/${session.chunks.size} received`,
    chunk_session: sessionToken,
    chunks_remaining: session.chunks.size - chunkIndex - 1,
    details: {
      taskId: session.taskId,
      fileName: session.fileName,
      chunksReceived: chunkIndex + 1,
      progress: Math.round(((chunkIndex + 1) / session.chunks.size) * 100)
    }
  };
}
|
|
297
|
+
/**
 * Attach a file read from an absolute local path.
 *
 * Reads the file from disk, then dispatches to direct upload (≤10MB) or the
 * chunked path (>10MB). ENOENT/EACCES failures are rewritten into friendlier
 * messages.
 *
 * @param {string} taskId - Resolved ClickUp task id.
 * @param {string} filePath - Absolute path to the file.
 * @param {string|undefined} fileName - Optional explicit attachment name.
 * @returns {Promise<object>} Result of the chosen upload helper.
 * @throws {Error} With a "Failed to upload local file: ..." prefix.
 */
async function handleLocalFileUpload(taskId, filePath, fileName) {
  try {
    // fs/path are imported lazily so this module stays loadable in
    // environments without filesystem access.
    const fs = await import('fs');
    const path = await import('path');
    logger.debug(`Processing absolute file path: ${filePath}`);
    // Normalize the path to prevent directory traversal attacks.
    const normalizedPath = path.normalize(filePath);
    if (!fs.existsSync(normalizedPath)) {
      throw new Error(`Local file not found: ${normalizedPath}`);
    }
    if (!fs.statSync(normalizedPath).isFile()) {
      throw new Error(`Path is not a file: ${normalizedPath}`);
    }
    // Fall back to the on-disk file name when none was supplied.
    const resolvedName = fileName || path.basename(normalizedPath);
    const contents = fs.readFileSync(normalizedPath);
    logger.debug(`Successfully read file: ${resolvedName} (${contents.length} bytes)`);
    // Pick the upload strategy by size: >10MB goes through chunking.
    const LARGE_FILE_BYTES = 10 * 1024 * 1024;
    return contents.length > LARGE_FILE_BYTES
      ? await startChunkedUpload(taskId, resolvedName, contents)
      : await handleDirectUpload(taskId, resolvedName, contents);
  }
  catch (error) {
    if (error.message.includes('ENOENT')) {
      throw new Error(`Failed to upload local file: Local file not found: ${filePath}. Make sure the file exists and the path is absolute.`);
    }
    if (error.message.includes('EACCES')) {
      throw new Error(`Failed to upload local file: Permission denied accessing: ${filePath}. Check file permissions.`);
    }
    throw new Error(`Failed to upload local file: ${error.message}`);
  }
}
|
|
343
|
+
/**
 * Wrap a raw handler with the standard success/error response envelope.
 *
 * Successful results are passed through `formatResponse` and wrapped by
 * sponsorService.createResponse; any thrown error (from the handler or the
 * formatter) becomes a sponsorService error response instead of propagating.
 *
 * @param {Function} handler - Async handler receiving the tool parameters.
 * @param {Function} [formatResponse] - Optional result transformer (identity by default).
 * @returns {Function} Async wrapped handler.
 */
function createHandlerWrapper(handler, formatResponse = (result) => result) {
  return async function wrappedHandler(toolParams) {
    try {
      const rawResult = await handler(toolParams);
      return sponsorService.createResponse(formatResponse(rawResult), true);
    }
    catch (error) {
      return sponsorService.createErrorResponse(error, toolParams);
    }
  };
}
|
|
357
|
+
// Public MCP entry point: the raw handler wrapped with the standard
// success/error response envelope.
export const handleAttachTaskFile = createHandlerWrapper(attachTaskFileHandler);
|