@taazkareem/clickup-mcp-server 0.6.1 → 0.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -6
- package/build/index.js +1 -0
- package/build/logger.js +26 -1
- package/build/server.js +1 -1
- package/build/services/clickup/base.js +22 -1
- package/build/services/clickup/bulk.js +76 -45
- package/build/services/clickup/index.js +2 -2
- package/build/services/clickup/task/index.js +32 -0
- package/build/services/clickup/task/task-attachments.js +97 -0
- package/build/services/clickup/task/task-comments.js +104 -0
- package/build/services/clickup/task/task-core.js +477 -0
- package/build/services/clickup/task/task-custom-fields.js +97 -0
- package/build/services/clickup/task/task-search.js +462 -0
- package/build/services/clickup/task/task-service.js +25 -0
- package/build/services/clickup/task/task-tags.js +101 -0
- package/build/services/clickup/workspace.js +81 -36
- package/build/tools/folder.js +1 -1
- package/build/tools/list.js +2 -4
- package/build/tools/task/attachments.js +18 -5
- package/build/tools/task/attachments.types.js +9 -0
- package/build/tools/task/bulk-operations.js +111 -15
- package/build/tools/task/handlers.js +169 -24
- package/build/tools/task/index.js +1 -1
- package/build/tools/task/main.js +36 -1
- package/build/tools/task/single-operations.js +51 -4
- package/build/tools/task/utilities.js +24 -71
- package/build/tools/utils.js +2 -2
- package/build/utils/date-utils.js +149 -30
- package/build/utils/resolver-utils.js +33 -40
- package/build/utils/sponsor-service.js +1 -1
- package/package.json +1 -1
- package/build/services/clickup/task.js +0 -701
@@ -104,8 +104,11 @@ export class WorkspaceService extends BaseClickUpService {
         try {
             // If we have the hierarchy in memory and not forcing refresh, return it
             if (this.workspaceHierarchy && !forceRefresh) {
+                this.logger.debug('Returning cached workspace hierarchy');
                 return this.workspaceHierarchy;
             }
+            const startTime = Date.now();
+            this.logger.info('Starting workspace hierarchy fetch');
             // Start building the workspace tree
             const workspaceTree = {
                 root: {
@@ -115,51 +118,93 @@ export class WorkspaceService extends BaseClickUpService {
                 }
             };
             // Get all spaces
+            const spacesStartTime = Date.now();
             const spaces = await this.getSpaces();
-            … (removed lines truncated in this diff view)
+            const spacesTime = Date.now() - spacesStartTime;
+            this.logger.info(`Fetched ${spaces.length} spaces in ${spacesTime}ms`);
+            // Process spaces in batches to respect rate limits
+            const batchSize = 3; // Process 3 spaces at a time
+            const spaceNodes = [];
+            let totalFolders = 0;
+            let totalLists = 0;
+            for (let i = 0; i < spaces.length; i += batchSize) {
+                const batchStartTime = Date.now();
+                const spaceBatch = spaces.slice(i, i + batchSize);
+                this.logger.debug(`Processing space batch ${i / batchSize + 1} of ${Math.ceil(spaces.length / batchSize)} (${spaceBatch.length} spaces)`);
+                const batchNodes = await Promise.all(spaceBatch.map(async (space) => {
+                    const spaceStartTime = Date.now();
+                    const spaceNode = {
+                        id: space.id,
+                        name: space.name,
+                        type: 'space',
                         children: []
                     };
-                    … (removed lines truncated in this diff view)
+                    // Fetch initial space data
+                    const [folders, listsInSpace] = await Promise.all([
+                        this.getFoldersInSpace(space.id),
+                        this.getListsInSpace(space.id)
+                    ]);
+                    totalFolders += folders.length;
+                    totalLists += listsInSpace.length;
+                    // Process folders in smaller batches
+                    const folderBatchSize = 5; // Process 5 folders at a time
+                    const folderNodes = [];
+                    for (let j = 0; j < folders.length; j += folderBatchSize) {
+                        const folderBatchStartTime = Date.now();
+                        const folderBatch = folders.slice(j, j + folderBatchSize);
+                        const batchFolderNodes = await Promise.all(folderBatch.map(async (folder) => {
+                            const folderNode = {
+                                id: folder.id,
+                                name: folder.name,
+                                type: 'folder',
+                                parentId: space.id,
+                                children: []
+                            };
+                            // Get lists in the folder
+                            const listsInFolder = await this.getListsInFolder(folder.id);
+                            totalLists += listsInFolder.length;
+                            folderNode.children = listsInFolder.map(list => ({
+                                id: list.id,
+                                name: list.name,
+                                type: 'list',
+                                parentId: folder.id
+                            }));
+                            return folderNode;
+                        }));
+                        folderNodes.push(...batchFolderNodes);
+                        const folderBatchTime = Date.now() - folderBatchStartTime;
+                        this.logger.debug(`Processed folder batch in space ${space.name} in ${folderBatchTime}ms (${folderBatch.length} folders)`);
                     }
-                    … (removed lines truncated in this diff view)
-                    for (const list of listsInSpace) {
-                        this.logger.debug(`Adding list directly to space: ${list.name} (${list.id})`);
-                        spaceNode.children?.push({
+                    // Add folder nodes to space
+                    spaceNode.children?.push(...folderNodes);
+                    // Add folderless lists to space
+                    this.logger.debug(`Adding ${listsInSpace.length} lists directly to space ${space.name}`);
+                    const listNodes = listsInSpace.map(list => ({
                         id: list.id,
                         name: list.name,
                         type: 'list',
                         parentId: space.id
-                    });
+                    }));
+                    spaceNode.children?.push(...listNodes);
+                    const spaceTime = Date.now() - spaceStartTime;
+                    this.logger.info(`Processed space ${space.name} in ${spaceTime}ms (${folders.length} folders, ${listsInSpace.length} lists)`);
+                    return spaceNode;
+                }));
+                spaceNodes.push(...batchNodes);
+                const batchTime = Date.now() - batchStartTime;
+                this.logger.info(`Processed space batch in ${batchTime}ms (${spaceBatch.length} spaces)`);
             }
+            // Add all space nodes to the workspace tree
+            workspaceTree.root.children.push(...spaceNodes);
+            const totalTime = Date.now() - startTime;
+            this.logger.info('Workspace hierarchy fetch completed', {
+                duration: totalTime,
+                spaces: spaces.length,
+                folders: totalFolders,
+                lists: totalLists,
+                averageTimePerSpace: totalTime / spaces.length,
+                averageTimePerNode: totalTime / (spaces.length + totalFolders + totalLists)
+            });
             // Store the hierarchy for later use
             this.workspaceHierarchy = workspaceTree;
             return workspaceTree;
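The new getWorkspaceHierarchy code applies the same batching pattern twice: spaces in slices of 3, then folders in slices of 5 inside each space, with each slice fanned out through Promise.all. As a rough standalone sketch of that pattern (processInBatches and fetchSpaceNode are illustrative names, not exports of this package):

    // Sketch of the batch-then-parallelize pattern used above. Items in a batch
    // run in parallel; batches run sequentially, so no more than `batchSize`
    // ClickUp API calls are in flight at once.
    async function processInBatches(items, batchSize, worker) {
        const results = [];
        for (let i = 0; i < items.length; i += batchSize) {
            const batch = items.slice(i, i + batchSize);
            const batchResults = await Promise.all(batch.map(worker));
            results.push(...batchResults);
        }
        return results;
    }

    // In the spirit of the new code:
    // const spaceNodes = await processInBatches(spaces, 3, fetchSpaceNode);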
package/build/tools/folder.js
CHANGED
@@ -156,7 +156,7 @@ Requirements:
 - EITHER folderId OR (folderName + space information) is REQUIRED
 - When using folderName, you MUST provide EITHER spaceId OR spaceName
 
-… (removed line truncated in this diff view)
+Warning:
 - This action CANNOT be undone
 - All lists and tasks within the folder will also be permanently deleted
 - Using folderName is risky as names may not be unique across different spaces`,
package/build/tools/list.js
CHANGED
@@ -8,11 +8,9 @@
  * retrieving, and deleting lists. It supports creating lists both in spaces
  * and in folders.
  */
-import { … (rest of line truncated in this diff view)
+import { listService, workspaceService } from '../services/shared.js';
 import config from '../config.js';
 import { sponsorService } from '../utils/sponsor-service.js';
-// Use shared services instance
-const { list: listService, workspace: workspaceService } = clickUpServices;
 /**
  * Tool definition for creating a list directly in a space
  */
@@ -216,7 +214,7 @@ Valid Usage:
 Requirements:
 - EITHER listId OR listName: REQUIRED
 
-… (removed line truncated in this diff view)
+Warning:
 - This action CANNOT be undone
 - All tasks within the list will also be permanently deleted
 - Using listName is risky as names may not be unique`,
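The import change above (and the matching one in attachments.js below) moves tools from destructuring clickUpServices locally to importing named singletons from services/shared.js. A minimal sketch of the shape such a module would need for both import styles to keep working (assumed for illustration; createClickUpServices is a hypothetical factory name and the real shared.js is not shown in this diff):

    // Assumed shape of build/services/shared.js (illustration only)
    import { createClickUpServices } from './clickup/index.js'; // hypothetical factory
    import config from '../config.js';

    // Single shared instance used by every tool module
    export const clickUpServices = createClickUpServices(config);

    // Named re-exports so tools can import only what they need
    export const { list: listService, workspace: workspaceService, task: taskService } = clickUpServices;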
package/build/tools/task/attachments.js
CHANGED
@@ -7,11 +7,12 @@
  * This module implements a tool for attaching files to ClickUp tasks
  * with automatic method selection based on file source and size.
  */
-import { … (rest of line truncated in this diff view)
-import { validateTaskIdentification … (rest of line truncated in this diff view)
+import { clickUpServices } from '../../services/shared.js';
+import { validateTaskIdentification } from './utilities.js';
 import { sponsorService } from '../../utils/sponsor-service.js';
+// Use shared services instance
+const { task: taskService } = clickUpServices;
 // Session storage for chunked uploads (in-memory for demonstration)
-// In production, this should use a more persistent store
 const chunkSessions = new Map();
 // Clean up expired sessions periodically
 setInterval(() => {
@@ -113,7 +114,18 @@ async function attachTaskFileHandler(params) {
         throw new Error("Either file_data, file_url, or session_id must be provided");
     }
     // Resolve task ID
-    const … (rest of line truncated in this diff view)
+    const result = await taskService.findTasks({
+        taskId,
+        taskName,
+        listName,
+        allowMultipleMatches: false,
+        useSmartDisambiguation: true,
+        includeFullDetails: false
+    });
+    if (!result || Array.isArray(result)) {
+        throw new Error("Task not found");
+    }
+    const resolvedTaskId = result.id;
     try {
         // CASE 1: Chunked upload continuation
         if (session_id) {
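With the resolution step above, the handler accepts either an explicit taskId or a taskName (optionally scoped by listName) and resolves it through taskService.findTasks before uploading. A hedged example of arguments it could receive (placeholder values; only fields that appear in this diff are shown):

    // Illustrative attachTaskFileHandler arguments (placeholder values)
    const params = {
        taskName: "Quarterly report",              // resolved to an ID via taskService.findTasks
        listName: "Finance",                       // narrows the name lookup to a single list
        file_url: "https://example.com/report.pdf" // or file_data / session_id instead
    };
    // const response = await attachTaskFileHandler(params);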
@@ -254,7 +266,8 @@ async function handleChunkUpload(taskId, sessionToken, chunkIndex, chunkData, is
     // Sort chunks by index
     const sortedChunks = Array.from(session.chunks.entries())
         .sort((a, b) => a[0] - b[0]);
-    for (const … (rest of line truncated in this diff view)
+    for (const entry of sortedChunks) {
+        const [index, chunk] = entry;
         chunk.copy(fileData, offset);
         offset += chunk.length;
     }
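The fix above iterates the sorted (index, chunk) entries and destructures each one before copying. The same reassembly idea as a standalone sketch (assumes Buffer chunks keyed by chunk index, as in handleChunkUpload; names are illustrative):

    // Reassemble a file from Buffer chunks stored in a Map keyed by chunk index.
    function assembleChunks(chunks /* Map<number, Buffer> */) {
        const sorted = Array.from(chunks.entries()).sort((a, b) => a[0] - b[0]);
        const totalSize = sorted.reduce((sum, [, chunk]) => sum + chunk.length, 0);
        const fileData = Buffer.alloc(totalSize);
        let offset = 0;
        for (const [, chunk] of sorted) {
            chunk.copy(fileData, offset); // copy each chunk at its running offset
            offset += chunk.length;
        }
        return fileData;
    }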
package/build/tools/task/bulk-operations.js
CHANGED
@@ -86,18 +86,11 @@ Requirements:
 Notes:
 - Configure batch size and concurrency via options for performance
 - Each task should have a name with emoji prefix
-- All tasks will be created in the same list
+- All tasks will be created in the same list
+- Custom fields can be set for each task using the custom_fields property (array of {id, value} objects)`,
     inputSchema: {
         type: "object",
         properties: {
-            listId: {
-                type: "string",
-                description: "ID of list for new tasks (preferred). Use this instead of listName if you have it."
-            },
-            listName: {
-                type: "string",
-                description: "Name of list for new tasks. Only use if you don't have listId."
-            },
             tasks: {
                 type: "array",
                 description: "Array of tasks to create. Each task must have at least a name.",
@@ -134,18 +127,73 @@ Notes:
                                 type: "string"
                             },
                             description: "Optional array of tag names to assign to the task. The tags must already exist in the space."
+                        },
+                        custom_fields: {
+                            type: "array",
+                            items: {
+                                type: "object",
+                                properties: {
+                                    id: {
+                                        type: "string",
+                                        description: "ID of the custom field"
+                                    },
+                                    value: {
+                                        description: "Value for the custom field. Type depends on the field type."
+                                    }
+                                },
+                                required: ["id", "value"]
+                            },
+                            description: "Optional array of custom field values to set on the task."
                         }
                     },
                     required: ["name"]
                 }
             },
-            … (removed line truncated in this diff view)
+            listId: {
+                type: "string",
+                description: "ID of list for new tasks (preferred). Use this instead of listName if you have it."
+            },
+            listName: {
+                type: "string",
+                description: "Name of list for new tasks. Only use if you don't have listId."
+            },
+            options: {
+                description: "Processing options (or JSON string representing options)",
+                oneOf: [
+                    {
+                        type: "object",
+                        description: "Optional processing settings",
+                        properties: {
+                            batchSize: {
+                                type: "number",
+                                description: "Tasks per batch (default: 10)"
+                            },
+                            concurrency: {
+                                type: "number",
+                                description: "Parallel operations (default: 3)"
+                            },
+                            continueOnError: {
+                                type: "boolean",
+                                description: "Continue if some tasks fail"
+                            },
+                            retryCount: {
+                                type: "number",
+                                description: "Retry attempts for failures"
+                            }
+                        }
+                    },
+                    {
+                        type: "string",
+                        description: "JSON string representing options. Will be parsed automatically."
+                    }
+                ]
+            }
         },
         required: ["tasks"]
     }
 };
 /**
- * Tool definition for updating multiple tasks
+ * Tool definition for updating multiple tasks efficiently
  */
 export const updateBulkTasksTool = {
     name: "update_bulk_tasks",
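Putting the expanded create_bulk_tasks schema together, a call could now look roughly like this (IDs and values are placeholders, not real ClickUp data):

    // Illustrative create_bulk_tasks arguments (placeholder IDs and values)
    const createArgs = {
        listId: "901234567",
        tasks: [
            {
                name: "📝 Draft release notes",
                tags: ["docs"], // tags must already exist in the space
                custom_fields: [
                    { id: "custom-field-id-placeholder", value: "High" }
                ]
            },
            { name: "🚀 Publish 0.6.3" }
        ],
        // options may be an object or a JSON string; both forms are accepted per the schema
        options: '{"batchSize": 10, "concurrency": 3, "continueOnError": true}'
    };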
@@ -163,7 +211,8 @@ Requirements:
 Notes:
 - Only specified fields will be updated for each task
 - Configure batch size and concurrency via options for performance
-- Each task can have different fields to update
+- Each task can have different fields to update
+- Custom fields can be updated using the custom_fields property (array of {id, value} objects)`,
     inputSchema: {
         type: "object",
         properties: {
@@ -198,11 +247,58 @@ Notes:
                         dueDate: {
                             type: "string",
                             description: "New due date. Supports Unix timestamps (in milliseconds) and natural language expressions like '1 hour from now', 'tomorrow', etc."
+                        },
+                        custom_fields: {
+                            type: "array",
+                            items: {
+                                type: "object",
+                                properties: {
+                                    id: {
+                                        type: "string",
+                                        description: "ID of the custom field"
+                                    },
+                                    value: {
+                                        description: "Value for the custom field. Type depends on the field type."
+                                    }
+                                },
+                                required: ["id", "value"]
+                            },
+                            description: "Optional array of custom field values to set on the task."
                         }
                     }
                 }
             },
-            options: … (rest of line truncated in this diff view)
+            options: {
+                description: "Processing options (or JSON string representing options)",
+                oneOf: [
+                    {
+                        type: "object",
+                        description: "Optional processing settings",
+                        properties: {
+                            batchSize: {
+                                type: "number",
+                                description: "Tasks per batch (default: 10)"
+                            },
+                            concurrency: {
+                                type: "number",
+                                description: "Parallel operations (default: 3)"
+                            },
+                            continueOnError: {
+                                type: "boolean",
+                                description: "Continue if some tasks fail"
+                            },
+                            retryCount: {
+                                type: "number",
+                                description: "Retry attempts for failures"
+                            }
+                        }
+                    },
+                    {
+                        type: "string",
+                        description: "JSON string representing options. Will be parsed automatically."
+                    }
+                ]
+            }
         },
         required: ["tasks"]
     }
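Likewise for update_bulk_tasks, each task entry can now carry its own custom_fields alongside fields like dueDate. A hedged example (placeholder values; the identifier field name taskId is assumed here, since the identifying properties sit outside this hunk):

    // Illustrative update_bulk_tasks arguments (placeholder values; taskId assumed)
    const updateArgs = {
        tasks: [
            {
                taskId: "86abc123",  // assumed identifier field, not shown in this hunk
                dueDate: "tomorrow", // natural-language dates are accepted per the schema
                custom_fields: [
                    { id: "custom-field-id-placeholder", value: 42 }
                ]
            }
        ],
        options: { batchSize: 10, concurrency: 3 }
    };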
@@ -227,7 +323,7 @@ Notes:
 - Configure batch size and concurrency via options for performance
 - All tasks will be moved to the same destination list
 
-… (removed line truncated in this diff view)
+Warning:
 - Task statuses may reset if destination list has different status options
 - Using taskName without listName will fail as tasks may have identical names across lists`,
     inputSchema: {
@@ -274,7 +370,7 @@ Requirements:
 Notes:
 - Configure batch size and concurrency via options for performance
 
-… (removed line truncated in this diff view)
+Warning:
 - This action CANNOT be undone for any of the tasks
 - Using taskName without listName is dangerous as names may not be unique
 - Always provide listName when using taskName for safer targeting`,