@freelancercom/phabricator-mcp 1.0.0 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only, and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +33 -7
- package/dist/tools/coerce.d.ts +9 -0
- package/dist/tools/coerce.js +21 -0
- package/dist/tools/differential.js +14 -13
- package/dist/tools/diffusion.js +13 -12
- package/dist/tools/maniphest.js +8 -7
- package/dist/tools/paste.js +7 -6
- package/dist/tools/phame.js +9 -8
- package/dist/tools/phriction.js +7 -6
- package/dist/tools/project.js +11 -10
- package/dist/tools/transaction.js +4 -3
- package/dist/tools/user.js +7 -6
- package/package.json +2 -3
- package/dist/client/conduit.test.d.ts +0 -1
- package/dist/client/conduit.test.js +0 -97
- package/dist/config.test.d.ts +0 -1
- package/dist/config.test.js +0 -54
package/README.md
CHANGED
|
@@ -11,7 +11,7 @@ An [MCP (Model Context Protocol)](https://modelcontextprotocol.io/) server that
|
|
|
11
11
|
### Claude Code (CLI)
|
|
12
12
|
|
|
13
13
|
```bash
|
|
14
|
-
claude mcp add --scope user phabricator -- npx github:freelancer/phabricator-mcp
|
|
14
|
+
claude mcp add --scope user phabricator -- npx @freelancercom/phabricator-mcp@latest
|
|
15
15
|
```
|
|
16
16
|
|
|
17
17
|
Or with environment variables (if not using `~/.arcrc`):
|
|
@@ -20,7 +20,7 @@ Or with environment variables (if not using `~/.arcrc`):
|
|
|
20
20
|
claude mcp add --scope user phabricator \
|
|
21
21
|
-e PHABRICATOR_URL=https://phabricator.example.com \
|
|
22
22
|
-e PHABRICATOR_API_TOKEN=api-xxxxx \
|
|
23
|
-
-- npx github:freelancer/phabricator-mcp
|
|
23
|
+
-- npx @freelancercom/phabricator-mcp@latest
|
|
24
24
|
```
|
|
25
25
|
|
|
26
26
|
The `--scope user` flag installs the server globally, making it available in all projects.
|
|
@@ -34,7 +34,7 @@ Add to your Codex config (`~/.codex/config.json`):
|
|
|
34
34
|
"mcpServers": {
|
|
35
35
|
"phabricator": {
|
|
36
36
|
"command": "npx",
|
|
37
|
-
"args": ["github:freelancer/phabricator-mcp"],
|
|
37
|
+
"args": ["@freelancercom/phabricator-mcp@latest"],
|
|
38
38
|
"env": {
|
|
39
39
|
"PHABRICATOR_URL": "https://phabricator.example.com",
|
|
40
40
|
"PHABRICATOR_API_TOKEN": "api-xxxxxxxxxxxxx"
|
|
@@ -54,7 +54,7 @@ Add to your opencode config (`~/.config/opencode/config.json`):
|
|
|
54
54
|
"servers": {
|
|
55
55
|
"phabricator": {
|
|
56
56
|
"command": "npx",
|
|
57
|
-
"args": ["github:freelancer/phabricator-mcp"],
|
|
57
|
+
"args": ["@freelancercom/phabricator-mcp@latest"],
|
|
58
58
|
"env": {
|
|
59
59
|
"PHABRICATOR_URL": "https://phabricator.example.com",
|
|
60
60
|
"PHABRICATOR_API_TOKEN": "api-xxxxxxxxxxxxx"
|
|
@@ -74,7 +74,7 @@ Add to your VS Code `settings.json`:
|
|
|
74
74
|
"claude.mcpServers": {
|
|
75
75
|
"phabricator": {
|
|
76
76
|
"command": "npx",
|
|
77
|
-
"args": ["github:freelancer/phabricator-mcp"],
|
|
77
|
+
"args": ["@freelancercom/phabricator-mcp@latest"],
|
|
78
78
|
"env": {
|
|
79
79
|
"PHABRICATOR_URL": "https://phabricator.example.com",
|
|
80
80
|
"PHABRICATOR_API_TOKEN": "api-xxxxxxxxxxxxx"
|
|
@@ -93,7 +93,7 @@ Add to your Cursor MCP config (`~/.cursor/mcp.json`):
|
|
|
93
93
|
"mcpServers": {
|
|
94
94
|
"phabricator": {
|
|
95
95
|
"command": "npx",
|
|
96
|
-
"args": ["github:freelancer/phabricator-mcp"],
|
|
96
|
+
"args": ["@freelancercom/phabricator-mcp@latest"],
|
|
97
97
|
"env": {
|
|
98
98
|
"PHABRICATOR_URL": "https://phabricator.example.com",
|
|
99
99
|
"PHABRICATOR_API_TOKEN": "api-xxxxxxxxxxxxx"
|
|
@@ -112,7 +112,7 @@ Add to your VS Code `settings.json`:
|
|
|
112
112
|
"github.copilot.chat.mcp.servers": {
|
|
113
113
|
"phabricator": {
|
|
114
114
|
"command": "npx",
|
|
115
|
-
"args": ["github:freelancer/phabricator-mcp"],
|
|
115
|
+
"args": ["@freelancercom/phabricator-mcp@latest"],
|
|
116
116
|
"env": {
|
|
117
117
|
"PHABRICATOR_URL": "https://phabricator.example.com",
|
|
118
118
|
"PHABRICATOR_API_TOKEN": "api-xxxxxxxxxxxxx"
|
|
@@ -122,6 +122,32 @@ Add to your VS Code `settings.json`:
|
|
|
122
122
|
}
|
|
123
123
|
```
|
|
124
124
|
|
|
125
|
+
## Upgrading
|
|
126
|
+
|
|
127
|
+
The default install uses `@freelancercom/phabricator-mcp@latest`, which tells npx to check for updates on each run. No action needed.
|
|
128
|
+
|
|
129
|
+
If you pinned a specific version (e.g. `@freelancercom/phabricator-mcp@1.0.0`) or omitted the version suffix, npx caches the package and won't pick up new versions. To upgrade:
|
|
130
|
+
|
|
131
|
+
```bash
|
|
132
|
+
npx clear-npx-cache
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
Then restart your MCP client.
|
|
136
|
+
|
|
137
|
+
### Migrating from `github:freelancer/phabricator-mcp`
|
|
138
|
+
|
|
139
|
+
If you previously installed using the GitHub URL, update your config to use the npm package instead:
|
|
140
|
+
|
|
141
|
+
```bash
|
|
142
|
+
# Remove old server
|
|
143
|
+
claude mcp remove phabricator -s user
|
|
144
|
+
|
|
145
|
+
# Add new one
|
|
146
|
+
claude mcp add --scope user phabricator -- npx @freelancercom/phabricator-mcp@latest
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
For JSON configs, replace `["github:freelancer/phabricator-mcp"]` with `["@freelancercom/phabricator-mcp@latest"]` in your args.
|
|
150
|
+
|
|
125
151
|
## Configuration
|
|
126
152
|
|
|
127
153
|
The server automatically reads configuration from `~/.arcrc` (created by [Arcanist](https://secure.phabricator.com/book/phabricator/article/arcanist/)). No additional configuration is needed if you've already set up `arc`.
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
/**
|
|
3
|
+
* Coerce a JSON string into an object before Zod validation.
|
|
4
|
+
*
|
|
5
|
+
* Some MCP clients may send object parameters as JSON strings when they
|
|
6
|
+
* haven't loaded the tool schema. This wrapper gracefully handles that
|
|
7
|
+
* by parsing the string before validation.
|
|
8
|
+
*/
|
|
9
|
+
export declare function jsonCoerce<T extends z.ZodTypeAny>(schema: T): z.ZodEffects<T, T["_output"], unknown>;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
/**
|
|
3
|
+
* Coerce a JSON string into an object before Zod validation.
|
|
4
|
+
*
|
|
5
|
+
* Some MCP clients may send object parameters as JSON strings when they
|
|
6
|
+
* haven't loaded the tool schema. This wrapper gracefully handles that
|
|
7
|
+
* by parsing the string before validation.
|
|
8
|
+
*/
|
|
9
|
+
export function jsonCoerce(schema) {
|
|
10
|
+
return z.preprocess((val) => {
|
|
11
|
+
if (typeof val === 'string') {
|
|
12
|
+
try {
|
|
13
|
+
return JSON.parse(val);
|
|
14
|
+
}
|
|
15
|
+
catch {
|
|
16
|
+
return val;
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
return val;
|
|
20
|
+
}, schema);
|
|
21
|
+
}
|
|
@@ -1,23 +1,24 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerDifferentialTools(server, client) {
|
|
3
4
|
// Search revisions
|
|
4
5
|
server.tool('phabricator_revision_search', 'Search Differential revisions (code reviews)', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "active", "authored", "waiting"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Revision IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Revision IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Revision PHIDs'),
|
|
9
10
|
authorPHIDs: z.array(z.string()).optional().describe('Author PHIDs'),
|
|
10
11
|
reviewerPHIDs: z.array(z.string()).optional().describe('Reviewer PHIDs'),
|
|
11
12
|
repositoryPHIDs: z.array(z.string()).optional().describe('Repository PHIDs'),
|
|
12
13
|
statuses: z.array(z.string()).optional().describe('Statuses: needs-review, needs-revision, accepted, published, abandoned, changes-planned'),
|
|
13
|
-
}).optional().describe('Search constraints'),
|
|
14
|
-
attachments: z.object({
|
|
14
|
+
})).optional().describe('Search constraints'),
|
|
15
|
+
attachments: jsonCoerce(z.object({
|
|
15
16
|
reviewers: z.boolean().optional().describe('Include reviewers'),
|
|
16
17
|
subscribers: z.boolean().optional().describe('Include subscribers'),
|
|
17
18
|
projects: z.boolean().optional().describe('Include projects'),
|
|
18
|
-
}).optional().describe('Data attachments'),
|
|
19
|
+
})).optional().describe('Data attachments'),
|
|
19
20
|
order: z.string().optional().describe('Result order'),
|
|
20
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
21
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
21
22
|
after: z.string().optional().describe('Pagination cursor'),
|
|
22
23
|
}, async (params) => {
|
|
23
24
|
const result = await client.call('differential.revision.search', params);
|
|
@@ -71,7 +72,7 @@ export function registerDifferentialTools(server, client) {
|
|
|
71
72
|
});
|
|
72
73
|
// Get raw diff content
|
|
73
74
|
server.tool('phabricator_get_raw_diff', 'Get the raw diff/patch content for a Differential diff by diff ID. Use phabricator_diff_search to find the diff ID from a revision PHID first.', {
|
|
74
|
-
diffID: z.number().describe('The diff ID (numeric, e.g., 1392561). Use phabricator_diff_search to find this from a revision.'),
|
|
75
|
+
diffID: z.coerce.number().describe('The diff ID (numeric, e.g., 1392561). Use phabricator_diff_search to find this from a revision.'),
|
|
75
76
|
}, async (params) => {
|
|
76
77
|
const result = await client.call('differential.getrawdiff', {
|
|
77
78
|
diffID: params.diffID,
|
|
@@ -80,15 +81,15 @@ export function registerDifferentialTools(server, client) {
|
|
|
80
81
|
});
|
|
81
82
|
// Search diffs
|
|
82
83
|
server.tool('phabricator_diff_search', 'Search Differential diffs', {
|
|
83
|
-
constraints: z.object({
|
|
84
|
-
ids: z.array(z.number()).optional().describe('Diff IDs'),
|
|
84
|
+
constraints: jsonCoerce(z.object({
|
|
85
|
+
ids: z.array(z.coerce.number()).optional().describe('Diff IDs'),
|
|
85
86
|
phids: z.array(z.string()).optional().describe('Diff PHIDs'),
|
|
86
87
|
revisionPHIDs: z.array(z.string()).optional().describe('Revision PHIDs'),
|
|
87
|
-
}).optional().describe('Search constraints'),
|
|
88
|
-
attachments: z.object({
|
|
88
|
+
})).optional().describe('Search constraints'),
|
|
89
|
+
attachments: jsonCoerce(z.object({
|
|
89
90
|
commits: z.boolean().optional().describe('Include commit info'),
|
|
90
|
-
}).optional().describe('Data attachments'),
|
|
91
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
91
|
+
})).optional().describe('Data attachments'),
|
|
92
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
92
93
|
after: z.string().optional().describe('Pagination cursor'),
|
|
93
94
|
}, async (params) => {
|
|
94
95
|
const result = await client.call('differential.diff.search', params);
|
package/dist/tools/diffusion.js
CHANGED
|
@@ -1,24 +1,25 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerDiffusionTools(server, client) {
|
|
3
4
|
// Search repositories
|
|
4
5
|
server.tool('phabricator_repository_search', 'Search Diffusion repositories', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "active"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Repository IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Repository IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Repository PHIDs'),
|
|
9
10
|
callsigns: z.array(z.string()).optional().describe('Repository callsigns'),
|
|
10
11
|
shortNames: z.array(z.string()).optional().describe('Repository short names'),
|
|
11
12
|
types: z.array(z.string()).optional().describe('VCS types: git, hg, svn'),
|
|
12
13
|
uris: z.array(z.string()).optional().describe('Repository URIs'),
|
|
13
14
|
query: z.string().optional().describe('Full-text search query'),
|
|
14
|
-
}).optional().describe('Search constraints'),
|
|
15
|
-
attachments: z.object({
|
|
15
|
+
})).optional().describe('Search constraints'),
|
|
16
|
+
attachments: jsonCoerce(z.object({
|
|
16
17
|
uris: z.boolean().optional().describe('Include repository URIs'),
|
|
17
18
|
metrics: z.boolean().optional().describe('Include metrics'),
|
|
18
19
|
projects: z.boolean().optional().describe('Include projects'),
|
|
19
|
-
}).optional().describe('Data attachments'),
|
|
20
|
+
})).optional().describe('Data attachments'),
|
|
20
21
|
order: z.string().optional().describe('Result order'),
|
|
21
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
22
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
22
23
|
after: z.string().optional().describe('Pagination cursor'),
|
|
23
24
|
}, async (params) => {
|
|
24
25
|
const result = await client.call('diffusion.repository.search', params);
|
|
@@ -26,20 +27,20 @@ export function registerDiffusionTools(server, client) {
|
|
|
26
27
|
});
|
|
27
28
|
// Search commits
|
|
28
29
|
server.tool('phabricator_commit_search', 'Search Diffusion commits', {
|
|
29
|
-
constraints: z.object({
|
|
30
|
-
ids: z.array(z.number()).optional().describe('Commit IDs'),
|
|
30
|
+
constraints: jsonCoerce(z.object({
|
|
31
|
+
ids: z.array(z.coerce.number()).optional().describe('Commit IDs'),
|
|
31
32
|
phids: z.array(z.string()).optional().describe('Commit PHIDs'),
|
|
32
33
|
repositoryPHIDs: z.array(z.string()).optional().describe('Repository PHIDs'),
|
|
33
34
|
identifiers: z.array(z.string()).optional().describe('Commit identifiers (hashes)'),
|
|
34
35
|
authorPHIDs: z.array(z.string()).optional().describe('Author PHIDs'),
|
|
35
36
|
query: z.string().optional().describe('Full-text search query'),
|
|
36
|
-
}).optional().describe('Search constraints'),
|
|
37
|
-
attachments: z.object({
|
|
37
|
+
})).optional().describe('Search constraints'),
|
|
38
|
+
attachments: jsonCoerce(z.object({
|
|
38
39
|
projects: z.boolean().optional().describe('Include projects'),
|
|
39
40
|
subscribers: z.boolean().optional().describe('Include subscribers'),
|
|
40
|
-
}).optional().describe('Data attachments'),
|
|
41
|
+
})).optional().describe('Data attachments'),
|
|
41
42
|
order: z.string().optional().describe('Result order'),
|
|
42
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
43
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
43
44
|
after: z.string().optional().describe('Pagination cursor'),
|
|
44
45
|
}, async (params) => {
|
|
45
46
|
const result = await client.call('diffusion.commit.search', params);
|
package/dist/tools/maniphest.js
CHANGED
|
@@ -1,27 +1,28 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerManiphestTools(server, client) {
|
|
3
4
|
// Search tasks
|
|
4
5
|
server.tool('phabricator_task_search', 'Search Maniphest tasks with optional filters', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "open", "authored", "assigned"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Task IDs to search for'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Task IDs to search for'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Task PHIDs to search for'),
|
|
9
10
|
assigned: z.array(z.string()).optional().describe('Assigned user PHIDs'),
|
|
10
11
|
authorPHIDs: z.array(z.string()).optional().describe('Author PHIDs'),
|
|
11
12
|
statuses: z.array(z.string()).optional().describe('Task statuses: open, resolved, wontfix, invalid, spite, duplicate'),
|
|
12
|
-
priorities: z.array(z.number()).optional().describe('Priority levels'),
|
|
13
|
+
priorities: z.array(z.coerce.number()).optional().describe('Priority levels'),
|
|
13
14
|
subtypes: z.array(z.string()).optional().describe('Task subtypes'),
|
|
14
15
|
columnPHIDs: z.array(z.string()).optional().describe('Workboard column PHIDs'),
|
|
15
16
|
projectPHIDs: z.array(z.string()).optional().describe('Project PHIDs (tasks tagged with these projects)'),
|
|
16
17
|
query: z.string().optional().describe('Full-text search query'),
|
|
17
|
-
}).optional().describe('Search constraints'),
|
|
18
|
-
attachments: z.object({
|
|
18
|
+
})).optional().describe('Search constraints'),
|
|
19
|
+
attachments: jsonCoerce(z.object({
|
|
19
20
|
columns: z.boolean().optional().describe('Include workboard column info'),
|
|
20
21
|
projects: z.boolean().optional().describe('Include project info'),
|
|
21
22
|
subscribers: z.boolean().optional().describe('Include subscriber info'),
|
|
22
|
-
}).optional().describe('Data attachments to include'),
|
|
23
|
+
})).optional().describe('Data attachments to include'),
|
|
23
24
|
order: z.string().optional().describe('Result order: "priority", "updated", "newest", "oldest"'),
|
|
24
|
-
limit: z.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
25
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
25
26
|
after: z.string().optional().describe('Cursor for pagination'),
|
|
26
27
|
}, async (params) => {
|
|
27
28
|
const result = await client.call('maniphest.search', params);
|
package/dist/tools/paste.js
CHANGED
|
@@ -1,20 +1,21 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerPasteTools(server, client) {
|
|
3
4
|
// Search pastes
|
|
4
5
|
server.tool('phabricator_paste_search', 'Search Phabricator pastes', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "authored"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Paste IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Paste IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Paste PHIDs'),
|
|
9
10
|
authorPHIDs: z.array(z.string()).optional().describe('Author PHIDs'),
|
|
10
11
|
languages: z.array(z.string()).optional().describe('Languages'),
|
|
11
12
|
query: z.string().optional().describe('Full-text search query'),
|
|
12
|
-
}).optional().describe('Search constraints'),
|
|
13
|
-
attachments: z.object({
|
|
13
|
+
})).optional().describe('Search constraints'),
|
|
14
|
+
attachments: jsonCoerce(z.object({
|
|
14
15
|
content: z.boolean().optional().describe('Include paste content'),
|
|
15
|
-
}).optional().describe('Data attachments'),
|
|
16
|
+
})).optional().describe('Data attachments'),
|
|
16
17
|
order: z.string().optional().describe('Result order'),
|
|
17
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
18
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
18
19
|
after: z.string().optional().describe('Pagination cursor'),
|
|
19
20
|
}, async (params) => {
|
|
20
21
|
const result = await client.call('paste.search', params);
|
package/dist/tools/phame.js
CHANGED
|
@@ -1,15 +1,16 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerPhameTools(server, client) {
|
|
3
4
|
// Search blogs
|
|
4
5
|
server.tool('phabricator_blog_search', 'Search Phame blogs', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "active"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Blog IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Blog IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Blog PHIDs'),
|
|
9
10
|
query: z.string().optional().describe('Full-text search query'),
|
|
10
|
-
}).optional().describe('Search constraints'),
|
|
11
|
+
})).optional().describe('Search constraints'),
|
|
11
12
|
order: z.string().optional().describe('Result order'),
|
|
12
|
-
limit: z.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
13
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
13
14
|
after: z.string().optional().describe('Cursor for pagination'),
|
|
14
15
|
}, async (params) => {
|
|
15
16
|
const result = await client.call('phame.blog.search', params);
|
|
@@ -18,15 +19,15 @@ export function registerPhameTools(server, client) {
|
|
|
18
19
|
// Search blog posts
|
|
19
20
|
server.tool('phabricator_blog_post_search', 'Search Phame blog posts', {
|
|
20
21
|
queryKey: z.string().optional().describe('Built-in query: "all", "live"'),
|
|
21
|
-
constraints: z.object({
|
|
22
|
-
ids: z.array(z.number()).optional().describe('Post IDs'),
|
|
22
|
+
constraints: jsonCoerce(z.object({
|
|
23
|
+
ids: z.array(z.coerce.number()).optional().describe('Post IDs'),
|
|
23
24
|
phids: z.array(z.string()).optional().describe('Post PHIDs'),
|
|
24
25
|
blogPHIDs: z.array(z.string()).optional().describe('Filter by blog PHIDs'),
|
|
25
26
|
visibility: z.array(z.string()).optional().describe('Visibility: "published", "draft", "archived"'),
|
|
26
27
|
query: z.string().optional().describe('Full-text search query'),
|
|
27
|
-
}).optional().describe('Search constraints'),
|
|
28
|
+
})).optional().describe('Search constraints'),
|
|
28
29
|
order: z.string().optional().describe('Result order'),
|
|
29
|
-
limit: z.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
30
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
30
31
|
after: z.string().optional().describe('Cursor for pagination'),
|
|
31
32
|
}, async (params) => {
|
|
32
33
|
const result = await client.call('phame.post.search', params);
|
package/dist/tools/phriction.js
CHANGED
|
@@ -1,21 +1,22 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerPhrictionTools(server, client) {
|
|
3
4
|
// Search wiki documents
|
|
4
5
|
server.tool('phabricator_document_search', 'Search Phriction wiki documents', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "active"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Document IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Document IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Document PHIDs'),
|
|
9
10
|
paths: z.array(z.string()).optional().describe('Document paths'),
|
|
10
11
|
ancestorPaths: z.array(z.string()).optional().describe('Ancestor paths to search under'),
|
|
11
12
|
statuses: z.array(z.string()).optional().describe('Document statuses'),
|
|
12
13
|
query: z.string().optional().describe('Full-text search query'),
|
|
13
|
-
}).optional().describe('Search constraints'),
|
|
14
|
-
attachments: z.object({
|
|
14
|
+
})).optional().describe('Search constraints'),
|
|
15
|
+
attachments: jsonCoerce(z.object({
|
|
15
16
|
content: z.boolean().optional().describe('Include document content'),
|
|
16
|
-
}).optional().describe('Data attachments'),
|
|
17
|
+
})).optional().describe('Data attachments'),
|
|
17
18
|
order: z.string().optional().describe('Result order'),
|
|
18
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
19
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
19
20
|
after: z.string().optional().describe('Pagination cursor'),
|
|
20
21
|
}, async (params) => {
|
|
21
22
|
const result = await client.call('phriction.document.search', params);
|
package/dist/tools/project.js
CHANGED
|
@@ -1,10 +1,11 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerProjectTools(server, client) {
|
|
3
4
|
// Search projects
|
|
4
5
|
server.tool('phabricator_project_search', 'Search Phabricator projects', {
|
|
5
6
|
queryKey: z.string().optional().describe('Built-in query: "all", "active", "joined"'),
|
|
6
|
-
constraints: z.object({
|
|
7
|
-
ids: z.array(z.number()).optional().describe('Project IDs'),
|
|
7
|
+
constraints: jsonCoerce(z.object({
|
|
8
|
+
ids: z.array(z.coerce.number()).optional().describe('Project IDs'),
|
|
8
9
|
phids: z.array(z.string()).optional().describe('Project PHIDs'),
|
|
9
10
|
slugs: z.array(z.string()).optional().describe('Project slugs'),
|
|
10
11
|
name: z.string().optional().describe('Exact name match'),
|
|
@@ -14,14 +15,14 @@ export function registerProjectTools(server, client) {
|
|
|
14
15
|
isMilestone: z.boolean().optional().describe('Filter milestones'),
|
|
15
16
|
isRoot: z.boolean().optional().describe('Filter root projects'),
|
|
16
17
|
query: z.string().optional().describe('Full-text search query'),
|
|
17
|
-
}).optional().describe('Search constraints'),
|
|
18
|
-
attachments: z.object({
|
|
18
|
+
})).optional().describe('Search constraints'),
|
|
19
|
+
attachments: jsonCoerce(z.object({
|
|
19
20
|
members: z.boolean().optional().describe('Include members'),
|
|
20
21
|
watchers: z.boolean().optional().describe('Include watchers'),
|
|
21
22
|
ancestors: z.boolean().optional().describe('Include ancestors'),
|
|
22
|
-
}).optional().describe('Data attachments'),
|
|
23
|
+
})).optional().describe('Data attachments'),
|
|
23
24
|
order: z.string().optional().describe('Result order'),
|
|
24
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
25
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
25
26
|
after: z.string().optional().describe('Pagination cursor'),
|
|
26
27
|
}, async (params) => {
|
|
27
28
|
const result = await client.call('project.search', params);
|
|
@@ -67,13 +68,13 @@ export function registerProjectTools(server, client) {
|
|
|
67
68
|
});
|
|
68
69
|
// Search workboard columns
|
|
69
70
|
server.tool('phabricator_column_search', 'Search project workboard columns', {
|
|
70
|
-
constraints: z.object({
|
|
71
|
-
ids: z.array(z.number()).optional().describe('Column IDs'),
|
|
71
|
+
constraints: jsonCoerce(z.object({
|
|
72
|
+
ids: z.array(z.coerce.number()).optional().describe('Column IDs'),
|
|
72
73
|
phids: z.array(z.string()).optional().describe('Column PHIDs'),
|
|
73
74
|
projects: z.array(z.string()).optional().describe('Project PHIDs'),
|
|
74
|
-
}).optional().describe('Search constraints'),
|
|
75
|
+
})).optional().describe('Search constraints'),
|
|
75
76
|
order: z.string().optional().describe('Result order'),
|
|
76
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
77
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
77
78
|
after: z.string().optional().describe('Pagination cursor'),
|
|
78
79
|
}, async (params) => {
|
|
79
80
|
const result = await client.call('project.column.search', params);
|
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerTransactionTools(server, client) {
|
|
3
4
|
server.tool('phabricator_transaction_search', 'Search transactions (comments, status changes, etc.) on any Phabricator object (e.g., "D123", "T456")', {
|
|
4
5
|
objectIdentifier: z.string().describe('Object ID (e.g., "D123", "T456") or PHID'),
|
|
5
|
-
constraints: z.object({
|
|
6
|
+
constraints: jsonCoerce(z.object({
|
|
6
7
|
phids: z.array(z.string()).optional().describe('Transaction PHIDs'),
|
|
7
8
|
authorPHIDs: z.array(z.string()).optional().describe('Author PHIDs'),
|
|
8
|
-
}).optional().describe('Search constraints'),
|
|
9
|
-
limit: z.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
9
|
+
})).optional().describe('Search constraints'),
|
|
10
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results (max 100)'),
|
|
10
11
|
after: z.string().optional().describe('Pagination cursor'),
|
|
11
12
|
}, async (params) => {
|
|
12
13
|
const { objectIdentifier, ...searchParams } = params;
|
package/dist/tools/user.js
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
+
import { jsonCoerce } from './coerce.js';
|
|
2
3
|
export function registerUserTools(server, client) {
|
|
3
4
|
// Get current user
|
|
4
5
|
server.tool('phabricator_user_whoami', 'Get information about the current authenticated user', {}, async () => {
|
|
@@ -8,8 +9,8 @@ export function registerUserTools(server, client) {
|
|
|
8
9
|
// Search users
|
|
9
10
|
server.tool('phabricator_user_search', 'Search Phabricator users', {
|
|
10
11
|
queryKey: z.string().optional().describe('Built-in query: "all", "active", "approval"'),
|
|
11
|
-
constraints: z.object({
|
|
12
|
-
ids: z.array(z.number()).optional().describe('User IDs'),
|
|
12
|
+
constraints: jsonCoerce(z.object({
|
|
13
|
+
ids: z.array(z.coerce.number()).optional().describe('User IDs'),
|
|
13
14
|
phids: z.array(z.string()).optional().describe('User PHIDs'),
|
|
14
15
|
usernames: z.array(z.string()).optional().describe('Usernames'),
|
|
15
16
|
nameLike: z.string().optional().describe('Name prefix search'),
|
|
@@ -18,12 +19,12 @@ export function registerUserTools(server, client) {
|
|
|
18
19
|
isBot: z.boolean().optional().describe('Filter by bot status'),
|
|
19
20
|
isMailingList: z.boolean().optional().describe('Filter by mailing list status'),
|
|
20
21
|
query: z.string().optional().describe('Full-text search query'),
|
|
21
|
-
}).optional().describe('Search constraints'),
|
|
22
|
-
attachments: z.object({
|
|
22
|
+
})).optional().describe('Search constraints'),
|
|
23
|
+
attachments: jsonCoerce(z.object({
|
|
23
24
|
availability: z.boolean().optional().describe('Include availability info'),
|
|
24
|
-
}).optional().describe('Data attachments'),
|
|
25
|
+
})).optional().describe('Data attachments'),
|
|
25
26
|
order: z.string().optional().describe('Result order'),
|
|
26
|
-
limit: z.number().max(100).optional().describe('Maximum results'),
|
|
27
|
+
limit: z.coerce.number().max(100).optional().describe('Maximum results'),
|
|
27
28
|
after: z.string().optional().describe('Pagination cursor'),
|
|
28
29
|
}, async (params) => {
|
|
29
30
|
const result = await client.call('user.search', params);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@freelancercom/phabricator-mcp",
|
|
3
|
-
"version": "1.0.0",
|
|
3
|
+
"version": "1.0.3",
|
|
4
4
|
"description": "MCP server for Phabricator Conduit API - manage tasks, code reviews, repositories, and more",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -15,8 +15,7 @@
|
|
|
15
15
|
"prepare": "npm run build",
|
|
16
16
|
"start": "node dist/index.js",
|
|
17
17
|
"dev": "tsx --watch src/index.ts",
|
|
18
|
-
"typecheck": "tsc --noEmit"
|
|
19
|
-
"test": "node --test --import tsx 'src/**/*.test.ts'"
|
|
18
|
+
"typecheck": "tsc --noEmit"
|
|
20
19
|
},
|
|
21
20
|
"keywords": [
|
|
22
21
|
"mcp",
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,97 +0,0 @@
|
|
|
1
|
-
import { describe, it, mock, beforeEach, afterEach } from 'node:test';
|
|
2
|
-
import assert from 'node:assert';
|
|
3
|
-
import { ConduitClient, ConduitError } from './conduit.js';
|
|
4
|
-
describe('ConduitClient', () => {
|
|
5
|
-
const mockConfig = {
|
|
6
|
-
phabricatorUrl: 'https://phabricator.example.com',
|
|
7
|
-
apiToken: 'api-test-token',
|
|
8
|
-
};
|
|
9
|
-
let originalFetch;
|
|
10
|
-
beforeEach(() => {
|
|
11
|
-
originalFetch = global.fetch;
|
|
12
|
-
});
|
|
13
|
-
afterEach(() => {
|
|
14
|
-
global.fetch = originalFetch;
|
|
15
|
-
});
|
|
16
|
-
it('should construct correct API URL', async () => {
|
|
17
|
-
let capturedUrl;
|
|
18
|
-
global.fetch = mock.fn(async (url) => {
|
|
19
|
-
capturedUrl = url;
|
|
20
|
-
return new Response(JSON.stringify({ result: {}, error_code: null, error_info: null }));
|
|
21
|
-
});
|
|
22
|
-
const client = new ConduitClient(mockConfig);
|
|
23
|
-
await client.call('user.whoami');
|
|
24
|
-
assert.strictEqual(capturedUrl, 'https://phabricator.example.com/api/user.whoami');
|
|
25
|
-
});
|
|
26
|
-
it('should include API token in request body', async () => {
|
|
27
|
-
let capturedBody;
|
|
28
|
-
global.fetch = mock.fn(async (_url, init) => {
|
|
29
|
-
capturedBody = init?.body;
|
|
30
|
-
return new Response(JSON.stringify({ result: {}, error_code: null, error_info: null }));
|
|
31
|
-
});
|
|
32
|
-
const client = new ConduitClient(mockConfig);
|
|
33
|
-
await client.call('user.whoami');
|
|
34
|
-
assert.ok(capturedBody);
|
|
35
|
-
const params = new URLSearchParams(capturedBody);
|
|
36
|
-
const paramsJson = JSON.parse(params.get('params'));
|
|
37
|
-
assert.strictEqual(paramsJson.__conduit__.token, 'api-test-token');
|
|
38
|
-
});
|
|
39
|
-
it('should pass parameters to the API', async () => {
|
|
40
|
-
let capturedBody;
|
|
41
|
-
global.fetch = mock.fn(async (_url, init) => {
|
|
42
|
-
capturedBody = init?.body;
|
|
43
|
-
return new Response(JSON.stringify({ result: {}, error_code: null, error_info: null }));
|
|
44
|
-
});
|
|
45
|
-
const client = new ConduitClient(mockConfig);
|
|
46
|
-
await client.call('maniphest.search', { queryKey: 'assigned', limit: 10 });
|
|
47
|
-
const params = new URLSearchParams(capturedBody);
|
|
48
|
-
const paramsJson = JSON.parse(params.get('params'));
|
|
49
|
-
assert.strictEqual(paramsJson.queryKey, 'assigned');
|
|
50
|
-
assert.strictEqual(paramsJson.limit, 10);
|
|
51
|
-
});
|
|
52
|
-
it('should return result on success', async () => {
|
|
53
|
-
const expectedResult = { userName: 'testuser', realName: 'Test User' };
|
|
54
|
-
global.fetch = mock.fn(async () => {
|
|
55
|
-
return new Response(JSON.stringify({ result: expectedResult, error_code: null, error_info: null }));
|
|
56
|
-
});
|
|
57
|
-
const client = new ConduitClient(mockConfig);
|
|
58
|
-
const result = await client.call('user.whoami');
|
|
59
|
-
assert.deepStrictEqual(result, expectedResult);
|
|
60
|
-
});
|
|
61
|
-
it('should throw ConduitError on API error', async () => {
|
|
62
|
-
global.fetch = mock.fn(async () => {
|
|
63
|
-
return new Response(JSON.stringify({
|
|
64
|
-
result: null,
|
|
65
|
-
error_code: 'ERR-CONDUIT-CORE',
|
|
66
|
-
error_info: 'Invalid token',
|
|
67
|
-
}));
|
|
68
|
-
});
|
|
69
|
-
const client = new ConduitClient(mockConfig);
|
|
70
|
-
await assert.rejects(() => client.call('user.whoami'), (err) => {
|
|
71
|
-
assert.ok(err instanceof ConduitError);
|
|
72
|
-
assert.strictEqual(err.code, 'ERR-CONDUIT-CORE');
|
|
73
|
-
assert.strictEqual(err.message, 'Invalid token');
|
|
74
|
-
return true;
|
|
75
|
-
});
|
|
76
|
-
});
|
|
77
|
-
it('should throw ConduitError on HTTP error', async () => {
|
|
78
|
-
global.fetch = mock.fn(async () => {
|
|
79
|
-
return new Response('Not Found', { status: 404, statusText: 'Not Found' });
|
|
80
|
-
});
|
|
81
|
-
const client = new ConduitClient(mockConfig);
|
|
82
|
-
await assert.rejects(() => client.call('user.whoami'), (err) => {
|
|
83
|
-
assert.ok(err instanceof ConduitError);
|
|
84
|
-
assert.strictEqual(err.code, 'HTTP_ERROR');
|
|
85
|
-
assert.ok(err.message.includes('404'));
|
|
86
|
-
return true;
|
|
87
|
-
});
|
|
88
|
-
});
|
|
89
|
-
});
|
|
90
|
-
describe('ConduitError', () => {
|
|
91
|
-
it('should have correct name and properties', () => {
|
|
92
|
-
const error = new ConduitError('TEST_CODE', 'Test message');
|
|
93
|
-
assert.strictEqual(error.name, 'ConduitError');
|
|
94
|
-
assert.strictEqual(error.code, 'TEST_CODE');
|
|
95
|
-
assert.strictEqual(error.message, 'Test message');
|
|
96
|
-
});
|
|
97
|
-
});
|
package/dist/config.test.d.ts
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
package/dist/config.test.js
DELETED
|
@@ -1,54 +0,0 @@
|
|
|
1
|
-
import { describe, it, beforeEach, afterEach } from 'node:test';
|
|
2
|
-
import assert from 'node:assert';
|
|
3
|
-
describe('loadConfig', () => {
|
|
4
|
-
const originalEnv = { ...process.env };
|
|
5
|
-
beforeEach(() => {
|
|
6
|
-
// Clear relevant env vars
|
|
7
|
-
delete process.env.PHABRICATOR_URL;
|
|
8
|
-
delete process.env.PHABRICATOR_API_TOKEN;
|
|
9
|
-
});
|
|
10
|
-
afterEach(() => {
|
|
11
|
-
process.env = { ...originalEnv };
|
|
12
|
-
});
|
|
13
|
-
it('should load config from environment variables', async () => {
|
|
14
|
-
process.env.PHABRICATOR_URL = 'https://phabricator.example.com';
|
|
15
|
-
process.env.PHABRICATOR_API_TOKEN = 'api-test-token';
|
|
16
|
-
// Re-import to get fresh module
|
|
17
|
-
const { loadConfig } = await import('./config.js');
|
|
18
|
-
const config = loadConfig();
|
|
19
|
-
assert.strictEqual(config.phabricatorUrl, 'https://phabricator.example.com');
|
|
20
|
-
assert.strictEqual(config.apiToken, 'api-test-token');
|
|
21
|
-
});
|
|
22
|
-
it('should strip trailing slash from URL', async () => {
|
|
23
|
-
process.env.PHABRICATOR_URL = 'https://phabricator.example.com/';
|
|
24
|
-
process.env.PHABRICATOR_API_TOKEN = 'api-test-token';
|
|
25
|
-
const { loadConfig } = await import('./config.js');
|
|
26
|
-
const config = loadConfig();
|
|
27
|
-
assert.strictEqual(config.phabricatorUrl, 'https://phabricator.example.com');
|
|
28
|
-
});
|
|
29
|
-
it('should throw error for invalid URL', async () => {
|
|
30
|
-
process.env.PHABRICATOR_URL = 'not-a-valid-url';
|
|
31
|
-
process.env.PHABRICATOR_API_TOKEN = 'api-test-token';
|
|
32
|
-
const { loadConfig } = await import('./config.js?v=1');
|
|
33
|
-
assert.throws(() => loadConfig(), /Invalid url/);
|
|
34
|
-
});
|
|
35
|
-
it('should throw error for empty token', async () => {
|
|
36
|
-
process.env.PHABRICATOR_URL = 'https://phabricator.example.com';
|
|
37
|
-
process.env.PHABRICATOR_API_TOKEN = '';
|
|
38
|
-
// When token is empty string, it should either throw or fall back to arcrc
|
|
39
|
-
// Since arcrc exists on this machine, it will use that - so we skip this test
|
|
40
|
-
// if arcrc is present. The important thing is it doesn't accept empty string.
|
|
41
|
-
const { loadConfig } = await import('./config.js?v=2');
|
|
42
|
-
// This test verifies the schema validation - empty string should fail zod validation
|
|
43
|
-
// but it may fall back to arcrc first, so we just verify it doesn't crash
|
|
44
|
-
try {
|
|
45
|
-
const config = loadConfig();
|
|
46
|
-
// If it succeeds, it used arcrc fallback which is fine
|
|
47
|
-
assert.ok(config.apiToken.length > 0);
|
|
48
|
-
}
|
|
49
|
-
catch (e) {
|
|
50
|
-
// If it throws, that's also fine - means it correctly rejected empty token
|
|
51
|
-
assert.ok(e.message.includes('too_small') || e.message.includes('API_TOKEN'));
|
|
52
|
-
}
|
|
53
|
-
});
|
|
54
|
-
});
|