@lightward/mechanic-mcp 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,61 @@
1
+ # mechanic-mcp
2
+
3
+ Mechanic MCP server for the task library and docs. Built for writing and customizing Mechanic tasks (Shopify automation app: https://apps.shopify.com/mechanic). Offline by default (bundled data), serving public URLs for tasks (https://tasks.mechanic.dev) and docs (https://learn.mechanic.dev).
4
+
5
+ ## User guide
6
+ - Requirements: Node.js 18+, MCP-capable client (Cursor, Claude Desktop, Codex, Gemini CLI, etc.).
7
+ - What you can ask: find tasks; fetch task code (subscriptions + script/JS blocks); find docs; suggest similar tasks; get doc content; help writing or customizing Mechanic tasks.
8
+ - Setup (use `npx mechanic-mcp@latest`):
9
+ - Cursor:
10
+ ```json
11
+ {
12
+ "mcpServers": {
13
+ "mechanic-mcp": {
14
+ "command": "npx",
15
+ "args": ["-y", "mechanic-mcp@latest"]
16
+ }
17
+ }
18
+ }
19
+ ```
20
+ - Claude Desktop:
21
+ ```json
22
+ {
23
+ "mcpServers": {
24
+ "mechanic-mcp": {
25
+ "command": "npx",
26
+ "args": ["-y", "mechanic-mcp@latest"]
27
+ }
28
+ }
29
+ }
30
+ ```
31
+ - Codex (`~/.codex/config.toml`):
32
+ ```toml
33
+ [mcp_servers.mechanic-mcp]
34
+ command = "npx"
35
+ args = ["-y", "mechanic-mcp@latest"]
36
+ ```
37
+ - Gemini CLI: same JSON as Cursor/Claude.
38
+ - Tools:
39
+ - `search_tasks`: returns public URL, tags, subscriptions/subscriptions_template, options.
40
+ - `search_docs`: returns public URL/sourceUrl.
41
+ - `get_task` (tasks only): script + subscriptions + options + JS blocks; not full JSON.
42
+ - `get_doc` (docs only): full markdown.
43
+ - `similar_tasks`: related tasks by tags/subscriptions/title.
44
+ - `refresh_index`: rebuild (not needed for packaged data).
45
+ - Usage notes: cite public URLs (no local paths/.md); prefer GraphQL in code; when sharing code, return subscriptions + script/JS (relevant bits), not full JSON.
46
+
47
+ ## For maintainers
48
+ - Bundled data: `dist/data/index.json.gz`, `records.json.gz`, `manifest.json` (users don’t need source repos).
49
+ - Regenerate (if needed):
50
+ ```bash
51
+ MECHANIC_DOCS_PATH=/path/to/mechanic-docs MECHANIC_TASKS_PATH=/path/to/mechanic-tasks npm run build:data
52
+ npm run build
53
+ ```
54
+ - Tests: `npm run test:smoke`, `npm run test:smoke-doc`, `npm run test:smoke-task`.
55
+ - Publish: bump version, `npm publish`.
56
+
57
+ ## Env (optional)
58
+ - `MECHANIC_DATA_PATH` (default `dist/data`), `MECHANIC_DOCS_PATH`, `MECHANIC_TASKS_PATH`, repo URLs/branches, sync interval.
59
+
60
+ ## Runtime
61
+ - Loads bundled index/records from `MECHANIC_DATA_PATH`; `refresh_index` rebuilds if you opt in. Stdio transport; TF-IDF search with fuzzy + pagination; no network calls for search/resources.
package/dist/config.js ADDED
@@ -0,0 +1,33 @@
1
+ import path from 'node:path';
2
+ import { fileURLToPath } from 'node:url';
3
+ const env = process.env;
4
+ function resolvePath(value, fallback) {
5
+ if (value && path.isAbsolute(value)) {
6
+ return value;
7
+ }
8
+ if (value) {
9
+ return path.resolve(value);
10
+ }
11
+ return fallback;
12
+ }
13
+ export function loadConfig() {
14
+ const moduleDir = path.dirname(fileURLToPath(import.meta.url));
15
+ const projectRoot = path.resolve(moduleDir, '..');
16
+ return {
17
+ docs: {
18
+ url: env.MECHANIC_DOCS_REPO_URL,
19
+ branch: env.MECHANIC_DOCS_BRANCH || 'main',
20
+ localPath: resolvePath(env.MECHANIC_DOCS_PATH, path.resolve(projectRoot, 'mechanic-docs')),
21
+ },
22
+ tasks: {
23
+ url: env.MECHANIC_TASKS_REPO_URL,
24
+ branch: env.MECHANIC_TASKS_BRANCH || 'main',
25
+ localPath: resolvePath(env.MECHANIC_TASKS_PATH, path.resolve(projectRoot, 'mechanic-tasks')),
26
+ },
27
+ syncIntervalMinutes: Number(env.MECHANIC_SYNC_MINUTES || 30),
28
+ index: {
29
+ maxDocs: Number(env.MECHANIC_INDEX_MAX_DOCS || 20000),
30
+ },
31
+ dataDir: resolvePath(env.MECHANIC_DATA_PATH, path.resolve(projectRoot, 'dist', 'data')),
32
+ };
33
+ }
@@ -0,0 +1,66 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import matter from 'gray-matter';
4
+ import { walkFiles } from './fs.js';
5
+ const DOC_EXTENSIONS = new Set(['.md', '.mdx']);
6
+ function toTags(value) {
7
+ if (Array.isArray(value)) {
8
+ return value.map(String);
9
+ }
10
+ if (typeof value === 'string') {
11
+ return value.split(',').map((tag) => tag.trim()).filter(Boolean);
12
+ }
13
+ return [];
14
+ }
15
+ function firstHeading(content) {
16
+ const lines = content.split('\n');
17
+ for (const line of lines) {
18
+ const match = /^#{1,3}\s+(.*)$/.exec(line.trim());
19
+ if (match) {
20
+ return match[1].trim();
21
+ }
22
+ }
23
+ return undefined;
24
+ }
25
+ function extractHeadings(content) {
26
+ return content
27
+ .split('\n')
28
+ .map((line) => /^#{1,3}\s+(.*)$/.exec(line.trim()))
29
+ .filter((match) => Boolean(match))
30
+ .map((match) => match[1].trim());
31
+ }
32
+ export async function loadDocs(root) {
33
+ let files = [];
34
+ try {
35
+ files = await walkFiles(root, (file) => DOC_EXTENSIONS.has(path.extname(file)));
36
+ }
37
+ catch (error) {
38
+ const err = error;
39
+ if (err.code === 'ENOENT') {
40
+ console.error(`Docs path not found (${root}); returning no docs.`);
41
+ return [];
42
+ }
43
+ throw error;
44
+ }
45
+ const records = await Promise.all(files.map(async (filePath) => {
46
+ const raw = await fs.readFile(filePath, 'utf8');
47
+ const parsed = matter(raw);
48
+ const relPath = path.relative(root, filePath);
49
+ const heading = firstHeading(parsed.content);
50
+ const title = parsed.data.title || heading || path.basename(filePath, path.extname(filePath));
51
+ const urlPath = relPath.replace(/\\/g, '/');
52
+ const record = {
53
+ id: `doc:${relPath}`,
54
+ kind: 'doc',
55
+ title,
56
+ path: `https://learn.mechanic.dev/${urlPath}`,
57
+ section: relPath.split(path.sep)[0],
58
+ tags: toTags(parsed.data.tags),
59
+ headings: extractHeadings(parsed.content),
60
+ content: parsed.content,
61
+ sourceUrl: `https://learn.mechanic.dev/${urlPath}`,
62
+ };
63
+ return record;
64
+ }));
65
+ return records;
66
+ }
@@ -0,0 +1,60 @@
1
+ import { loadDocs } from './docs.js';
2
+ import { loadTasks } from './tasks.js';
3
+ import { buildIndex, searchAdvanced } from './indexer.js';
4
+ import { gunzipSync } from 'node:zlib';
5
+ import fs from 'node:fs';
6
+ import path from 'node:path';
7
+ async function loadRecordsFromBundle(dataDir) {
8
+ try {
9
+ const recordsPath = path.join(dataDir, 'records.json.gz');
10
+ const raw = fs.readFileSync(recordsPath);
11
+ const json = gunzipSync(raw).toString('utf-8');
12
+ return JSON.parse(json);
13
+ }
14
+ catch (error) {
15
+ return null;
16
+ }
17
+ }
18
+ export async function hydrateStore(config, prebuiltIndex, bundledRecords) {
19
+ const records = bundledRecords ||
20
+ (await (async () => {
21
+ const [docs, tasks] = await Promise.all([loadDocs(config.docs.localPath), loadTasks(config.tasks.localPath)]);
22
+ return [...docs, ...tasks];
23
+ })());
24
+ const index = prebuiltIndex || buildIndex(records);
25
+ return {
26
+ records,
27
+ index,
28
+ lastIndexed: new Date(),
29
+ };
30
+ }
31
+ export function searchStore(store, query, kind, limit) {
32
+ return searchAdvanced(store.index, {
33
+ query,
34
+ kind,
35
+ limit,
36
+ });
37
+ }
38
+ export function searchStoreAdvanced(store, options) {
39
+ return searchAdvanced(store.index, options);
40
+ }
41
+ export function loadPrebuiltIndex(dataDir) {
42
+ try {
43
+ const indexPath = path.join(dataDir, 'index.json.gz');
44
+ const raw = fs.readFileSync(indexPath);
45
+ const json = gunzipSync(raw).toString('utf-8');
46
+ return JSON.parse(json);
47
+ }
48
+ catch (error) {
49
+ // eslint-disable-next-line no-console
50
+ console.warn(`Failed to load prebuilt index from ${dataDir}: ${error}`);
51
+ return null;
52
+ }
53
+ }
54
+ export async function loadBundledRecords(dataDir) {
55
+ const records = await loadRecordsFromBundle(dataDir);
56
+ if (!records) {
57
+ console.warn(`No bundled records found in ${dataDir}`);
58
+ }
59
+ return records;
60
+ }
@@ -0,0 +1,18 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ export async function walkFiles(root, predicate) {
4
+ const entries = await fs.readdir(root, { withFileTypes: true });
5
+ const results = [];
6
+ for (const entry of entries) {
7
+ const fullPath = path.join(root, entry.name);
8
+ if (entry.isDirectory()) {
9
+ const nested = await walkFiles(fullPath, predicate);
10
+ results.push(...nested);
11
+ continue;
12
+ }
13
+ if (entry.isFile() && predicate(fullPath)) {
14
+ results.push(fullPath);
15
+ }
16
+ }
17
+ return results;
18
+ }
@@ -0,0 +1,36 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import { execFile } from 'node:child_process';
4
+ import { promisify } from 'node:util';
5
+ const execFileAsync = promisify(execFile);
6
+ async function pathExists(target) {
7
+ try {
8
+ await fs.access(target);
9
+ return true;
10
+ }
11
+ catch {
12
+ return false;
13
+ }
14
+ }
15
+ async function ensureRepoCloned(config) {
16
+ const hasGitDir = await pathExists(path.join(config.localPath, '.git'));
17
+ if (hasGitDir) {
18
+ return;
19
+ }
20
+ if (!config.url) {
21
+ // No git repo and no URL provided: assume caller is pointing at a static/local path and skip cloning.
22
+ return;
23
+ }
24
+ await fs.mkdir(config.localPath, { recursive: true });
25
+ await execFileAsync('git', ['clone', '--branch', config.branch, config.url, config.localPath]);
26
+ }
27
+ export async function syncRepo(config) {
28
+ if (!config.url) {
29
+ // No remote; assume local path is already present and skip sync.
30
+ return;
31
+ }
32
+ await ensureRepoCloned(config);
33
+ await execFileAsync('git', ['fetch', '--prune'], { cwd: config.localPath });
34
+ await execFileAsync('git', ['checkout', config.branch], { cwd: config.localPath });
35
+ await execFileAsync('git', ['reset', '--hard', `origin/${config.branch}`], { cwd: config.localPath });
36
+ }
@@ -0,0 +1,253 @@
1
+ const STOP_WORDS = new Set([
2
+ 'a',
3
+ 'an',
4
+ 'and',
5
+ 'for',
6
+ 'from',
7
+ 'in',
8
+ 'of',
9
+ 'on',
10
+ 'or',
11
+ 'the',
12
+ 'to',
13
+ 'with',
14
+ ]);
15
+ const FIELD_CONFIG = {
16
+ title: {
17
+ weight: 5,
18
+ extractor: (item) => item.title,
19
+ },
20
+ tags: {
21
+ weight: 3.5,
22
+ extractor: (item) => item.tags.join(' '),
23
+ },
24
+ content: {
25
+ weight: 1.5,
26
+ extractor: (item) => item.content,
27
+ },
28
+ slug: {
29
+ weight: 4,
30
+ extractor: (item) => (item.kind === 'task' ? item.slug : ''),
31
+ },
32
+ headings: {
33
+ weight: 3,
34
+ extractor: (item) => (item.kind === 'doc' ? item.headings.join(' ') : ''),
35
+ },
36
+ section: {
37
+ weight: 2,
38
+ extractor: (item) => (item.kind === 'doc' ? item.section ?? '' : ''),
39
+ },
40
+ events: {
41
+ weight: 2,
42
+ extractor: (item) => (item.kind === 'task' ? item.events.join(' ') : ''),
43
+ },
44
+ actions: {
45
+ weight: 2,
46
+ extractor: (item) => (item.kind === 'task' ? item.actions.join(' ') : ''),
47
+ },
48
+ scopes: {
49
+ weight: 2,
50
+ extractor: (item) => (item.kind === 'task' ? item.scopes.join(' ') : ''),
51
+ },
52
+ };
53
+ function normalizeText(value) {
54
+ if (value === null || value === undefined)
55
+ return '';
56
+ if (Array.isArray(value))
57
+ return value.join(' ');
58
+ return String(value);
59
+ }
60
+ function tokenize(text) {
61
+ return normalizeText(text)
62
+ .toLowerCase()
63
+ .replace(/[_/]+/g, ' ')
64
+ .split(/[^a-z0-9]+/g)
65
+ .filter(Boolean)
66
+ .filter((token) => !STOP_WORDS.has(token));
67
+ }
68
+ function countTokens(text) {
69
+ return tokenize(text).reduce((counts, token) => {
70
+ counts[token] = (counts[token] || 0) + 1;
71
+ return counts;
72
+ }, {});
73
+ }
74
+ function levenshtein(a, b) {
75
+ if (a === b)
76
+ return 0;
77
+ if (!a.length)
78
+ return b.length;
79
+ if (!b.length)
80
+ return a.length;
81
+ const dp = Array.from({ length: a.length + 1 }, () => new Array(b.length + 1));
82
+ for (let i = 0; i <= a.length; i += 1)
83
+ dp[i][0] = i;
84
+ for (let j = 0; j <= b.length; j += 1)
85
+ dp[0][j] = j;
86
+ for (let i = 1; i <= a.length; i += 1) {
87
+ for (let j = 1; j <= b.length; j += 1) {
88
+ const cost = a[i - 1] === b[j - 1] ? 0 : 1;
89
+ dp[i][j] = Math.min(dp[i - 1][j] + 1, dp[i][j - 1] + 1, dp[i - 1][j - 1] + cost);
90
+ }
91
+ }
92
+ return dp[a.length][b.length];
93
+ }
94
+ function indexRecord(item) {
95
+ const fieldText = {};
96
+ const tokenFreq = {};
97
+ Object.entries(FIELD_CONFIG).forEach(([field, config]) => {
98
+ const text = normalizeText(config.extractor(item));
99
+ fieldText[field] = text;
100
+ tokenFreq[field] = countTokens(text);
101
+ });
102
+ return {
103
+ id: item.id,
104
+ kind: item.kind,
105
+ path: item.path,
106
+ tags: item.tags,
107
+ fieldText,
108
+ tokenFreq,
109
+ raw: item,
110
+ };
111
+ }
112
+ function buildDocFrequency(indexed) {
113
+ const docFreq = {};
114
+ indexed.forEach((doc) => {
115
+ const seen = new Set();
116
+ Object.values(doc.tokenFreq).forEach((fieldTokens) => {
117
+ Object.keys(fieldTokens).forEach((token) => seen.add(token));
118
+ });
119
+ seen.forEach((token) => {
120
+ docFreq[token] = (docFreq[token] || 0) + 1;
121
+ });
122
+ });
123
+ return docFreq;
124
+ }
125
+ export function buildIndex(records) {
126
+ const docs = records.map(indexRecord);
127
+ const docFreq = buildDocFrequency(docs);
128
+ return {
129
+ documents: docs,
130
+ docFreq,
131
+ totalDocuments: docs.length,
132
+ };
133
+ }
134
+ function fuzzyMatchToken(token, docFreq, maxEdits, limit) {
135
+ const vocabulary = Object.keys(docFreq);
136
+ const candidates = [];
137
+ for (const vocabToken of vocabulary) {
138
+ const distance = levenshtein(token, vocabToken);
139
+ if (distance > maxEdits)
140
+ continue;
141
+ const weight = distance === 0 ? 1 : distance === 1 ? 0.6 : 0.3;
142
+ candidates.push({ token: vocabToken, distance, weight });
143
+ }
144
+ if (!candidates.find((c) => c.distance === 0)) {
145
+ candidates.push({ token, distance: 0, weight: 1 });
146
+ }
147
+ candidates.sort((a, b) => {
148
+ if (a.distance !== b.distance)
149
+ return a.distance - b.distance;
150
+ return (docFreq[b.token] || 0) - (docFreq[a.token] || 0);
151
+ });
152
+ return candidates.slice(0, limit);
153
+ }
154
+ function buildTokenCandidates(tokens, docFreq, fuzzy, maxEdits, maxCandidates) {
155
+ return tokens.map((token) => ({
156
+ queryToken: token,
157
+ candidates: fuzzy
158
+ ? fuzzyMatchToken(token, docFreq, maxEdits, maxCandidates)
159
+ : [{ token, distance: 0, weight: 1 }],
160
+ }));
161
+ }
162
+ function scoreDocument(doc, tokenCandidates, built, kind) {
163
+ if (kind && doc.kind !== kind) {
164
+ return null;
165
+ }
166
+ const { docFreq, totalDocuments } = built;
167
+ let score = 0;
168
+ const matches = [];
169
+ tokenCandidates.forEach(({ candidates }) => {
170
+ candidates.forEach(({ token, weight }) => {
171
+ const idf = Math.log(1 + totalDocuments / (1 + (docFreq[token] || 0)));
172
+ Object.entries(doc.tokenFreq).forEach(([field, freqMap]) => {
173
+ const freq = freqMap[token] || 0;
174
+ if (freq === 0)
175
+ return;
176
+ const fieldWeight = FIELD_CONFIG[field]?.weight ?? 1;
177
+ const fieldScore = freq * fieldWeight * idf * weight;
178
+ score += fieldScore;
179
+ matches.push({ field, token });
180
+ });
181
+ });
182
+ });
183
+ if (tokenCandidates.length > 0 && score === 0) {
184
+ return null;
185
+ }
186
+ return { score, matches };
187
+ }
188
+ function buildSnippet(text, queryTokens, length = 200) {
189
+ if (!text)
190
+ return '';
191
+ const lower = text.toLowerCase();
192
+ for (const token of queryTokens) {
193
+ const idx = lower.indexOf(token);
194
+ if (idx !== -1) {
195
+ const start = Math.max(0, idx - Math.floor(length / 2));
196
+ const end = Math.min(text.length, start + length);
197
+ const prefix = start > 0 ? '…' : '';
198
+ const suffix = end < text.length ? '…' : '';
199
+ return `${prefix}${text.slice(start, end).trim()}${suffix}`;
200
+ }
201
+ }
202
+ return text.slice(0, length).trim();
203
+ }
204
+ function buildDocSnippet(item, queryTokens) {
205
+ if (item.kind === 'doc') {
206
+ const heading = item.headings[0] || item.title;
207
+ const excerpt = buildSnippet(item.content, queryTokens);
208
+ return `${heading}: ${excerpt}`;
209
+ }
210
+ // For tasks, use docs content if present as intro
211
+ return buildSnippet(item.content, queryTokens);
212
+ }
213
+ function parseQuery(query) {
214
+ return tokenize(query);
215
+ }
216
+ export function searchAdvanced(built, options) {
217
+ const { query, kind, limit = 10, offset = 0, fuzzy = true, fuzzyMaxEdits = 1, fuzzyMaxCandidates = 3, tags, subscriptions, } = options;
218
+ const tokens = parseQuery(query);
219
+ const tokenCandidates = buildTokenCandidates(tokens, built.docFreq, fuzzy, fuzzyMaxEdits, fuzzyMaxCandidates);
220
+ const snippetTokens = tokenCandidates.flatMap((t) => t.candidates.map((c) => c.token));
221
+ const scored = [];
222
+ built.documents.forEach((doc) => {
223
+ if (tags && tags.length > 0) {
224
+ const docTags = (doc.raw.kind === 'task' ? doc.raw.tags : doc.tags) || [];
225
+ const lower = docTags.map((t) => t.toLowerCase());
226
+ const hasAll = tags.every((t) => lower.includes(t.toLowerCase()));
227
+ if (!hasAll)
228
+ return;
229
+ }
230
+ if (subscriptions && subscriptions.length > 0 && doc.raw.kind === 'task') {
231
+ const subs = (doc.raw.events || []).map((s) => s.toLowerCase()).join(' ');
232
+ const templates = (doc.raw.subscriptions_template || '').toLowerCase();
233
+ const combined = `${subs} ${templates}`;
234
+ const hasAllSubs = subscriptions.every((s) => combined.includes(s.toLowerCase()));
235
+ if (!hasAllSubs)
236
+ return;
237
+ }
238
+ const scoredDoc = scoreDocument(doc, tokenCandidates, built, kind);
239
+ if (!scoredDoc)
240
+ return;
241
+ scored.push({ doc, score: scoredDoc.score, matches: scoredDoc.matches });
242
+ });
243
+ scored.sort((a, b) => b.score - a.score);
244
+ return scored.slice(offset, offset + limit).map(({ doc, score }) => ({
245
+ id: doc.id,
246
+ kind: doc.kind,
247
+ title: doc.raw.title,
248
+ path: doc.path,
249
+ snippet: buildDocSnippet(doc.raw, snippetTokens),
250
+ tags: doc.tags,
251
+ score,
252
+ }));
253
+ }
@@ -0,0 +1,71 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ const TASKS_SUBDIR = 'tasks';
4
+ async function readTasks(root) {
5
+ const taskDir = path.join(root, TASKS_SUBDIR);
6
+ let entries;
7
+ try {
8
+ entries = await fs.readdir(taskDir, { withFileTypes: true });
9
+ }
10
+ catch (error) {
11
+ const err = error;
12
+ if (err.code === 'ENOENT') {
13
+ console.error(`Tasks path not found (${taskDir}); returning no tasks.`);
14
+ return [];
15
+ }
16
+ throw error;
17
+ }
18
+ const tasks = [];
19
+ for (const entry of entries) {
20
+ if (!entry.isFile() || !entry.name.endsWith('.json')) {
21
+ continue;
22
+ }
23
+ if (entry.name.startsWith('.')) {
24
+ continue;
25
+ }
26
+ const filePath = path.join(taskDir, entry.name);
27
+ const raw = await fs.readFile(filePath, 'utf8');
28
+ const json = JSON.parse(raw);
29
+ const handle = entry.name.replace(/\.json$/, '');
30
+ const tags = Array.isArray(json.tags) ? json.tags.map(String) : [];
31
+ const subscriptions = Array.isArray(json.subscriptions) ? json.subscriptions.map(String) : [];
32
+ const options = json.options;
33
+ const script = json.script;
34
+ const onlineJs = json.online_store_javascript;
35
+ const orderJs = json.order_status_javascript;
36
+ const subsTemplate = json.subscriptions_template;
37
+ const pathPublic = `https://tasks.mechanic.dev/${handle}`;
38
+ const record = {
39
+ id: `task:${handle}`,
40
+ kind: 'task',
41
+ slug: handle,
42
+ title: json.name || handle,
43
+ path: pathPublic,
44
+ summary: json.docs || '',
45
+ tags,
46
+ events: subscriptions,
47
+ actions: [],
48
+ scopes: [],
49
+ risk: 'unknown',
50
+ options,
51
+ script,
52
+ online_store_javascript: onlineJs ?? null,
53
+ order_status_javascript: orderJs ?? null,
54
+ subscriptions_template: subsTemplate,
55
+ content: [
56
+ json.docs || '',
57
+ json.subscriptions_template || '',
58
+ json.script || '',
59
+ json.online_store_javascript || '',
60
+ json.order_status_javascript || '',
61
+ ]
62
+ .filter(Boolean)
63
+ .join('\n\n'),
64
+ };
65
+ tasks.push(record);
66
+ }
67
+ return tasks;
68
+ }
69
+ export async function loadTasks(root) {
70
+ return readTasks(root);
71
+ }
Binary file
@@ -0,0 +1,12 @@
1
+ {
2
+ "builtAt": "2025-12-02T04:31:30.664Z",
3
+ "counts": {
4
+ "docs": 312,
5
+ "tasks": 357,
6
+ "total": 669
7
+ },
8
+ "sources": {
9
+ "docsPath": "/Users/msodomsky/dev/mechanic-docs",
10
+ "tasksPath": "/Users/msodomsky/dev/mechanic-tasks"
11
+ }
12
+ }
Binary file
package/dist/index.js ADDED
@@ -0,0 +1,68 @@
1
+ #!/usr/bin/env node
2
+ import path from 'node:path';
3
+ import fs from 'node:fs';
4
+ import { loadConfig } from './config.js';
5
+ import { hydrateStore, loadPrebuiltIndex, loadBundledRecords } from './core/engine.js';
6
+ import { syncRepo } from './core/git.js';
7
+ import { startMcpServer } from './mcp/server.js';
8
+ async function bootstrap() {
9
+ if (process.argv.includes('--help') || process.argv.includes('-h')) {
10
+ console.log('Usage: mechanic-mcp (runs MCP server on stdio)');
11
+ console.log('Environment: MECHANIC_DATA_PATH (defaults to dist/data with bundled index/records).');
12
+ console.log('Tools: search_tasks, search_docs, get_task, get_doc, similar_tasks, refresh_index.');
13
+ process.exit(0);
14
+ }
15
+ const config = loadConfig();
16
+ // Keep a mutable reference so refresh can swap it.
17
+ const prebuiltIndex = loadPrebuiltIndex(config.dataDir);
18
+ const bundledRecords = await loadBundledRecords(config.dataDir);
19
+ let store = await hydrateStore(config, prebuiltIndex, bundledRecords);
20
+ const refresh = async () => {
21
+ // If we have bundled records, skip repo sync; otherwise sync and rebuild.
22
+ if (!bundledRecords) {
23
+ await Promise.all([syncRepo(config.docs), syncRepo(config.tasks)]);
24
+ store = await hydrateStore(config, prebuiltIndex, bundledRecords);
25
+ }
26
+ };
27
+ // Initial sync happens before serving.
28
+ await refresh();
29
+ // Background sync.
30
+ const intervalMs = config.syncIntervalMinutes * 60 * 1000;
31
+ const timer = setInterval(() => {
32
+ refresh().catch((error) => {
33
+ console.error('Periodic refresh failed', error);
34
+ });
35
+ }, intervalMs);
36
+ const stop = await startMcpServer({
37
+ getStore: () => store,
38
+ refresh,
39
+ });
40
+ // Log manifest info if present
41
+ const manifestPath = path.join(config.dataDir, 'manifest.json');
42
+ if (fs.existsSync(manifestPath)) {
43
+ try {
44
+ const manifestRaw = fs.readFileSync(manifestPath, 'utf-8');
45
+ const manifest = JSON.parse(manifestRaw);
46
+ console.error('Data manifest', manifest);
47
+ }
48
+ catch (error) {
49
+ console.error(`Unable to read manifest at ${manifestPath}: ${error}`);
50
+ }
51
+ }
52
+ const shutdown = async () => {
53
+ clearInterval(timer);
54
+ await stop();
55
+ };
56
+ const handleSignal = async (signal) => {
57
+ console.error(`Received ${signal}, shutting down...`);
58
+ await shutdown();
59
+ process.exit(0);
60
+ };
61
+ process.on('SIGINT', handleSignal);
62
+ process.on('SIGTERM', handleSignal);
63
+ return { shutdown };
64
+ }
65
+ bootstrap().catch((error) => {
66
+ console.error(error);
67
+ process.exitCode = 1;
68
+ });
@@ -0,0 +1,291 @@
1
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
2
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
3
+ import { z } from 'zod';
4
+ import { searchStoreAdvanced } from '../core/engine.js';
5
+ import { searchInputSchema, searchResponseSchema, taskIdSchema, taskDetailSchema, docIdSchema, docDetailSchema, similarTasksInputSchema, similarTasksResponseSchema, } from '../schemas.js';
6
+ import { encodeUri, decodeUri } from '../util/uri.js';
7
+ export async function startMcpServer(options) {
8
+ const { getStore, refresh } = options;
9
+ const store = getStore();
10
+ const mcp = new McpServer({
11
+ name: 'mechanic-mcp',
12
+ version: '0.1.0',
13
+ }, {
14
+ capabilities: {
15
+ resources: {},
16
+ tools: {},
17
+ },
18
+ instructions: [
19
+ 'You are an assistant helping developers write and customize Mechanic tasks in Liquid.',
20
+ 'Use search_tasks for task library queries; use search_docs for Mechanic docs. Use get_task for tasks only, get_doc or resources/read for docs.',
21
+ 'Inputs: query (required); limit<=50; offset>=0; fuzzyMaxEdits<=2; fuzzyMaxCandidates<=5.',
22
+ 'Docs are resources; tasks are tools. Resource URIs are percent-encoded from list_resources.',
23
+ 'Always use public URLs: tasks.mechanic.dev/{handle} for tasks, learn.mechanic.dev/{path} for docs. Never cite local repo paths or .md filenames.',
24
+ 'Prefer Shopify GraphQL in code examples; REST is deprecated in Mechanic tasks.',
25
+ 'Recommend existing library tasks first; build new only if no close match.',
26
+ 'For task code, return subscriptions + script/JS (relevant bits), not full JSON; include the public task URL.',
27
+ ].join('\n'),
28
+ });
29
+ // Tool: search_tasks (tasks only)
30
+ mcp.registerTool('search_tasks', {
31
+ description: 'Search Mechanic tasks',
32
+ inputSchema: searchInputSchema.omit({ scope: true }),
33
+ outputSchema: searchResponseSchema,
34
+ }, async (input) => {
35
+ const hits = searchStoreAdvanced(getStore(), {
36
+ query: input.query,
37
+ kind: 'task',
38
+ limit: input.limit,
39
+ offset: input.offset,
40
+ fuzzy: input.fuzzy,
41
+ fuzzyMaxEdits: input.fuzzyMaxEdits,
42
+ fuzzyMaxCandidates: input.fuzzyMaxCandidates,
43
+ tags: input.tags,
44
+ subscriptions: input.subscriptions,
45
+ });
46
+ const hitsWithUrl = hits.map((hit) => ({
47
+ ...hit,
48
+ url: `https://tasks.mechanic.dev/${hit.id.replace(/^task:/, '')}`,
49
+ subscriptions: getStore()
50
+ .records.filter((r) => r.id === hit.id && r.kind === 'task')
51
+ .flatMap((r) => ('events' in r ? r.events : [])),
52
+ subscriptions_template: getStore()
53
+ .records.filter((r) => r.id === hit.id && r.kind === 'task')
54
+ .map((r) => ('subscriptions_template' in r ? r.subscriptions_template : undefined))
55
+ .find(Boolean),
56
+ options: getStore()
57
+ .records.filter((r) => r.id === hit.id && r.kind === 'task')
58
+ .map((r) => ('options' in r ? r.options : undefined))
59
+ .find(Boolean),
60
+ }));
61
+ const nextOffset = hits.length === input.limit ? input.offset + input.limit : undefined;
62
+ return {
63
+ content: [
64
+ {
65
+ type: 'text',
66
+ text: JSON.stringify({
67
+ items: hitsWithUrl,
68
+ nextOffset,
69
+ }, null, 2),
70
+ },
71
+ ],
72
+ structuredContent: {
73
+ items: hitsWithUrl,
74
+ nextOffset,
75
+ },
76
+ };
77
+ });
78
+ // Tool: search_docs (docs only)
79
+ mcp.registerTool('search_docs', {
80
+ description: 'Search Mechanic docs (generated task documentation)',
81
+ inputSchema: searchInputSchema.omit({ scope: true }),
82
+ outputSchema: searchResponseSchema,
83
+ }, async (input) => {
84
+ const hits = searchStoreAdvanced(getStore(), {
85
+ query: input.query,
86
+ kind: 'doc',
87
+ limit: input.limit,
88
+ offset: input.offset,
89
+ fuzzy: input.fuzzy,
90
+ fuzzyMaxEdits: input.fuzzyMaxEdits,
91
+ fuzzyMaxCandidates: input.fuzzyMaxCandidates,
92
+ tags: input.tags,
93
+ subscriptions: input.subscriptions,
94
+ });
95
+ const hitsWithUrl = hits.map((hit) => ({
96
+ ...hit,
97
+ url: `https://learn.mechanic.dev/${hit.path}`,
98
+ sourceUrl: `https://learn.mechanic.dev/${hit.path}`,
99
+ }));
100
+ const nextOffset = hits.length === input.limit ? input.offset + input.limit : undefined;
101
+ return {
102
+ content: [
103
+ {
104
+ type: 'text',
105
+ text: JSON.stringify({
106
+ items: hitsWithUrl,
107
+ nextOffset,
108
+ }, null, 2),
109
+ },
110
+ ],
111
+ structuredContent: {
112
+ items: hitsWithUrl,
113
+ nextOffset,
114
+ },
115
+ };
116
+ });
117
+ // Tool: get_task (full payload)
118
+ mcp.registerTool('get_task', {
119
+ description: 'Tasks only: fetch by handle/id with full payload (script, options, subscriptions).',
120
+ inputSchema: z.object({ id: taskIdSchema }),
121
+ outputSchema: taskDetailSchema,
122
+ }, async (input) => {
123
+ const normalizedId = input.id.startsWith('task:') ? input.id : `task:${input.id}`;
124
+ const record = getStore().records.find((r) => r.id === normalizedId);
125
+ if (!record || record.kind !== 'task') {
126
+ return {
127
+ content: [{ type: 'text', text: `Task not found or not a task: ${input.id}` }],
128
+ isError: true,
129
+ structuredContent: undefined,
130
+ };
131
+ }
132
+ const handle = record.slug || record.id.replace(/^task:/, '');
133
+ const url = `https://tasks.mechanic.dev/${handle}`;
134
+ const subscriptions = record.events || [];
135
+ // The task JSON isn't stored wholesale; we surface what we have
136
+ const detail = {
137
+ id: record.id,
138
+ handle,
139
+ name: record.title,
140
+ tags: record.tags,
141
+ url,
142
+ subscriptions,
143
+ subscriptions_template: record.subscriptions_template,
144
+ options: record.options,
145
+ script: record.script,
146
+ online_store_javascript: record.online_store_javascript || undefined,
147
+ order_status_javascript: record.order_status_javascript || undefined,
148
+ };
149
+ return {
150
+ content: [{ type: 'text', text: JSON.stringify(detail, null, 2) }],
151
+ structuredContent: taskDetailSchema.parse(detail),
152
+ };
153
+ });
154
+ // Tool: similar_tasks
155
+ mcp.registerTool('similar_tasks', {
156
+ description: 'Find similar tasks by tags/subscriptions/title content.',
157
+ inputSchema: similarTasksInputSchema,
158
+ outputSchema: similarTasksResponseSchema,
159
+ }, async (input) => {
160
+ const allTasks = getStore().records.filter((r) => r.kind === 'task');
161
+ const target = allTasks.find((r) => r.id === input.handle || r.id === `task:${input.handle}` || r.slug === input.handle);
162
+ if (!target) {
163
+ return {
164
+ content: [{ type: 'text', text: `Task not found: ${input.handle}` }],
165
+ structuredContent: undefined,
166
+ };
167
+ }
168
+ const targetTags = new Set((target.tags || []).map((t) => t.toLowerCase()));
169
+ const targetSubs = new Set((target.events || []).map((s) => s.toLowerCase()));
170
+ const targetTitle = target.title.toLowerCase();
171
+ const scored = [];
172
+ allTasks.forEach((r) => {
173
+ if (r.id === target.id)
174
+ return;
175
+ let score = 0;
176
+ const tagsLower = (r.tags || []).map((t) => t.toLowerCase());
177
+ tagsLower.forEach((t) => {
178
+ if (targetTags.has(t))
179
+ score += 2;
180
+ });
181
+ const subsLower = (r.events || []).map((s) => s.toLowerCase());
182
+ subsLower.forEach((s) => {
183
+ if (targetSubs.has(s))
184
+ score += 1;
185
+ });
186
+ if (targetTitle && r.title.toLowerCase().includes(targetTitle.split(' ')[0] || '')) {
187
+ score += 0.5;
188
+ }
189
+ if (score > 0) {
190
+ scored.push({ r, score });
191
+ }
192
+ });
193
+ scored.sort((a, b) => b.score - a.score);
194
+ const top = scored.slice(0, input.limit).map(({ r }) => {
195
+ const handle = r.slug || r.id.replace(/^task:/, '');
196
+ return {
197
+ id: r.id,
198
+ kind: r.kind,
199
+ title: r.title,
200
+ path: r.path,
201
+ url: `https://tasks.mechanic.dev/${handle}`,
202
+ snippet: r.kind === 'task' ? r.summary || '' : '',
203
+ tags: r.tags,
204
+ score: 0,
205
+ };
206
+ });
207
+ return {
208
+ content: [{ type: 'text', text: JSON.stringify({ items: top }, null, 2) }],
209
+ structuredContent: { items: top },
210
+ };
211
+ });
212
+ // Tool: get_doc (docs only)
213
+ mcp.registerTool('get_doc', {
214
+ description: 'Docs only: fetch by id/path (use search_docs to find ids).',
215
+ inputSchema: z.object({ id: docIdSchema }),
216
+ outputSchema: docDetailSchema,
217
+ }, async (input) => {
218
+ const normalizedId = input.id.startsWith('doc:') ? input.id : `doc:${input.id}`;
219
+ const record = getStore().records.find((r) => r.id === normalizedId);
220
+ if (!record || record.kind !== 'doc') {
221
+ return {
222
+ content: [{ type: 'text', text: `Doc not found or not a doc: ${input.id}` }],
223
+ isError: true,
224
+ structuredContent: undefined,
225
+ };
226
+ }
227
+ const detail = {
228
+ id: record.id,
229
+ title: record.title,
230
+ path: record.path,
231
+ url: record.sourceUrl,
232
+ content: record.content,
233
+ };
234
+ return {
235
+ content: [{ type: 'text', text: JSON.stringify(detail, null, 2) }],
236
+ structuredContent: docDetailSchema.parse(detail),
237
+ };
238
+ });
239
+ // Doc resources only
240
+ const docResources = getStore()
241
+ .records.filter((r) => r.kind === 'doc')
242
+ .map((doc) => {
243
+ const uri = `mechanic-docs://${encodeUri(doc.id)}`;
244
+ return {
245
+ uri,
246
+ name: doc.title,
247
+ description: doc.sourceUrl || doc.path,
248
+ mimeType: 'text/markdown',
249
+ };
250
+ });
251
+ docResources.forEach((res) => {
252
+ mcp.registerResource(res.name, res.uri, {
253
+ description: res.description,
254
+ mimeType: res.mimeType,
255
+ }, async () => {
256
+ const decodedId = decodeUri(res.uri.replace(/^mechanic-docs:\/\//, ''));
257
+ const record = getStore().records.find((r) => r.id === decodedId);
258
+ if (!record || record.kind !== 'doc') {
259
+ return { contents: [] };
260
+ }
261
+ return {
262
+ contents: [
263
+ {
264
+ uri: res.uri,
265
+ mimeType: 'text/markdown',
266
+ text: record.content,
267
+ },
268
+ ],
269
+ };
270
+ });
271
+ });
272
+ // Tool: refresh_index
273
+ mcp.registerTool('refresh_index', {
274
+ description: 'Refresh and rebuild the index from source data',
275
+ inputSchema: z.object({}),
276
+ }, async () => {
277
+ await refresh();
278
+ return { content: [{ type: 'text', text: 'Refreshed index' }] };
279
+ });
280
+ const transport = new StdioServerTransport();
281
+ await mcp.connect(transport);
282
+ const currentStore = getStore();
283
+ const docCount = currentStore.records.filter((item) => item.kind === 'doc').length;
284
+ const taskCount = currentStore.records.filter((item) => item.kind === 'task').length;
285
+ console.error(`MCP server ready (docs=${docCount}, tasks=${taskCount}) on stdio`);
286
+ return async () => {
287
+ await mcp.close();
288
+ transport.close();
289
+ console.error('MCP server stopped.');
290
+ };
291
+ }
@@ -0,0 +1,97 @@
1
+ import { z } from 'zod';
2
// Search scope selector; 'docs' is accepted as a legacy alias and is
// normalized to the canonical 'doc' value.
const scopeEnum = z
    .enum(['all', 'task', 'doc', 'docs'])
    .optional()
    .transform((value) => (value === 'docs' ? 'doc' : value));
// Arguments accepted by the search tools: a required query plus paging,
// scoping, filtering, and fuzzy-matching knobs (all with sane defaults).
export const searchInputSchema = z.object({
    query: z.string().min(1),
    limit: z.number().int().min(1).max(50).default(10),
    offset: z.number().int().min(0).default(0),
    scope: scopeEnum,
    sort: z.enum(['relevance']).optional().default('relevance'),
    tags: z.array(z.string()).optional(),
    subscriptions: z.array(z.string()).optional(),
    fuzzy: z.boolean().optional().default(true),
    fuzzyMaxEdits: z.number().int().min(0).max(2).optional().default(1),
    fuzzyMaxCandidates: z.number().int().min(1).max(5).optional().default(3),
});
22
// Field set for a single search result; a hit may describe either a doc or a
// task, so the task-only fields (subscriptions, options, …) are optional.
const searchHitShape = {
    id: z.string(),
    kind: z.enum(['doc', 'task']),
    title: z.string(),
    path: z.string(),
    url: z.string().optional(),
    subscriptions: z.array(z.string()).optional(),
    subscriptions_template: z.string().optional(),
    options: z.record(z.any()).optional(),
    sourceUrl: z.string().optional(),
    snippet: z.string(),
    tags: z.array(z.string()),
    score: z.number(),
};
export const searchHitSchema = z.object(searchHitShape);
// Paged search payload; nextOffset is present only when more results remain.
export const searchResponseSchema = z.object({
    items: z.array(searchHitSchema),
    nextOffset: z.number().int().min(0).optional(),
});
40
// Identifier argument for get_task (a non-empty task id/handle string).
export const taskIdSchema = z.string().min(1);
// Structured payload returned by get_task: script + subscriptions + options
// plus the optional storefront JS blocks — not the full task JSON export.
export const taskDetailSchema = z.object({
    id: z.string(),
    handle: z.string(),
    name: z.string(),
    tags: z.array(z.string()),
    url: z.string(),
    subscriptions: z.array(z.string()),
    subscriptions_template: z.string().optional(),
    options: z.record(z.any()).optional(),
    script: z.string().optional(),
    online_store_javascript: z.string().optional(),
    order_status_javascript: z.string().optional(),
});
// Identifier argument for get_doc (a non-empty doc id string).
export const docIdSchema = z.string().min(1);
// Structured payload returned by get_doc; `content` holds the full markdown.
export const docDetailSchema = z.object({
    id: z.string(),
    title: z.string(),
    path: z.string(),
    url: z.string().optional(),
    content: z.string(),
});
62
// Arguments for similar_tasks: a task handle plus an optional result cap.
export const similarTasksInputSchema = z.object({
    handle: z.string().min(1),
    limit: z.number().int().min(1).max(20).default(5),
});
// similar_tasks results reuse the search-hit shape, with `url` made required.
export const similarTasksResponseSchema = z.object({
    items: z.array(searchHitSchema.extend({ url: z.string() })),
});
// Generic resource read request: a non-empty resource URI.
export const resourceRequestSchema = z.object({ uri: z.string().min(1) });
74
// Shape of a task exposed as an MCP resource.
// NOTE(review): events/actions/scopes/risk are not populated anywhere in this
// chunk — presumably filled in by the indexer; confirm against the builder.
export const taskResourceSchema = z.object({
    id: z.string(),
    kind: z.literal('task'),
    slug: z.string(),
    title: z.string(),
    path: z.string(),
    summary: z.string().optional(),
    tags: z.array(z.string()),
    events: z.array(z.string()),
    actions: z.array(z.string()),
    scopes: z.array(z.string()),
    risk: z.enum(['read', 'write', 'mixed', 'unknown']),
    content: z.string(),
});
// Shape of a doc exposed as an MCP resource; `content` is the full markdown.
export const docResourceSchema = z.object({
    id: z.string(),
    kind: z.literal('doc'),
    title: z.string(),
    path: z.string(),
    section: z.string().optional(),
    tags: z.array(z.string()),
    headings: z.array(z.string()),
    content: z.string(),
});
@@ -0,0 +1,17 @@
1
import { strict as assert } from 'node:assert';
import { loadConfig } from '../config.js';
import { hydrateStore, loadPrebuiltIndex, loadBundledRecords } from '../core/engine.js';

// Smoke check: the packaged data set must contain at least one doc record.
async function main() {
    const config = loadConfig();
    const store = await hydrateStore(
        config,
        loadPrebuiltIndex(config.dataDir),
        await loadBundledRecords(config.dataDir),
    );
    const hasDocs = store.records.some((record) => record.kind === 'doc');
    assert(hasDocs, 'Expected docs in bundled records');
    // eslint-disable-next-line no-console
    console.log('Doc resource smoke passed.');
}
main().catch((error) => {
    // eslint-disable-next-line no-console
    console.error(error);
    process.exitCode = 1;
});
@@ -0,0 +1,19 @@
1
import { strict as assert } from 'node:assert';
import { loadConfig } from '../config.js';
import { hydrateStore, loadPrebuiltIndex, loadBundledRecords } from '../core/engine.js';

// Smoke check: a known task record is present with the fields get_task serves.
async function main() {
    const config = loadConfig();
    const prebuilt = loadPrebuiltIndex(config.dataDir);
    const records = await loadBundledRecords(config.dataDir);
    const store = await hydrateStore(config, prebuilt, records);
    const task = store.records.find((record) => record.id === 'task:abandoned-checkout-emails');
    assert(task && task.kind === 'task', 'Expected abandoned-checkout-emails task');
    assert(task.subscriptions_template, 'Expected subscriptions_template');
    assert(task.script || task.content, 'Expected script/content');
    console.log('get_task smoke passed.');
}
main().catch((error) => {
    console.error(error);
    process.exitCode = 1;
});
@@ -0,0 +1,46 @@
1
import { strict as assert } from 'node:assert';
import fs from 'node:fs';
import path from 'node:path';
import { loadConfig } from '../config.js';
import { hydrateStore, loadPrebuiltIndex, loadBundledRecords, searchStoreAdvanced } from '../core/engine.js';

// End-to-end smoke: hydrate the bundled store, exercise search for both
// record kinds, check pagination, and verify the data manifest is on disk.
async function run() {
    const config = loadConfig();
    const store = await hydrateStore(
        config,
        loadPrebuiltIndex(config.dataDir),
        await loadBundledRecords(config.dataDir),
    );
    // Run a fuzzy search and insist on at least one hit.
    const expectHits = (query, kind) => {
        const hits = searchStoreAdvanced(store, { query, kind, limit: 3, fuzzy: true });
        assert(hits.length > 0, `Expected hits for query "${query}"`);
        return hits;
    };
    // Task search
    const taskHits = expectHits('abandoned checkout', 'task');
    assert(taskHits.some((hit) => hit.id.includes('abandoned')), 'Expected an abandoned checkout task');
    // Doc search
    const docHits = expectHits('subscription', 'doc');
    assert(docHits.some((hit) => hit.id.startsWith('doc:')), 'Expected doc hits');
    // Pagination: second page of a one-per-page task query.
    const paged = searchStoreAdvanced(store, {
        query: 'checkout',
        kind: 'task',
        limit: 1,
        offset: 1,
    });
    assert(paged.length === 1, 'Expected one result on paged query');
    // Manifest existence
    assert(fs.existsSync(path.join(config.dataDir, 'manifest.json')), 'Manifest should exist');
    // eslint-disable-next-line no-console
    console.log('Smoke tests passed.');
}
run().catch((error) => {
    // eslint-disable-next-line no-console
    console.error(error);
    process.exitCode = 1;
});
package/dist/types.js ADDED
@@ -0,0 +1 @@
1
// Intentionally empty at runtime — presumably the compiled output of a
// type-only TypeScript module; the export marks the file as an ES module.
export {};
@@ -0,0 +1,11 @@
1
/**
 * Percent-encode a record id so it is safe to embed in a custom URI scheme
 * (record ids may contain ':' and other reserved characters).
 *
 * @param {string} id - Record identifier to encode.
 * @returns {string} The percent-encoded id.
 */
export function encodeUri(id) {
    return encodeURIComponent(id);
}
4
/**
 * Decode a percent-encoded id segment. decodeURIComponent throws URIError on
 * malformed escape sequences; this helper is best-effort and returns the raw
 * input unchanged in that case instead of throwing.
 *
 * @param {string} uri - Percent-encoded identifier segment.
 * @returns {string} The decoded string, or the input verbatim if decoding fails.
 */
export function decodeUri(uri) {
    try {
        return decodeURIComponent(uri);
    }
    catch {
        // Optional catch binding: the error itself is irrelevant here.
        return uri;
    }
}
package/package.json ADDED
@@ -0,0 +1,42 @@
1
+ {
2
+ "name": "@lightward/mechanic-mcp",
3
+ "version": "0.1.1",
4
+ "description": "Mechanic MCP server for docs and task library search",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "bin": {
8
+ "mechanic-mcp": "dist/index.js"
9
+ },
10
+ "files": [
11
+ "dist/**/*.js",
12
+ "dist/**/*.d.ts",
13
+ "dist/data/index.json.gz",
14
+ "dist/data/records.json.gz",
15
+ "dist/data/manifest.json",
16
+ "README.md",
17
+ "LICENSE"
18
+ ],
19
+ "license": "MIT",
20
+ "scripts": {
21
+ "build": "tsc -p tsconfig.json",
22
+ "build:data": "tsx scripts/build-data.ts",
23
+ "dev": "tsx src/index.ts",
24
+ "start": "node dist/index.js",
25
+ "lint": "eslint . --ext .ts",
26
+ "test:smoke": "node dist/tests/smoke.js",
27
+ "test:smoke-doc": "node dist/tests/doc-resource.smoke.js",
28
+ "test:smoke-task": "node dist/tests/get-task.smoke.js"
29
+ },
30
+ "dependencies": {
31
+ "@modelcontextprotocol/sdk": "^1.24.0",
32
+ "gray-matter": "^4.0.3",
33
+ "yaml": "^2.4.1",
34
+ "zod": "^3.24.2"
35
+ },
36
+ "devDependencies": {
37
+ "@types/node": "^22.0.0",
38
+ "eslint": "^8.57.0",
39
+ "tsx": "^4.19.0",
40
+ "typescript": "^5.4.0"
41
+ }
42
+ }