agentic-knowledge-mcp 0.0.1 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/package.json +5 -5
  2. package/packages/cli/dist/cli.d.ts +2 -3
  3. package/packages/cli/dist/cli.js +15 -14
  4. package/packages/cli/dist/commands/create.js +13 -7
  5. package/packages/cli/dist/commands/init.js +117 -167
  6. package/packages/cli/dist/commands/refresh.js +248 -290
  7. package/packages/cli/dist/commands/status.js +185 -239
  8. package/packages/cli/dist/exports.d.ts +6 -0
  9. package/packages/cli/dist/exports.js +6 -0
  10. package/packages/cli/dist/index.d.ts +5 -4
  11. package/packages/cli/dist/index.js +30 -4
  12. package/packages/cli/package.json +9 -8
  13. package/packages/content-loader/dist/content/api-documentation-loader.js +27 -35
  14. package/packages/content-loader/dist/content/documentation-site-loader.js +27 -35
  15. package/packages/content-loader/dist/content/metadata-manager.d.ts +46 -56
  16. package/packages/content-loader/dist/content/metadata-manager.js +150 -147
  17. package/packages/content-loader/package.json +1 -1
  18. package/packages/core/dist/config/loader.js +162 -186
  19. package/packages/core/dist/config/manager.js +128 -160
  20. package/packages/core/dist/index.js +2 -16
  21. package/packages/core/dist/paths/calculator.d.ts +7 -0
  22. package/packages/core/dist/paths/calculator.js +143 -103
  23. package/packages/core/dist/paths/symlinks.d.ts +21 -0
  24. package/packages/core/dist/paths/symlinks.js +93 -0
  25. package/packages/core/dist/types.d.ts +35 -15
  26. package/packages/core/package.json +1 -1
  27. package/packages/mcp-server/dist/server.js +5 -5
  28. package/packages/mcp-server/package.json +2 -2
  29. package/packages/content-loader/dist/__tests__/debug-filtering.d.ts +0 -1
  30. package/packages/content-loader/dist/__tests__/debug-filtering.js +0 -17
  31. package/packages/content-loader/dist/__tests__/test-filtering.d.ts +0 -1
  32. package/packages/content-loader/dist/__tests__/test-filtering.js +0 -19
  33. package/packages/core/dist/content/api-documentation-loader.d.ts +0 -26
  34. package/packages/core/dist/content/api-documentation-loader.js +0 -45
  35. package/packages/core/dist/content/content-processor.d.ts +0 -44
  36. package/packages/core/dist/content/content-processor.js +0 -81
  37. package/packages/core/dist/content/documentation-site-loader.d.ts +0 -26
  38. package/packages/core/dist/content/documentation-site-loader.js +0 -45
  39. package/packages/core/dist/content/git-repo-loader.d.ts +0 -54
  40. package/packages/core/dist/content/git-repo-loader.js +0 -264
  41. package/packages/core/dist/content/index.d.ts +0 -9
  42. package/packages/core/dist/content/index.js +0 -9
  43. package/packages/core/dist/content/loader.d.ts +0 -50
  44. package/packages/core/dist/content/loader.js +0 -7
  45. package/packages/core/dist/content/metadata-manager.d.ts +0 -65
  46. package/packages/core/dist/content/metadata-manager.js +0 -160
package/package.json CHANGED
@@ -1,11 +1,11 @@
1
1
  {
2
2
  "name": "agentic-knowledge-mcp",
3
- "version": "0.0.1",
3
+ "version": "0.1.2",
4
4
  "description": "A Model Context Protocol server for agentic knowledge guidance with web-based documentation loading and intelligent search instructions",
5
5
  "type": "module",
6
6
  "main": "packages/mcp-server/dist/index.js",
7
7
  "bin": {
8
- "agentic-knowledge": "packages/cli/dist/cli.js"
8
+ "agentic-knowledge": "packages/cli/dist/index.js"
9
9
  },
10
10
  "engines": {
11
11
  "node": ">=18.0.0",
@@ -26,9 +26,9 @@
26
26
  "@modelcontextprotocol/sdk": "^1.19.1",
27
27
  "@types/js-yaml": "4.0.9",
28
28
  "js-yaml": "4.1.0",
29
- "@codemcp/knowledge-mcp-server": "0.0.1",
30
- "@codemcp/knowledge-content-loader": "0.0.1",
31
- "@codemcp/knowledge-core": "0.0.1"
29
+ "@codemcp/knowledge-content-loader": "0.1.2",
30
+ "@codemcp/knowledge-core": "0.1.2",
31
+ "@codemcp/knowledge-mcp-server": "0.1.2"
32
32
  },
33
33
  "devDependencies": {
34
34
  "@modelcontextprotocol/inspector": "0.16.8",
@@ -1,5 +1,4 @@
1
- #!/usr/bin/env node
2
1
  /**
3
- * CLI entry point for agentic-knowledge web content management
2
+ * CLI implementation for agentic-knowledge web content management
4
3
  */
5
- export {};
4
+ export declare function runCli(): void;
@@ -1,21 +1,22 @@
1
- #!/usr/bin/env node
2
1
  /**
3
- * CLI entry point for agentic-knowledge web content management
2
+ * CLI implementation for agentic-knowledge web content management
4
3
  */
5
4
  import { Command } from "commander";
6
5
  import { initCommand } from "./commands/init.js";
7
6
  import { refreshCommand } from "./commands/refresh.js";
8
7
  import { statusCommand } from "./commands/status.js";
9
8
  import { createCommand } from "./commands/create.js";
10
- const program = new Command();
11
- program
12
- .name("agentic-knowledge")
13
- .description("Manage web content sources for agentic knowledge system")
14
- .version("0.1.0");
15
- // Add commands
16
- program.addCommand(createCommand);
17
- program.addCommand(initCommand);
18
- program.addCommand(refreshCommand);
19
- program.addCommand(statusCommand);
20
- // Parse command line arguments
21
- program.parse();
9
+ export function runCli() {
10
+ const program = new Command();
11
+ program
12
+ .name("agentic-knowledge")
13
+ .description("Manage web content sources for agentic knowledge system")
14
+ .version("0.1.0");
15
+ // Add commands
16
+ program.addCommand(createCommand);
17
+ program.addCommand(initCommand);
18
+ program.addCommand(refreshCommand);
19
+ program.addCommand(statusCommand);
20
+ // Parse command line arguments
21
+ program.parse();
22
+ }
@@ -21,7 +21,7 @@ export const createCommand = new Command("create")
21
21
  const configManager = new ConfigManager();
22
22
  const { config, configPath } = await configManager.loadConfig(process.cwd());
23
23
  // Check if docset ID already exists
24
- if (config.docsets.find(d => d.id === options.id)) {
24
+ if (config.docsets.find((d) => d.id === options.id)) {
25
25
  throw new Error(`Docset with ID '${options.id}' already exists`);
26
26
  }
27
27
  let newDocset;
@@ -57,13 +57,14 @@ async function createGitRepoDocset(options) {
57
57
  id: options.id,
58
58
  name: options.name,
59
59
  description: options.description || `Git repository: ${options.url}`,
60
- web_sources: [{
60
+ sources: [
61
+ {
61
62
  url: options.url,
62
63
  type: "git_repo",
63
- options: {
64
- branch: options.branch
65
- }
66
- }]
64
+ branch: options.branch,
65
+ paths: options.paths ? options.paths.split(",") : undefined,
66
+ },
67
+ ],
67
68
  };
68
69
  }
69
70
  async function createLocalFolderDocset(options) {
@@ -85,6 +86,11 @@ async function createLocalFolderDocset(options) {
85
86
  id: options.id,
86
87
  name: options.name,
87
88
  description: options.description || `Local documentation: ${options.path}`,
88
- local_path: options.path
89
+ sources: [
90
+ {
91
+ type: "local_folder",
92
+ paths: [options.path],
93
+ },
94
+ ],
89
95
  };
90
96
  }
@@ -5,178 +5,128 @@ import { Command } from "commander";
5
5
  import chalk from "chalk";
6
6
  import { promises as fs } from "node:fs";
7
7
  import * as path from "node:path";
8
- import {
9
- ConfigManager,
10
- calculateLocalPath,
11
- ensureKnowledgeGitignoreSync,
12
- } from "@codemcp/knowledge-core";
13
- import {
14
- GitRepoLoader,
15
- WebSourceType,
16
- } from "@codemcp/knowledge-content-loader";
8
+ import { ConfigManager, calculateLocalPath, ensureKnowledgeGitignoreSync, } from "@codemcp/knowledge-core";
9
+ import { GitRepoLoader, WebSourceType, } from "@codemcp/knowledge-content-loader";
17
10
  export const initCommand = new Command("init")
18
- .description("Initialize web sources for a docset from configuration")
19
- .argument("<docset-id>", "ID of the docset to initialize")
20
- .option("-c, --config <path>", "Path to configuration file")
21
- .option("--force", "Force re-initialization even if already exists", false)
22
- .action(async (docsetId, options) => {
11
+ .description("Initialize sources for a docset from configuration")
12
+ .argument("<docset-id>", "ID of the docset to initialize")
13
+ .option("-c, --config <path>", "Path to configuration file")
14
+ .option("--force", "Force re-initialization even if already exists", false)
15
+ .action(async (docsetId, options) => {
23
16
  console.log(chalk.blue("šŸš€ Agentic Knowledge Integration Test"));
24
17
  try {
25
- // Use ConfigManager for all config operations
26
- const configManager = new ConfigManager();
27
- const { config, configPath } = await configManager.loadConfig(
28
- process.cwd(),
29
- );
30
- // Ensure .knowledge/.gitignore exists and contains docsets/ ignore rule
31
- ensureKnowledgeGitignoreSync(configPath);
32
- const docset = config.docsets.find((d) => d.id === docsetId);
33
- if (!docset) {
34
- throw new Error(
35
- `Docset '${docsetId}' not found in configuration. Available: ${config.docsets.map((d) => d.id).join(", ")}`,
36
- );
37
- }
38
- if (!docset.web_sources || docset.web_sources.length === 0) {
39
- throw new Error(`Docset '${docsetId}' has no web sources configured`);
40
- }
41
- console.log(chalk.green(`āœ… Found docset: ${docset.name}`));
42
- console.log(chalk.gray(`šŸ“ Description: ${docset.description}`));
43
- console.log(chalk.gray(`šŸ”— Web sources: ${docset.web_sources.length}`));
44
- // Calculate the local path for this docset
45
- const localPath = calculateLocalPath(docset, configPath);
46
- console.log(chalk.yellow(`\nšŸ“ Target directory: ${localPath}`));
47
- // Check if already exists
48
- let existsAlready = false;
49
- try {
50
- const stat = await fs.stat(localPath);
51
- if (stat.isDirectory()) {
52
- existsAlready = true;
18
+ // Use ConfigManager for all config operations
19
+ const configManager = new ConfigManager();
20
+ const { config, configPath } = await configManager.loadConfig(process.cwd());
21
+ // Ensure .knowledge/.gitignore exists and contains docsets/ ignore rule
22
+ ensureKnowledgeGitignoreSync(configPath);
23
+ const docset = config.docsets.find((d) => d.id === docsetId);
24
+ if (!docset) {
25
+ throw new Error(`Docset '${docsetId}' not found in configuration. Available: ${config.docsets.map((d) => d.id).join(", ")}`);
53
26
  }
54
- } catch {
55
- // Directory doesn't exist, which is fine
56
- }
57
- if (existsAlready && !options.force) {
58
- console.log(
59
- chalk.yellow(
60
- "āš ļø Directory already exists. Use --force to overwrite.",
61
- ),
62
- );
63
- const files = await fs.readdir(localPath);
64
- console.log(
65
- chalk.gray(
66
- `Existing files: ${files.slice(0, 5).join(", ")}${files.length > 5 ? "..." : ""}`,
67
- ),
68
- );
69
- return;
70
- }
71
- // Create target directory
72
- await fs.mkdir(localPath, { recursive: true });
73
- let totalFiles = 0;
74
- const allDiscoveredPaths = [];
75
- // Process each web source
76
- for (const [index, webSource] of docset.web_sources.entries()) {
77
- console.log(
78
- chalk.yellow(
79
- `\nšŸ”„ Loading source ${index + 1}/${docset.web_sources.length}: ${webSource.url}`,
80
- ),
81
- );
82
- if (webSource.type === "git_repo") {
83
- // Use GitRepoLoader for all Git operations (REQ-19)
84
- const loader = new GitRepoLoader();
85
- console.log(
86
- chalk.gray(` Using GitRepoLoader for smart content filtering`),
87
- );
88
- const webSourceConfig = {
89
- url: webSource.url,
90
- type: WebSourceType.GIT_REPO,
91
- options: webSource.options || {},
92
- };
93
- // Validate configuration
94
- const validation = loader.validateConfig(webSourceConfig);
95
- if (validation !== true) {
96
- throw new Error(
97
- `Invalid Git repository configuration: ${validation}`,
98
- );
99
- }
100
- // Load content using GitRepoLoader
101
- const result = await loader.load(webSourceConfig, localPath);
102
- if (!result.success) {
103
- throw new Error(`Git repository loading failed: ${result.error}`);
104
- }
105
- // Collect discovered paths for config update
106
- allDiscoveredPaths.push(...result.files);
107
- totalFiles += result.files.length;
108
- console.log(
109
- chalk.green(
110
- ` āœ… Copied ${result.files.length} files using smart filtering`,
111
- ),
112
- );
113
- // Create source metadata
114
- const metadata = {
115
- source_url: webSource.url,
116
- source_type: webSource.type,
117
- downloaded_at: new Date().toISOString(),
118
- files_count: result.files.length,
119
- files: result.files,
120
- docset_id: docsetId,
121
- content_hash: result.contentHash,
122
- };
123
- await fs.writeFile(
124
- path.join(localPath, `.agentic-source-${index}.json`),
125
- JSON.stringify(metadata, null, 2),
126
- );
127
- } else {
128
- console.log(
129
- chalk.red(
130
- ` āŒ Web source type '${webSource.type}' not yet supported`,
131
- ),
132
- );
27
+ if (!docset.sources || docset.sources.length === 0) {
28
+ throw new Error(`Docset '${docsetId}' has no sources configured`);
133
29
  }
134
- }
135
- // Create overall metadata
136
- const overallMetadata = {
137
- docset_id: docsetId,
138
- docset_name: docset.name,
139
- initialized_at: new Date().toISOString(),
140
- total_files: totalFiles,
141
- web_sources_count: docset.web_sources.length,
142
- };
143
- await fs.writeFile(
144
- path.join(localPath, ".agentic-metadata.json"),
145
- JSON.stringify(overallMetadata, null, 2),
146
- );
147
- // Update configuration with discovered paths (only if paths were discovered and force flag used)
148
- if (allDiscoveredPaths.length > 0 && options.force) {
149
- console.log(
150
- chalk.yellow(`\nšŸ“ Updating configuration with discovered paths...`),
151
- );
30
+ console.log(chalk.green(`āœ… Found docset: ${docset.name}`));
31
+ console.log(chalk.gray(`šŸ“ Description: ${docset.description}`));
32
+ console.log(chalk.gray(`šŸ”— Sources: ${docset.sources.length}`));
33
+ // Calculate the local path for this docset
34
+ const localPath = calculateLocalPath(docset, configPath);
35
+ console.log(chalk.yellow(`\nšŸ“ Target directory: ${localPath}`));
36
+ // Check if already exists
37
+ let existsAlready = false;
152
38
  try {
153
- await configManager.updateDocsetPaths(docsetId, allDiscoveredPaths);
154
- console.log(
155
- chalk.green(
156
- ` āœ… Updated config with ${allDiscoveredPaths.length} discovered paths`,
157
- ),
158
- );
159
- } catch (configError) {
160
- console.log(
161
- chalk.yellow(
162
- ` āš ļø Could not update config: ${configError instanceof Error ? configError.message : String(configError)}`,
163
- ),
164
- );
39
+ const stat = await fs.stat(localPath);
40
+ if (stat.isDirectory()) {
41
+ existsAlready = true;
42
+ }
43
+ }
44
+ catch {
45
+ // Directory doesn't exist, which is fine
46
+ }
47
+ if (existsAlready && !options.force) {
48
+ console.log(chalk.yellow("āš ļø Directory already exists. Use --force to overwrite."));
49
+ const files = await fs.readdir(localPath);
50
+ console.log(chalk.gray(`Existing files: ${files.slice(0, 5).join(", ")}${files.length > 5 ? "..." : ""}`));
51
+ return;
52
+ }
53
+ // Create target directory
54
+ await fs.mkdir(localPath, { recursive: true });
55
+ let totalFiles = 0;
56
+ const allDiscoveredPaths = [];
57
+ // Process each source
58
+ for (const [index, source] of docset.sources.entries()) {
59
+ console.log(chalk.yellow(`\nšŸ”„ Loading source ${index + 1}/${docset.sources.length}: ${source.type === "git_repo" ? source.url : source.paths?.join(", ")}`));
60
+ if (source.type === "git_repo") {
61
+ // Use GitRepoLoader for all Git operations (REQ-19)
62
+ const loader = new GitRepoLoader();
63
+ console.log(chalk.gray(` Using GitRepoLoader for smart content filtering`));
64
+ const webSourceConfig = {
65
+ url: source.url,
66
+ type: WebSourceType.GIT_REPO,
67
+ options: {
68
+ branch: source.branch || "main",
69
+ paths: source.paths || [],
70
+ },
71
+ };
72
+ // Validate configuration
73
+ const validation = loader.validateConfig(webSourceConfig);
74
+ if (validation !== true) {
75
+ throw new Error(`Invalid Git repository configuration: ${validation}`);
76
+ }
77
+ // Load content using GitRepoLoader
78
+ const result = await loader.load(webSourceConfig, localPath);
79
+ if (!result.success) {
80
+ throw new Error(`Git repository loading failed: ${result.error}`);
81
+ }
82
+ // Collect discovered paths for config update
83
+ allDiscoveredPaths.push(...result.files);
84
+ totalFiles += result.files.length;
85
+ console.log(chalk.green(` āœ… Copied ${result.files.length} files using smart filtering`));
86
+ // Create source metadata
87
+ const metadata = {
88
+ source_url: source.url,
89
+ source_type: source.type,
90
+ downloaded_at: new Date().toISOString(),
91
+ files_count: result.files.length,
92
+ files: result.files,
93
+ docset_id: docsetId,
94
+ content_hash: result.contentHash,
95
+ };
96
+ await fs.writeFile(path.join(localPath, `.agentic-source-${index}.json`), JSON.stringify(metadata, null, 2));
97
+ }
98
+ else {
99
+ console.log(chalk.red(` āŒ Source type '${source.type}' not yet supported`));
100
+ }
165
101
  }
166
- }
167
- console.log(
168
- chalk.green(`\nšŸŽ‰ Successfully initialized docset '${docsetId}'`),
169
- );
170
- console.log(chalk.gray(`šŸ“ Location: ${localPath}`));
171
- console.log(chalk.gray(`šŸ“„ Total files: ${totalFiles}`));
172
- console.log(
173
- chalk.gray(`šŸ”— Sources processed: ${docset.web_sources.length}`),
174
- );
175
- } catch (error) {
176
- console.error(chalk.red("\nāŒ Error:"));
177
- console.error(
178
- chalk.red(error instanceof Error ? error.message : String(error)),
179
- );
180
- process.exit(1);
102
+ // Create overall metadata
103
+ const overallMetadata = {
104
+ docset_id: docsetId,
105
+ docset_name: docset.name,
106
+ initialized_at: new Date().toISOString(),
107
+ total_files: totalFiles,
108
+ sources_count: docset.sources.length,
109
+ };
110
+ await fs.writeFile(path.join(localPath, ".agentic-metadata.json"), JSON.stringify(overallMetadata, null, 2));
111
+ // Update configuration with discovered paths (only if paths were discovered and force flag used)
112
+ if (allDiscoveredPaths.length > 0 && options.force) {
113
+ console.log(chalk.yellow(`\nšŸ“ Updating configuration with discovered paths...`));
114
+ try {
115
+ await configManager.updateDocsetPaths(docsetId, allDiscoveredPaths);
116
+ console.log(chalk.green(` āœ… Updated config with ${allDiscoveredPaths.length} discovered paths`));
117
+ }
118
+ catch (configError) {
119
+ console.log(chalk.yellow(` āš ļø Could not update config: ${configError instanceof Error ? configError.message : String(configError)}`));
120
+ }
121
+ }
122
+ console.log(chalk.green(`\nšŸŽ‰ Successfully initialized docset '${docsetId}'`));
123
+ console.log(chalk.gray(`šŸ“ Location: ${localPath}`));
124
+ console.log(chalk.gray(`šŸ“„ Total files: ${totalFiles}`));
125
+ console.log(chalk.gray(`šŸ”— Sources processed: ${docset.sources.length}`));
126
+ }
127
+ catch (error) {
128
+ console.error(chalk.red("\nāŒ Error:"));
129
+ console.error(chalk.red(error instanceof Error ? error.message : String(error)));
130
+ process.exit(1);
181
131
  }
182
- });
132
+ });