@fractary/codex-mcp 0.7.1 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -64,23 +64,86 @@ The server exposes an SSE (Server-Sent Events) endpoint for HTTP clients.
  Create a `.fractary/codex/config.yaml` configuration file:

  ```yaml
+ # Organization configuration
+ organizationSlug: fractary
+
+ # Cache configuration
  cache:
    dir: .fractary/codex/cache
    maxMemorySize: 104857600 # 100 MB
    defaultTtl: 3600 # 1 hour

+ # Storage providers
  storage:
    providers:
      - type: local
        basePath: ./knowledge
      - type: github
        token: ${GITHUB_TOKEN}
+
+ # Archive configuration (optional)
+ # Enables transparent access to archived documents in S3/R2/GCS
+ archive:
+   projects:
+     fractary/auth-service:
+       enabled: true
+       handler: s3 # s3, r2, gcs, or local
+       bucket: fractary-archives
+       patterns: # Optional: limit to specific patterns
+         - specs/**
+         - docs/**
+     fractary/api-gateway:
+       enabled: true
+       handler: r2
+       bucket: api-archives
+ ```
+
+ ### Archive Configuration
+
+ The archive feature enables transparent access to archived documents stored in cloud storage (S3, R2, GCS). When enabled, Codex automatically falls back to the archive when documents are not found locally or in GitHub.
+
+ **Key Features:**
+ - **Transparent URIs**: Same `codex://org/project/path` URI works for both active and archived documents
+ - **Storage Priority**: Local → Archive → GitHub → HTTP (automatic fallback)
+ - **Per-Project Config**: Different projects can use different storage backends and buckets
+ - **Pattern Matching**: Optional patterns limit which files are archived
+
+ **Configuration Fields:**
+ - `enabled`: Boolean - Whether archive is active for this project
+ - `handler`: String - Storage backend: `s3`, `r2`, `gcs`, or `local`
+ - `bucket`: String (optional) - Cloud storage bucket name
+ - `prefix`: String (optional) - Path prefix in bucket (default: `archive/`)
+ - `patterns`: Array (optional) - Glob patterns to match (e.g., `specs/**`, `*.md`)
+
+ **Archive Path Structure:**
+ ```
+ archive/{type}/{org}/{project}/{original-path}
+
+ Examples:
+ specs/WORK-123.md → archive/specs/fractary/auth-service/specs/WORK-123.md
+ docs/api.md → archive/docs/fractary/auth-service/docs/api.md
  ```

+ **Example Usage:**
+ ```typescript
+ // Reference archived spec (same URI as before archiving)
+ const result = await fetch('codex://fractary/auth-service/specs/WORK-123.md')
+ // Codex checks: local → S3 archive → GitHub → HTTP
+ // Returns content from archive if not found locally
+ ```
+
+ **Requirements:**
+ - [fractary CLI](https://github.com/fractary/cli) installed and configured
+ - Cloud storage credentials (AWS credentials for S3, Cloudflare for R2, etc.)
+ - Archive structure must mirror project structure
+
  ### Environment Variables

  - `FRACTARY_CONFIG`: Path to configuration file (default: `.fractary/codex/config.yaml`)
  - `GITHUB_TOKEN`: GitHub personal access token for GitHub storage provider
+ - `FRACTARY_CLI`: Path to fractary CLI executable (default: `fractary`)
+ - `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`: AWS credentials for S3
+ - `CLOUDFLARE_ACCOUNT_ID`, `CLOUDFLARE_API_TOKEN`: Cloudflare credentials for R2

  ## Available Tools

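The README documents a `prefix` field that the sample config above does not show, and the field list maps one-to-one onto the `ArchiveProjectConfigSchema` added to the bundle below. As a minimal sketch, assuming only the fields documented in this README (the interface and variable names here are illustrative, not part of the package API), the parsed `archive` section looks like this:

```typescript
// Illustrative shape of the parsed `archive` config section.
// Field names follow the README; `prefix` defaults to "archive/" when omitted.
interface ArchiveProjectConfig {
  enabled: boolean;                       // whether archive lookup is active
  handler: "s3" | "r2" | "gcs" | "local"; // storage backend
  bucket?: string;                        // cloud storage bucket name
  prefix?: string;                        // path prefix in bucket, default "archive/"
  patterns?: string[];                    // optional glob patterns, e.g. "specs/**"
}

const archive: { projects: Record<string, ArchiveProjectConfig> } = {
  projects: {
    "fractary/auth-service": {
      enabled: true,
      handler: "s3",
      bucket: "fractary-archives",
      prefix: "archive/",                 // explicit here; omitting it gives the same default
      patterns: ["specs/**", "docs/**"],
    },
  },
};
```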
package/dist/cli.cjs CHANGED
@@ -12238,6 +12238,7 @@ var safeDump = renamed("safeDump", "dump");

  // ../../sdk/js/dist/index.js
  var import_promises = __toESM(require("fs/promises"), 1);
+ var import_util4 = require("util");
  var __defProp2 = Object.defineProperty;
  var __getOwnPropNames2 = Object.getOwnPropertyNames;
  var __esm2 = (fn, res) => function __init() {
@@ -12638,6 +12639,16 @@ var DirectionalSyncSchema = external_exports.object({
    default_to_codex: external_exports.array(external_exports.string()).optional(),
    default_from_codex: external_exports.array(external_exports.string()).optional()
  });
+ var ArchiveProjectConfigSchema = external_exports.object({
+   enabled: external_exports.boolean(),
+   handler: external_exports.enum(["s3", "r2", "gcs", "local"]),
+   bucket: external_exports.string().optional(),
+   prefix: external_exports.string().optional(),
+   patterns: external_exports.array(external_exports.string()).optional()
+ });
+ var ArchiveConfigSchema = external_exports.object({
+   projects: external_exports.record(ArchiveProjectConfigSchema)
+ });
  var CodexConfigSchema = external_exports.object({
    organizationSlug: external_exports.string(),
    directories: external_exports.object({
@@ -12647,7 +12658,9 @@ var CodexConfigSchema = external_exports.object({
    }).optional(),
    rules: SyncRulesSchema.optional(),
    // Directional sync configuration
-   sync: DirectionalSyncSchema.optional()
+   sync: DirectionalSyncSchema.optional(),
+   // Archive configuration
+   archive: ArchiveConfigSchema.optional()
  }).strict();
  init_matcher();
  init_matcher();
@@ -13133,6 +13146,188 @@ var HttpStorage = class {
      }
    }
  };
+ var execFileAsync = (0, import_util4.promisify)(import_child_process.execFile);
+ async function execFileNoThrow(command, args = [], options) {
+   try {
+     const { stdout, stderr } = await execFileAsync(command, args, {
+       ...options,
+       maxBuffer: options?.maxBuffer || 1024 * 1024 * 10
+       // 10MB default
+     });
+     return {
+       stdout: stdout || "",
+       stderr: stderr || "",
+       exitCode: 0
+     };
+   } catch (error) {
+     const exitCode = typeof error.exitCode === "number" ? error.exitCode : 1;
+     return {
+       stdout: error.stdout || "",
+       stderr: error.stderr || error.message || "",
+       exitCode
+     };
+   }
+ }
+ var S3ArchiveStorage = class {
+   name = "s3-archive";
+   type = "s3-archive";
+   projects;
+   fractaryCli;
+   constructor(options = {}) {
+     this.projects = options.projects || {};
+     this.fractaryCli = options.fractaryCli || "fractary";
+   }
+   /**
+    * Check if this provider can handle the reference
+    *
+    * S3 Archive provider handles references that:
+    * 1. Are for the current project (same org/project)
+    * 2. Have archive enabled in config
+    * 3. Match configured patterns (if specified)
+    */
+   canHandle(reference) {
+     if (!reference.isCurrentProject) {
+       return false;
+     }
+     const projectKey = `${reference.org}/${reference.project}`;
+     const config = this.projects[projectKey];
+     if (!config || !config.enabled) {
+       return false;
+     }
+     if (config.patterns && config.patterns.length > 0) {
+       return this.matchesPatterns(reference.path, config.patterns);
+     }
+     return true;
+   }
+   /**
+    * Fetch content from S3 archive via fractary-file CLI
+    */
+   async fetch(reference, options) {
+     const opts = mergeFetchOptions(options);
+     const projectKey = `${reference.org}/${reference.project}`;
+     const config = this.projects[projectKey];
+     if (!config) {
+       throw new Error(`No archive config for project: ${projectKey}`);
+     }
+     const archivePath = this.calculateArchivePath(reference, config);
+     try {
+       const result = await execFileNoThrow(
+         this.fractaryCli,
+         [
+           "file",
+           "read",
+           "--remote-path",
+           archivePath,
+           "--handler",
+           config.handler,
+           ...config.bucket ? ["--bucket", config.bucket] : []
+         ],
+         {
+           timeout: opts.timeout
+         }
+       );
+       if (result.exitCode !== 0) {
+         throw new Error(`fractary-file read failed: ${result.stderr}`);
+       }
+       const content = Buffer.from(result.stdout);
+       return {
+         content,
+         contentType: detectContentType(reference.path),
+         size: content.length,
+         source: "s3-archive",
+         metadata: {
+           archivePath,
+           bucket: config.bucket,
+           handler: config.handler
+         }
+       };
+     } catch (error) {
+       const message = error instanceof Error ? error.message : String(error);
+       throw new Error(`Failed to fetch from archive: ${message}`);
+     }
+   }
+   /**
+    * Check if archived file exists
+    *
+    * Note: This currently downloads the file to check existence.
+    * TODO: Optimize by using fractary-file 'stat' or 'head' command when available
+    * to avoid downloading full file for existence checks.
+    */
+   async exists(reference, options) {
+     const projectKey = `${reference.org}/${reference.project}`;
+     const config = this.projects[projectKey];
+     if (!config) {
+       return false;
+     }
+     try {
+       await this.fetch(reference, { ...options, timeout: 5e3 });
+       return true;
+     } catch {
+       return false;
+     }
+   }
+   /**
+    * Calculate archive path from reference
+    *
+    * Pattern: {prefix}/{type}/{org}/{project}/{original-path}
+    *
+    * Examples (with default prefix "archive/"):
+    * specs/WORK-123.md → archive/specs/org/project/specs/WORK-123.md
+    * docs/api.md → archive/docs/org/project/docs/api.md
+    *
+    * Examples (with custom prefix "archived-docs/"):
+    * specs/WORK-123.md → archived-docs/specs/org/project/specs/WORK-123.md
+    */
+   calculateArchivePath(reference, config) {
+     const type2 = this.detectType(reference.path);
+     const prefix = config.prefix || "archive/";
+     const trimmedPrefix = prefix.trim();
+     if (!trimmedPrefix) {
+       throw new Error("Archive prefix cannot be empty or whitespace-only");
+     }
+     const normalizedPrefix = trimmedPrefix.endsWith("/") ? trimmedPrefix : `${trimmedPrefix}/`;
+     return `${normalizedPrefix}${type2}/${reference.org}/${reference.project}/${reference.path}`;
+   }
+   /**
+    * Detect artifact type from path
+    *
+    * Used to organize archives by type
+    */
+   detectType(path6) {
+     if (path6.startsWith("specs/")) return "specs";
+     if (path6.startsWith("docs/")) return "docs";
+     if (path6.includes("/logs/")) return "logs";
+     return "misc";
+   }
+   /**
+    * Check if path matches any of the patterns
+    *
+    * Supports glob-style patterns:
+    * - specs/** (all files in specs/)
+    * - *.md (all markdown files)
+    * - docs/*.md (markdown files in docs/)
+    */
+   matchesPatterns(path6, patterns) {
+     for (const pattern of patterns) {
+       if (this.matchesPattern(path6, pattern)) {
+         return true;
+       }
+     }
+     return false;
+   }
+   /**
+    * Check if path matches a single pattern
+    */
+   matchesPattern(path6, pattern) {
+     const DOUBLE_STAR = "\0DOUBLE_STAR\0";
+     let regexPattern = pattern.replace(/\*\*/g, DOUBLE_STAR);
+     regexPattern = regexPattern.replace(/[.[\](){}+^$|\\]/g, "\\$&");
+     regexPattern = regexPattern.replace(/\*/g, "[^/]*").replace(/\?/g, "[^/]");
+     regexPattern = regexPattern.replace(new RegExp(DOUBLE_STAR, "g"), ".*");
+     const regex = new RegExp(`^${regexPattern}$`);
+     return regex.test(path6);
+   }
+ };
  var StorageManager = class {
    providers = /* @__PURE__ */ new Map();
    priority;
@@ -13140,7 +13335,10 @@ var StorageManager = class {
      this.providers.set("local", new LocalStorage(config.local));
      this.providers.set("github", new GitHubStorage(config.github));
      this.providers.set("http", new HttpStorage(config.http));
-     this.priority = config.priority || ["local", "github", "http"];
+     if (config.s3Archive) {
+       this.providers.set("s3-archive", new S3ArchiveStorage(config.s3Archive));
+     }
+     this.priority = config.priority || (config.s3Archive ? ["local", "s3-archive", "github", "http"] : ["local", "github", "http"]);
    }
    /**
     * Register a custom storage provider
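The hunk above changes the default lookup order only when an archive is configured. A minimal sketch of the resulting default priority (provider names as they appear in the bundle; the standalone helper function is illustrative, not an export of the package):

```typescript
// Illustrative: the default provider order computed in the StorageManager constructor above.
// An explicit `priority` entry in the storage config still overrides this.
function defaultPriority(hasS3Archive: boolean): string[] {
  return hasS3Archive
    ? ["local", "s3-archive", "github", "http"]
    : ["local", "github", "http"];
}

console.log(defaultPriority(true));  // [ 'local', 's3-archive', 'github', 'http' ]
console.log(defaultPriority(false)); // [ 'local', 'github', 'http' ]
```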
@@ -14364,7 +14562,7 @@ function expandEnvVars(obj) {
    return obj;
  }
  var program2 = new Command();
- program2.name("fractary-codex-mcp").description("MCP server for Fractary Codex knowledge management").version("0.4.0").option("--config <path>", "Path to config file", ".fractary/codex/config.yaml").action(async (options) => {
+ program2.name("fractary-codex-mcp").description("MCP server for Fractary Codex knowledge management").version("0.8.0").option("--config <path>", "Path to config file", ".fractary/codex/config.yaml").action(async (options) => {
    let config = {};
    try {
      const configFile = (0, import_fs.readFileSync)(options.config, "utf-8");
@@ -14375,7 +14573,16 @@ program2.name("fractary-codex-mcp").description("MCP server for Fractary Codex k
        console.error(`Warning: Could not load config file: ${options.config}`);
      }
    }
-   const storage = createStorageManager(config.storage);
+   const storageConfig = {
+     ...config.storage || {}
+   };
+   if (config.archive) {
+     storageConfig.s3Archive = {
+       projects: config.archive.projects || {},
+       fractaryCli: process.env.FRACTARY_CLI || "fractary"
+     };
+   }
+   const storage = createStorageManager(storageConfig);
    const cache = createCacheManager({
      cacheDir: config.cache?.cacheDir || ".fractary/codex/cache",
      ...config.cache
@@ -14385,7 +14592,7 @@ program2.name("fractary-codex-mcp").description("MCP server for Fractary Codex k
    }
    const server = new McpServer({
      name: "fractary-codex",
-     version: "0.4.0",
+     version: "0.8.0",
      cache,
      storage
    });
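For reference, the `matchesPattern` method shipped in the bundle converts globs to regular expressions rather than using a glob library. A standalone sketch of that conversion, useful for predicting which paths a `patterns` entry will route to the archive (re-stated here for illustration; this is not a public export of the package):

```typescript
// Illustrative restatement of the bundled matchesPattern() glob-to-regex logic.
function matchesPattern(path: string, pattern: string): boolean {
  const DOUBLE_STAR = "\0DOUBLE_STAR\0";
  let re = pattern.replace(/\*\*/g, DOUBLE_STAR);        // protect ** before escaping
  re = re.replace(/[.[\](){}+^$|\\]/g, "\\$&");           // escape regex metacharacters
  re = re.replace(/\*/g, "[^/]*").replace(/\?/g, "[^/]"); // * and ? stay within one path segment
  re = re.replace(new RegExp(DOUBLE_STAR, "g"), ".*");    // ** crosses directory separators
  return new RegExp(`^${re}$`).test(path);
}

console.log(matchesPattern("specs/WORK-123.md", "specs/**")); // true
console.log(matchesPattern("docs/api.md", "*.md"));           // false (single * stops at "/")
console.log(matchesPattern("README.md", "*.md"));             // true
```

Note that a single `*` does not cross directory separators, so `*.md` matches `README.md` but not `docs/api.md`; use `**` for recursive matches, as in the `specs/**` example from the README.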