bmad-method 4.35.3 → 4.36.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25) hide show
  1. package/.github/workflows/discord.yaml +16 -0
  2. package/CHANGELOG.md +8 -2
  3. package/README.md +36 -3
  4. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/Complete AI Agent System - Flowchart.svg +102 -0
  5. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.1 Google Cloud Project Setup/1.1.1 - Initial Project Configuration - bash copy.txt +13 -0
  6. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.1 Google Cloud Project Setup/1.1.1 - Initial Project Configuration - bash.txt +13 -0
  7. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.2 Agent Development Kit Installation/1.2.2 - Basic Project Structure - txt.txt +25 -0
  8. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.3 Core Configuration Files/1.3.1 - settings.py +34 -0
  9. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.3 Core Configuration Files/1.3.2 - main.py - Base Application.py +70 -0
  10. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.4 Deployment Configuration/1.4.2 - cloudbuild.yaml +26 -0
  11. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/README.md +109 -0
  12. package/package.json +2 -2
  13. package/tools/flattener/aggregate.js +76 -0
  14. package/tools/flattener/binary.js +53 -0
  15. package/tools/flattener/discovery.js +70 -0
  16. package/tools/flattener/files.js +35 -0
  17. package/tools/flattener/ignoreRules.js +176 -0
  18. package/tools/flattener/main.js +113 -466
  19. package/tools/flattener/projectRoot.js +45 -0
  20. package/tools/flattener/prompts.js +44 -0
  21. package/tools/flattener/stats.js +30 -0
  22. package/tools/flattener/xml.js +86 -0
  23. package/tools/installer/package.json +1 -1
  24. package/tools/shared/bannerArt.js +105 -0
  25. package/tools/installer/package-lock.json +0 -906
@@ -0,0 +1,13 @@
1
+ # 1. Create new Google Cloud Project
2
+ gcloud projects create {{PROJECT_ID}} --name="{{COMPANY_NAME}} AI Agent System"
3
+
4
+ # 2. Set default project
5
+ gcloud config set project {{PROJECT_ID}}
6
+
7
+ # 3. Enable required APIs
8
+ gcloud services enable aiplatform.googleapis.com
9
+ gcloud services enable storage.googleapis.com
10
+ gcloud services enable cloudfunctions.googleapis.com
11
+ gcloud services enable run.googleapis.com
12
+ gcloud services enable firestore.googleapis.com
13
+ gcloud services enable secretmanager.googleapis.com
@@ -0,0 +1,13 @@
1
+ # 1. Create new Google Cloud Project
2
+ gcloud projects create {{PROJECT_ID}} --name="{{COMPANY_NAME}} AI Agent System"
3
+
4
+ # 2. Set default project
5
+ gcloud config set project {{PROJECT_ID}}
6
+
7
+ # 3. Enable required APIs
8
+ gcloud services enable aiplatform.googleapis.com
9
+ gcloud services enable storage.googleapis.com
10
+ gcloud services enable cloudfunctions.googleapis.com
11
+ gcloud services enable run.googleapis.com
12
+ gcloud services enable firestore.googleapis.com
13
+ gcloud services enable secretmanager.googleapis.com
@@ -0,0 +1,25 @@
1
+ {{company_name}}-ai-agents/
2
+ ├── agents/
3
+ │ ├── __init__.py
4
+ │ ├── {{team_1}}/
5
+ │ │ ├── __init__.py
6
+ │ │ ├── {{agent_1}}.py
7
+ │ │ └── {{agent_2}}.py
8
+ │ └── {{team_2}}/
9
+ ├── tasks/
10
+ │ ├── __init__.py
11
+ │ ├── {{task_category_1}}/
12
+ │ └── {{task_category_2}}/
13
+ ├── templates/
14
+ │ ├── {{document_type_1}}/
15
+ │ └── {{document_type_2}}/
16
+ ├── checklists/
17
+ ├── data/
18
+ ├── workflows/
19
+ ├── config/
20
+ │ ├── settings.py
21
+ │ └── agent_config.yaml
22
+ ├── main.py
23
+ └── deployment/
24
+ ├── Dockerfile
25
+ └── cloudbuild.yaml
@@ -0,0 +1,34 @@
1
+ import os
2
+ from pydantic import BaseSettings
3
+
4
+ class Settings(BaseSettings):
5
+ # Google Cloud Configuration
6
+ project_id: str = "{{PROJECT_ID}}"
7
+ location: str = "{{LOCATION}}" # e.g., "us-central1"
8
+
9
+ # Company Information
10
+ company_name: str = "{{COMPANY_NAME}}"
11
+ industry: str = "{{INDUSTRY}}"
12
+ business_type: str = "{{BUSINESS_TYPE}}"
13
+
14
+ # Agent Configuration
15
+ default_model: str = "gemini-1.5-pro"
16
+ max_iterations: int = 10
17
+ timeout_seconds: int = 300
18
+
19
+ # Storage Configuration
20
+ bucket_name: str = "{{COMPANY_NAME}}-ai-agents-storage"
21
+ database_name: str = "{{COMPANY_NAME}}-ai-agents-db"
22
+
23
+ # API Configuration
24
+ session_service_type: str = "vertex" # or "in_memory" for development
25
+ artifact_service_type: str = "gcs" # or "in_memory" for development
26
+ memory_service_type: str = "vertex" # or "in_memory" for development
27
+
28
+ # Security
29
+ service_account_path: str = "./{{COMPANY_NAME}}-ai-agents-key.json"
30
+
31
+ class Config:
32
+ env_file = ".env"
33
+
34
+ settings = Settings()
@@ -0,0 +1,70 @@
1
+ import asyncio
2
+ from google.adk.agents import LlmAgent
3
+ from google.adk.runners import Runner
4
+ from google.adk.sessions import VertexAiSessionService
5
+ from google.adk.artifacts import GcsArtifactService
6
+ from google.adk.memory import VertexAiRagMemoryService
7
+ from google.adk.models import Gemini
8
+
9
+ from config.settings import settings
10
+ from agents.{{primary_team}}.{{main_orchestrator}} import {{MainOrchestratorClass}}
11
+
12
+ class {{CompanyName}}AISystem:
13
+ def __init__(self):
14
+ self.settings = settings
15
+ self.runner = None
16
+ self.main_orchestrator = None
17
+
18
+ async def initialize(self):
19
+ """Initialize the AI agent system"""
20
+
21
+ # Create main orchestrator
22
+ self.main_orchestrator = {{MainOrchestratorClass}}()
23
+
24
+ # Initialize services
25
+ session_service = VertexAiSessionService(
26
+ project=self.settings.project_id,
27
+ location=self.settings.location
28
+ )
29
+
30
+ artifact_service = GcsArtifactService(
31
+ bucket_name=self.settings.bucket_name
32
+ )
33
+
34
+ memory_service = VertexAiRagMemoryService(
35
+ rag_corpus=f"projects/{self.settings.project_id}/locations/{self.settings.location}/ragCorpora/{{COMPANY_NAME}}-knowledge"
36
+ )
37
+
38
+ # Create runner
39
+ self.runner = Runner(
40
+ app_name=f"{self.settings.company_name}-AI-System",
41
+ agent=self.main_orchestrator,
42
+ session_service=session_service,
43
+ artifact_service=artifact_service,
44
+ memory_service=memory_service
45
+ )
46
+
47
+ print(f"✅ {self.settings.company_name} AI Agent System initialized successfully!")
48
+
49
+ async def run_agent_interaction(self, user_id: str, session_id: str, message: str):
50
+ """Run agent interaction"""
51
+ if not self.runner:
52
+ await self.initialize()
53
+
54
+ async for event in self.runner.run_async(
55
+ user_id=user_id,
56
+ session_id=session_id,
57
+ new_message=message
58
+ ):
59
+ yield event
60
+
61
+ # Application factory
62
+ async def create_app():
63
+ ai_system = {{CompanyName}}AISystem()
64
+ await ai_system.initialize()
65
+ return ai_system
66
+
67
+ if __name__ == "__main__":
68
+ # Development server
69
+ import uvicorn
70
+ uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
@@ -0,0 +1,26 @@
1
+ steps:
2
+ # Build the container image
3
+ - name: 'gcr.io/cloud-builders/docker'
4
+ args: ['build', '-t', 'gcr.io/{{PROJECT_ID}}/{{COMPANY_NAME}}-ai-agents:$COMMIT_SHA', '.']
5
+
6
+ # Push the container image to Container Registry
7
+ - name: 'gcr.io/cloud-builders/docker'
8
+ args: ['push', 'gcr.io/{{PROJECT_ID}}/{{COMPANY_NAME}}-ai-agents:$COMMIT_SHA']
9
+
10
+ # Deploy container image to Cloud Run
11
+ - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
12
+ entrypoint: gcloud
13
+ args:
14
+ - 'run'
15
+ - 'deploy'
16
+ - '{{COMPANY_NAME}}-ai-agents'
17
+ - '--image'
18
+ - 'gcr.io/{{PROJECT_ID}}/{{COMPANY_NAME}}-ai-agents:$COMMIT_SHA'
19
+ - '--region'
20
+ - '{{LOCATION}}'
21
+ - '--platform'
22
+ - 'managed'
23
+ - '--allow-unauthenticated'
24
+
25
+ images:
26
+ - 'gcr.io/{{PROJECT_ID}}/{{COMPANY_NAME}}-ai-agents:$COMMIT_SHA'
@@ -0,0 +1,109 @@
1
+ # BMad Expansion Pack: Google Cloud Vertex AI Agent System
2
+
3
+ [License: MIT](https://opensource.org/licenses/MIT)
4
+ [GitHub: BMAD-METHOD](https://github.com/antmikinka/BMAD-METHOD)
5
+ [Google Cloud](https://cloud.google.com/)
6
+
7
+ This expansion pack provides a complete, deployable starter kit for building and hosting sophisticated AI agent systems on Google Cloud Platform (GCP). It bridges the gap between the BMad Method's natural language framework and a production-ready cloud environment, leveraging Google Vertex AI, Cloud Run, and the Google Agent Development Kit (ADK).
8
+
9
+ ## Features
10
+
11
+ - **Automated GCP Setup**: `gcloud` scripts to configure your project, service accounts, and required APIs in minutes.
12
+ - **Production-Ready Deployment**: Includes a `Dockerfile` and `cloudbuild.yaml` for easy, repeatable deployments to Google Cloud Run.
13
+ - **Rich Template Library**: A comprehensive set of BMad-compatible templates for Teams, Agents, Tasks, Workflows, Documents, and Checklists.
14
+ - **Pre-configured Agent Roles**: Includes powerful master templates for key agent archetypes like Orchestrators and Specialists.
15
+ - **Highly Customizable**: Easily adapt the entire system with company-specific variables and industry-specific configurations.
16
+ - **Powered by Google ADK**: Built on the official Google Agent Development Kit for robust and native integration with Vertex AI services.
17
+
18
+ ## Prerequisites
19
+
20
+ Before you begin, ensure you have the following installed and configured:
21
+
22
+ - A Google Cloud Platform (GCP) Account with an active billing account.
23
+ - The [Google Cloud SDK (`gcloud` CLI)](https://cloud.google.com/sdk/docs/install) installed and authenticated.
24
+ - [Docker](https://www.docker.com/products/docker-desktop/) installed on your local machine.
25
+ - Python 3.11+
26
+
27
+ ## Quick Start Guide
28
+
29
+ Follow these steps to get your own AI agent system running on Google Cloud.
30
+
31
+ ### 1\. Configure Setup Variables
32
+
33
+ The setup scripts use placeholder variables. Before running them, open the files in the `/scripts` directory and replace the following placeholders with your own values:
34
+
35
+ - `{{PROJECT_ID}}`: Your unique Google Cloud project ID.
36
+ - `{{COMPANY_NAME}}`: Your company or project name (used for naming resources).
37
+ - `{{LOCATION}}`: The GCP region you want to deploy to (e.g., `us-central1`).
38
+
39
+ ### 2\. Run the GCP Setup Scripts
40
+
41
+ Execute the setup scripts to prepare your Google Cloud environment.
42
+
43
+ ```bash
44
+ # Navigate to the scripts directory
45
+ cd scripts/
46
+
47
+ # Run the project configuration script
48
+ sh 1-initial-project-config.sh
49
+
50
+ # Run the service account setup script
51
+ sh 2-service-account-setup.sh
52
+ ```
53
+
54
+ These scripts will enable the necessary APIs, create a service account, assign permissions, and download a JSON key file required for authentication.
55
+
56
+ ### 3\. Install Python Dependencies
57
+
58
+ Install the required Python packages for the application.
59
+
60
+ ```bash
61
+ # From the root of the expansion pack
62
+ pip install -r requirements.txt
63
+ ```
64
+
65
+ ### 4\. Deploy to Cloud Run
66
+
67
+ Deploy the entire agent system as a serverless application using Cloud Build.
68
+
69
+ ```bash
70
+ # From the root of the expansion pack
71
+ gcloud builds submit --config deployment/cloudbuild.yaml .
72
+ ```
73
+
74
+ This command will build the Docker container, push it to the Google Container Registry, and deploy it to Cloud Run. Your agent system is now live\!
75
+
76
+ ## How to Use
77
+
78
+ Once deployed, the power of this system lies in its natural language templates.
79
+
80
+ 1. **Define Your Organization**: Go to `/templates/teams` and use the templates to define your agent teams (e.g., Product Development, Operations).
81
+ 2. **Customize Your Agents**: In `/templates/agents`, use the `Master-Agent-Template.yaml` to create new agents or customize the existing Orchestrator and Specialist templates. Define their personas, skills, and commands in plain English.
82
+ 3. **Build Your Workflows**: In `/templates/workflows`, link agents and tasks together to create complex, automated processes.
83
+
84
+ The deployed application reads these YAML and Markdown files to dynamically construct and run your AI workforce. When you update a template, your live agents automatically adopt the new behaviors.
85
+
86
+ ## What's Included
87
+
88
+ This expansion pack has a comprehensive structure to get you started:
89
+
90
+ ```
91
+ /
92
+ ├── deployment/ # Dockerfile and cloudbuild.yaml for deployment
93
+ ├── scripts/ # GCP setup scripts (project config, service accounts)
94
+ ├── src/ # Python source code (main.py, settings.py)
95
+ ├── templates/
96
+ │ ├── agents/ # Master, Orchestrator, Specialist agent templates
97
+ │ ├── teams/ # Team structure templates
98
+ │ ├── tasks/ # Generic and specialized task templates
99
+ │ ├── documents/ # Document and report templates
100
+ │ ├── checklists/ # Quality validation checklists
101
+ │ ├── workflows/ # Workflow definition templates
102
+ │ └── ...and more
103
+ ├── config/ # Customization guides and variable files
104
+ └── requirements.txt # Python package dependencies
105
+ ```
106
+
107
+ ## Contributing
108
+
109
+ Contributions are welcome\! Please follow the main project's `CONTRIBUTING.md` guidelines. For major changes or new features for this expansion pack, please open an issue or discussion first.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bmad-method",
3
- "version": "4.35.3",
3
+ "version": "4.36.0",
4
4
  "description": "Breakthrough Method of Agile AI-driven Development",
5
5
  "main": "tools/cli.js",
6
6
  "bin": {
@@ -40,9 +40,9 @@
40
40
  "commander": "^14.0.0",
41
41
  "fs-extra": "^11.3.0",
42
42
  "glob": "^11.0.3",
43
+ "ignore": "^7.0.5",
43
44
  "inquirer": "^8.2.6",
44
45
  "js-yaml": "^4.1.0",
45
- "minimatch": "^10.0.3",
46
46
  "ora": "^5.4.1"
47
47
  },
48
48
  "keywords": [
@@ -0,0 +1,76 @@
1
+ const fs = require("fs-extra");
2
+ const path = require("node:path");
3
+ const os = require("node:os");
4
+ const { isBinaryFile } = require("./binary.js");
5
+
6
+ /**
7
+ * Aggregate file contents with bounded concurrency.
8
+ * Returns text files, binary files (with size), and errors.
9
+ * @param {string[]} files absolute file paths
10
+ * @param {string} rootDir
11
+ * @param {{ text?: string, warn?: (msg: string) => void } | null} spinner
12
+ */
13
+ async function aggregateFileContents(files, rootDir, spinner = null) {
14
+ const results = {
15
+ textFiles: [],
16
+ binaryFiles: [],
17
+ errors: [],
18
+ totalFiles: files.length,
19
+ processedFiles: 0,
20
+ };
21
+
22
+ // Automatic concurrency selection based on CPU count and workload size.
23
+ // - Base on 2x logical CPUs, clamped to [2, 64]
24
+ // - For very small workloads, avoid excessive parallelism
25
+ const cpuCount = (os.cpus && Array.isArray(os.cpus()) ? os.cpus().length : (os.cpus?.length || 4));
26
+ let concurrency = Math.min(64, Math.max(2, (Number(cpuCount) || 4) * 2));
27
+ if (files.length > 0 && files.length < concurrency) {
28
+ concurrency = Math.max(1, Math.min(concurrency, Math.ceil(files.length / 2)));
29
+ }
30
+
31
+ async function processOne(filePath) {
32
+ try {
33
+ const relativePath = path.relative(rootDir, filePath);
34
+ if (spinner) {
35
+ spinner.text = `Processing: ${relativePath} (${results.processedFiles + 1}/${results.totalFiles})`;
36
+ }
37
+
38
+ const binary = await isBinaryFile(filePath);
39
+ if (binary) {
40
+ const size = (await fs.stat(filePath)).size;
41
+ results.binaryFiles.push({ path: relativePath, absolutePath: filePath, size });
42
+ } else {
43
+ const content = await fs.readFile(filePath, "utf8");
44
+ results.textFiles.push({
45
+ path: relativePath,
46
+ absolutePath: filePath,
47
+ content,
48
+ size: content.length,
49
+ lines: content.split("\n").length,
50
+ });
51
+ }
52
+ } catch (error) {
53
+ const relativePath = path.relative(rootDir, filePath);
54
+ const errorInfo = { path: relativePath, absolutePath: filePath, error: error.message };
55
+ results.errors.push(errorInfo);
56
+ if (spinner) {
57
+ spinner.warn(`Warning: Could not read file ${relativePath}: ${error.message}`);
58
+ } else {
59
+ console.warn(`Warning: Could not read file ${relativePath}: ${error.message}`);
60
+ }
61
+ } finally {
62
+ results.processedFiles++;
63
+ }
64
+ }
65
+
66
+ for (let i = 0; i < files.length; i += concurrency) {
67
+ const slice = files.slice(i, i + concurrency);
68
+ await Promise.all(slice.map(processOne));
69
+ }
70
+
71
+ return results;
72
+ }
73
+
74
+ module.exports = {
75
+ aggregateFileContents,
76
+ };
@@ -0,0 +1,53 @@
1
+ const fsp = require("node:fs/promises");
2
+ const path = require("node:path");
3
+ const { Buffer } = require("node:buffer");
4
+
5
+ /**
6
+ * Efficiently determine if a file is binary without reading the whole file.
7
+ * - Fast path by extension for common binaries
8
+ * - Otherwise read a small prefix and check for NUL bytes
9
+ * @param {string} filePath
10
+ * @returns {Promise<boolean>}
11
+ */
12
+ async function isBinaryFile(filePath) {
13
+ try {
14
+ const stats = await fsp.stat(filePath);
15
+ if (stats.isDirectory()) {
16
+ throw new Error("EISDIR: illegal operation on a directory");
17
+ }
18
+
19
+ const binaryExtensions = new Set([
20
+ ".jpg", ".jpeg", ".png", ".gif", ".bmp", ".ico", ".svg",
21
+ ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx",
22
+ ".zip", ".tar", ".gz", ".rar", ".7z",
23
+ ".exe", ".dll", ".so", ".dylib",
24
+ ".mp3", ".mp4", ".avi", ".mov", ".wav",
25
+ ".ttf", ".otf", ".woff", ".woff2",
26
+ ".bin", ".dat", ".db", ".sqlite",
27
+ ]);
28
+
29
+ const ext = path.extname(filePath).toLowerCase();
30
+ if (binaryExtensions.has(ext)) return true;
31
+ if (stats.size === 0) return false;
32
+
33
+ const sampleSize = Math.min(4096, stats.size);
34
+ const fd = await fsp.open(filePath, "r");
35
+ try {
36
+ const buffer = Buffer.allocUnsafe(sampleSize);
37
+ const { bytesRead } = await fd.read(buffer, 0, sampleSize, 0);
38
+ const slice = bytesRead === sampleSize ? buffer : buffer.subarray(0, bytesRead);
39
+ return slice.includes(0);
40
+ } finally {
41
+ await fd.close();
42
+ }
43
+ } catch (error) {
44
+ console.warn(
45
+ `Warning: Could not determine if file is binary: ${filePath} - ${error.message}`,
46
+ );
47
+ return false;
48
+ }
49
+ }
50
+
51
+ module.exports = {
52
+ isBinaryFile,
53
+ };
@@ -0,0 +1,70 @@
1
+ const path = require("node:path");
2
+ const { execFile } = require("node:child_process");
3
+ const { promisify } = require("node:util");
4
+ const { glob } = require("glob");
5
+ const { loadIgnore } = require("./ignoreRules.js");
6
+
7
+ const pExecFile = promisify(execFile);
8
+
9
+ async function isGitRepo(rootDir) {
10
+ try {
11
+ const { stdout } = await pExecFile("git", [
12
+ "rev-parse",
13
+ "--is-inside-work-tree",
14
+ ], { cwd: rootDir });
15
+ return String(stdout || "").toString().trim() === "true";
16
+ } catch {
17
+ return false;
18
+ }
19
+ }
20
+
21
+ async function gitListFiles(rootDir) {
22
+ try {
23
+ const { stdout } = await pExecFile("git", [
24
+ "ls-files",
25
+ "-co",
26
+ "--exclude-standard",
27
+ ], { cwd: rootDir });
28
+ return String(stdout || "")
29
+ .split(/\r?\n/)
30
+ .map((s) => s.trim())
31
+ .filter(Boolean);
32
+ } catch {
33
+ return [];
34
+ }
35
+ }
36
+
37
+ /**
38
+ * Discover files under rootDir.
39
+ * - Prefer git ls-files when available for speed/correctness
40
+ * - Fallback to glob and apply unified ignore rules
41
+ * @param {string} rootDir
42
+ * @param {object} [options]
43
+ * @param {boolean} [options.preferGit=true]
44
+ * @returns {Promise<string[]>} absolute file paths
45
+ */
46
+ async function discoverFiles(rootDir, options = {}) {
47
+ const { preferGit = true } = options;
48
+ const { filter } = await loadIgnore(rootDir);
49
+
50
+ // Try git first
51
+ if (preferGit && await isGitRepo(rootDir)) {
52
+ const relFiles = await gitListFiles(rootDir);
53
+ const filteredRel = relFiles.filter((p) => filter(p));
54
+ return filteredRel.map((p) => path.resolve(rootDir, p));
55
+ }
56
+
57
+ // Glob fallback
58
+ const globbed = await glob("**/*", {
59
+ cwd: rootDir,
60
+ nodir: true,
61
+ dot: true,
62
+ follow: false,
63
+ });
64
+ const filteredRel = globbed.filter((p) => filter(p));
65
+ return filteredRel.map((p) => path.resolve(rootDir, p));
66
+ }
67
+
68
+ module.exports = {
69
+ discoverFiles,
70
+ };
@@ -0,0 +1,35 @@
1
+ const path = require("node:path");
2
+ const discovery = require("./discovery.js");
3
+ const ignoreRules = require("./ignoreRules.js");
4
+ const { isBinaryFile } = require("./binary.js");
5
+ const { aggregateFileContents } = require("./aggregate.js");
6
+
7
+ // Backward-compatible signature; delegate to central loader
8
+ async function parseGitignore(gitignorePath) {
9
+ return await ignoreRules.parseGitignore(gitignorePath);
10
+ }
11
+
12
+ async function discoverFiles(rootDir) {
13
+ try {
14
+ // Delegate to discovery module which respects .gitignore and defaults
15
+ return await discovery.discoverFiles(rootDir, { preferGit: true });
16
+ } catch (error) {
17
+ console.error("Error discovering files:", error.message);
18
+ return [];
19
+ }
20
+ }
21
+
22
+ async function filterFiles(files, rootDir) {
23
+ const { filter } = await ignoreRules.loadIgnore(rootDir);
24
+ const relativeFiles = files.map((f) => path.relative(rootDir, f));
25
+ const filteredRelative = relativeFiles.filter((p) => filter(p));
26
+ return filteredRelative.map((p) => path.resolve(rootDir, p));
27
+ }
28
+
29
+ module.exports = {
30
+ parseGitignore,
31
+ discoverFiles,
32
+ isBinaryFile,
33
+ aggregateFileContents,
34
+ filterFiles,
35
+ };