@meshxdata/fops 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,6 +7,9 @@ import inquirer from "inquirer";
7
7
  import { make } from "../shell.js";
8
8
  import { readFopsConfig, saveFopsConfig, promptAwsSsoConfig, detectEcrRegistry, checkEcrRepos } from "./aws.js";
9
9
 
10
+ // TODO: change back to "main" once stack/api is merged
11
+ export const CLONE_BRANCH = "stack/api";
12
+
10
13
  export function runSetup(dir, opts = {}) {
11
14
  const submodules = opts.submodules !== false;
12
15
  const createEnv = opts.env !== false;
@@ -34,19 +37,21 @@ export function runSetup(dir, opts = {}) {
34
37
  }
35
38
  }
36
39
  if (submodules) {
37
- console.log(chalk.blue("Initializing git submodules (checking out main)..."));
40
+ console.log(chalk.blue(`Initializing git submodules (checking out ${CLONE_BRANCH})...`));
38
41
  try {
39
42
  await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
40
- console.log(chalk.green("Submodules initialized all on main."));
43
+ // Check out the target branch on each submodule
44
+ await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
45
+ console.log(chalk.green(`Submodules initialized — on ${CLONE_BRANCH} (falling back to main).`));
41
46
  } catch {
42
- console.log(chalk.yellow("⚠ Some submodules had issues. Attempting to check out main individually..."));
47
+ console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to check out ${CLONE_BRANCH} individually...`));
43
48
  try {
44
49
  await execa("git", ["submodule", "init"], { cwd: dir, stdio: "inherit" });
45
- await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: dir, stdio: "inherit" });
50
+ await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
46
51
  console.log(chalk.green("Submodules recovered."));
47
52
  } catch {
48
53
  console.log(chalk.yellow("Some submodules still failed. Fix manually with:"));
49
- console.log(chalk.gray(` cd ${dir} && git submodule foreach 'git checkout main && git pull'`));
54
+ console.log(chalk.gray(` cd ${dir} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'`));
50
55
  }
51
56
  }
52
57
  }
@@ -5,7 +5,7 @@ import chalk from "chalk";
5
5
  import { execa } from "execa";
6
6
  import inquirer from "inquirer";
7
7
  import { isFoundationRoot, findComposeRootUp } from "../project.js";
8
- import { runSetup } from "./setup.js";
8
+ import { runSetup, CLONE_BRANCH } from "./setup.js";
9
9
 
10
10
  export async function runInitWizard() {
11
11
  const cwd = process.cwd();
@@ -28,19 +28,21 @@ export async function runInitWizard() {
28
28
  projectRoot = foundUp;
29
29
  }
30
30
  if (!projectRoot) {
31
- let hasGit = false, hasDocker = false, hasAws = false;
31
+ let hasGit = false, hasDocker = false, hasAws = false, hasClaude = false;
32
32
  try { await execa("git", ["--version"]); hasGit = true; } catch {}
33
33
  try { await execa("docker", ["info"], { timeout: 5000 }); hasDocker = true; } catch {}
34
34
  try { await execa("aws", ["--version"]); hasAws = true; } catch {}
35
+ try { await execa("claude", ["--version"]); hasClaude = true; } catch {}
35
36
  console.log(chalk.cyan(" Prerequisites\n"));
36
37
  console.log(hasGit ? chalk.green(" ✓ Git") : chalk.red(" ✗ Git — install git first"));
37
38
  console.log(hasDocker ? chalk.green(" ✓ Docker") : chalk.red(" ✗ Docker — install and start Docker Desktop"));
39
+ console.log(hasClaude ? chalk.green(" ✓ Claude CLI") : chalk.red(" ✗ Claude CLI — run: npm install (included as a dependency)"));
38
40
  console.log(hasAws ? chalk.green(" ✓ AWS CLI") : chalk.yellow(" ⚠ AWS CLI — install for ECR image pulls (brew install awscli)"));
39
41
  const netrcPath = path.join(os.homedir(), ".netrc");
40
42
  const hasNetrc = fs.existsSync(netrcPath) && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
41
43
  console.log(hasNetrc ? chalk.green(" ✓ GitHub credentials (~/.netrc)") : chalk.yellow(" ⚠ GitHub credentials — add to ~/.netrc (needed for private submodules)"));
42
44
  console.log("");
43
- if (!hasGit || !hasDocker) {
45
+ if (!hasGit || !hasDocker || !hasClaude) {
44
46
  console.log(chalk.red("Fix the missing prerequisites above, then run fops init again.\n"));
45
47
  process.exit(1);
46
48
  }
@@ -55,8 +57,9 @@ export async function runInitWizard() {
55
57
  const { repoUrl } = await inquirer.prompt([
56
58
  { type: "input", name: "repoUrl", message: "Repository URL:", default: "https://github.com/meshxdata/foundation-compose.git", validate: (v) => (v?.trim() ? true : "Repository URL is required.") },
57
59
  ]);
60
+ const repoName = repoUrl.trim().replace(/\.git$/, "").split("/").pop() || "foundation-compose";
58
61
  const { targetDir } = await inquirer.prompt([
59
- { type: "input", name: "targetDir", message: "Clone into:", default: cwd },
62
+ { type: "input", name: "targetDir", message: "Clone into:", default: path.join(cwd, repoName) },
60
63
  ]);
61
64
  const resolved = path.resolve(targetDir.trim());
62
65
  if (fs.existsSync(resolved)) {
@@ -64,9 +67,9 @@ export async function runInitWizard() {
64
67
  if (isFoundationRoot(resolved)) { console.log(chalk.green(" Looks like a Foundation project — using it.\n")); projectRoot = resolved; }
65
68
  else { console.log(chalk.red(" Not a Foundation project. Remove it or choose a different path.\n")); process.exit(1); }
66
69
  } else {
67
- console.log(chalk.blue("\nCloning (this may take a minute)...\n"));
70
+ console.log(chalk.blue(`\nCloning (branch: ${CLONE_BRANCH}, this may take a minute)...\n`));
68
71
  try {
69
- await execa("git", ["clone", repoUrl.trim(), resolved], { stdio: "inherit" });
72
+ await execa("git", ["clone", "-b", CLONE_BRANCH, repoUrl.trim(), resolved], { stdio: "inherit" });
70
73
  } catch (err) {
71
74
  console.log(chalk.red("\n Clone failed. Check the URL and your credentials.\n"));
72
75
  if (!hasNetrc) {
@@ -75,19 +78,20 @@ export async function runInitWizard() {
75
78
  }
76
79
  process.exit(1);
77
80
  }
78
- console.log(chalk.blue("\nInitializing submodules (checking out main)...\n"));
81
+ console.log(chalk.blue(`\nInitializing submodules (checking out ${CLONE_BRANCH})...\n`));
79
82
  try {
80
83
  await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: resolved, stdio: "inherit" });
81
- console.log(chalk.green("\n Cloned successfully all submodules on main.\n"));
84
+ await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: resolved, stdio: "inherit" });
85
+ console.log(chalk.green(`\n Cloned successfully — submodules on ${CLONE_BRANCH} (falling back to main).\n`));
82
86
  } catch {
83
- console.log(chalk.yellow("\n ⚠ Some submodules had issues. Attempting to check out main individually...\n"));
87
+ console.log(chalk.yellow(`\n ⚠ Some submodules had issues. Attempting to check out ${CLONE_BRANCH} individually...\n`));
84
88
  try {
85
89
  await execa("git", ["submodule", "init"], { cwd: resolved, stdio: "inherit" });
86
- await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: resolved, stdio: "inherit" });
90
+ await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: resolved, stdio: "inherit" });
87
91
  console.log(chalk.green(" Submodules recovered.\n"));
88
92
  } catch {
89
93
  console.log(chalk.yellow(" Some submodules still failed. Fix manually with:"));
90
- console.log(chalk.gray(` cd ${resolved} && git submodule foreach 'git checkout main && git pull'\n`));
94
+ console.log(chalk.gray(` cd ${resolved} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'\n`));
91
95
  }
92
96
  }
93
97
  projectRoot = resolved;
package/src/shell.js CHANGED
@@ -1,9 +1,9 @@
1
1
  import { execa } from "execa";
2
2
 
3
3
  export async function make(root, target, args = []) {
4
- return execa("make", [target, ...args], { cwd: root, stdio: "inherit" });
4
+ return execa("make", [target, ...args], { cwd: root, stdio: "inherit", reject: false });
5
5
  }
6
6
 
7
7
  export async function dockerCompose(root, args) {
8
- return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit" });
8
+ return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit", reject: false });
9
9
  }
@@ -1,107 +1,241 @@
1
1
  ---
2
2
  name: foundation-stack
3
- description: Managing the Foundation data mesh stack with fops
3
+ description: Complete Foundation data mesh platform knowledge — domain model, API, operations, and troubleshooting
4
4
  ---
5
- ## Foundation Stack Management
6
5
 
7
- ### Lifecycle
6
+ ## What is Foundation
7
+
8
+ Foundation is a data mesh platform. Users create **meshes** (domains), register **data systems** and **data sources**, define **data products** from **data objects**, and connect downstream **applications**. Data flows through compute pipelines (Spark/Ray) that transform raw data into queryable products via Trino SQL.
9
+
10
+ ## Domain Model
8
11
 
9
- Start the full stack:
10
- ```bash
11
- fops up
12
+ ```
13
+ Mesh (data domain)
14
+ ├─ Data Product (SADP — source/atomic)
15
+ │ ├─ Data Object (file/table resource)
16
+ │ │ └─ Data Source (connector)
17
+ │ │ └─ Data System (external platform)
18
+ │ ├─ Compute (builder + transformations)
19
+ │ └─ Schema (column definitions)
20
+ ├─ Data Product (CADP — composite/aggregated)
21
+ │ └─ depends on other Data Products
22
+ ├─ Application (downstream consumer)
23
+ ├─ Data System
24
+ └─ Data Source
12
25
  ```
13
26
 
14
- Stop all services:
15
- ```bash
16
- fops down
27
+ ### Entity Types
28
+
29
+ - **Mesh**: top-level domain container. Has name, label, description, purpose, assignees, security policies.
30
+ - **Data Product**: unit of data ownership. Two types:
31
+ - **SADP** (Source-Aligned): wraps raw data objects from external sources.
32
+ - **CADP** (Consumer-Aligned): derived from other data products via transformations.
33
+ - **Data Object**: atomic data resource (CSV, Parquet, table). Has path, delimiter, schema.
34
+ - **Data Source**: connection to external storage (S3, databases). Holds credentials via Vault.
35
+ - **Data System**: external platform (e.g. Procore, SAP, Salesforce).
36
+ - **Application**: downstream consumer of data products (dashboards, APIs, ML models).
37
+ - **Compute**: processing config — builder code (PySpark/Ray), transformations, scheduling.
38
+
39
+ ## API Reference
40
+
41
+ Base URL: `http://localhost:9001/api`
42
+ Auth: `POST /api/iam/login` with `{"username":"compose@meshx.io","password":""}` → returns bearer token.
43
+ Headers: `Authorization: Bearer <token>`, `x-org: root`
44
+
45
+ ### Mesh
46
+
47
+ ```
48
+ POST /api/data/mesh — Create mesh
49
+ GET /api/data/mesh?identifier=<id> — Get mesh
50
+ GET /api/data/mesh/list — List meshes (paginated)
51
+ PUT /api/data/mesh — Update mesh
52
+ DELETE /api/data/mesh/cascade — Delete mesh + all contents
53
+ GET /api/data/mesh/landscape — Get mesh DAG (visual graph)
17
54
  ```
18
55
 
19
- Stop and remove all volumes (clean slate):
20
- ```bash
21
- fops down --clean
56
+ Create body:
57
+ ```json
58
+ {
59
+ "entity": {"name":"My Mesh","entity_type":"mesh","label":"MM","description":"...","purpose":"...","assignees":[{"email":"user@meshx.io","role":""}]},
60
+ "entity_info": {"owner":"user@meshx.io","contact_ids":[],"links":[]}
61
+ }
22
62
  ```
23
63
 
24
- Check what's running:
25
- ```bash
26
- fops status
64
+ ### Data System
65
+
66
+ ```
67
+ POST /api/data/data_system — Create
68
+ GET /api/data/data_system/list — List
69
+ DELETE /api/data/data_system — Delete
27
70
  ```
28
71
 
29
- ### Debugging
72
+ ### Data Source
30
73
 
31
- Run full environment diagnostics:
32
- ```bash
33
- fops doctor
74
+ ```
75
+ POST /api/data/data_source — Create
76
+ PUT /api/data/data_source/connection — Set connection (S3, etc.)
77
+ POST /api/data/data_source/secret — Set secrets (access keys)
78
+ GET /api/data/data_source/list — List
34
79
  ```
35
80
 
36
- Auto-fix detected issues:
37
- ```bash
38
- fops doctor --fix
81
+ Connection body (S3):
82
+ ```json
83
+ {"connection":{"connection_type":"s3","url":"http://foundation-storage-engine:8080","access_key":{"env_key":"S3_KEY"},"access_secret":{"env_key":"S3_SECRET"}}}
39
84
  ```
40
85
 
41
- Tail logs for all services:
42
- ```bash
43
- fops logs
86
+ ### Data Object
87
+
88
+ ```
89
+ POST /api/data/data_object — Create
90
+ PUT /api/data/data_object/config — Configure (type, path, delimiter)
91
+ GET /api/data/data_object/schema — Get inferred schema
92
+ GET /api/data/data_object/list — List
44
93
  ```
45
94
 
46
- Tail logs for a specific service:
47
- ```bash
48
- fops logs backend
49
- fops logs frontend
50
- fops logs postgres
95
+ Config body:
96
+ ```json
97
+ {"configuration":{"data_object_type":"csv","path":"/spark/samples/file.csv","has_header":true,"delimiter":","}}
51
98
  ```
52
99
 
53
- ### Suggesting Commands
100
+ ### Data Product
101
+
102
+ ```
103
+ POST /api/data/data_product — Create (SADP or CADP)
104
+ PUT /api/data/data_product/schema — Set schema
105
+ PUT /api/data/data_product/compute/builder — Set builder + transformations
106
+ POST /api/data/data_product/compute/run — Trigger compute
107
+ GET /api/data/data_product/data — Query product data
108
+ POST /api/data/data_product/data/query — Trino SQL query
109
+ GET /api/data/data_product/list — List
110
+ ```
111
+
112
+ ### Linking Entities
54
113
 
55
- Always suggest **2–3 commands** in separate fenced blocks so the user can choose. Pair the primary action with a useful follow-up:
114
+ All links follow the pattern:
115
+ ```
116
+ POST /api/data/link/{parent_type}/{child_type}?identifier={parent_id}&child_identifier={child_id}
117
+ DELETE /api/data/link/{parent_type}/{child_type}?identifier={parent_id}&child_identifier={child_id}
118
+ ```
56
119
 
57
- - Restart → then logs: `fops restart kafka` + `fops logs kafka`
58
 - Debug → then fix: `fops doctor` + `fops doctor --fix`
59
 - Status → then logs: `fops status` + `fops logs`
60
 - Setup → then verify: `fops init` + `fops doctor`
120
+ Common links:
121
 + - `data_system → data_source` (source belongs to system)
122
 + - `data_source → data_object` (object comes from source)
123
 + - `data_object → data_product` (SADP wraps object)
124
+ - `data_product → data_product` (CADP depends on SADP)
125
+ - `data_product → application` (app consumes product)
61
126
 
62
- Never suggest only 1 command when a follow-up would be useful.
127
+ ### Applications
63
128
 
64
- ### Stale Images
129
+ ```
130
+ POST /api/data/application — Create
131
+ GET /api/data/application/list — List
132
+ ```
133
+
134
+ ### Search
65
135
 
66
- If an image is more than 7 days old, it's likely stale — dependencies may have changed. When you see errors about missing packages, commands not found, or broken virtualenvs, **always check image ages first** and suggest a rebuild:
136
+ ```
137
+ GET /api/data/search?q=<term> — Full-text search across all entities
138
+ ```
139
+
140
+ ### Pagination
141
+
142
+ All list endpoints accept `page` and `per_page` (default 20, max 100). Response includes `total`, `page`, `per_page`, `total_pages`.
143
+
144
+ ## Workflow: Creating a Data Mesh from Scratch
145
+
146
+ 1. **Login**: `POST /api/iam/login` → get token
147
+ 2. **Create Mesh**: `POST /api/data/mesh`
148
+ 3. **Create Data Systems**: `POST /api/data/data_system` (one per external platform)
149
+ 4. **Create Data Sources**: `POST /api/data/data_source` + configure connection + set secrets
150
+ 5. **Link sources to systems**: `POST /api/data/link/data_system/data_source`
151
+ 6. **Create Data Objects**: `POST /api/data/data_object` + configure (path, format)
152
+ 7. **Link objects to sources**: `POST /api/data/link/data_source/data_object`
153
+ 8. **Create SADPs**: `POST /api/data/data_product` (type: SADP)
154
+ 9. **Link objects to products**: `POST /api/data/link/data_object/data_product`
155
+ 10. **Set schemas + builders**: configure compute pipelines
156
+ 11. **Create CADPs**: `POST /api/data/data_product` (type: CADP), link to SADPs
157
+ 12. **Run compute**: `POST /api/data/data_product/compute/run`
158
+ 13. **Create Applications**: link apps to products they consume
159
+ 14. **Query data**: via Trino SQL or the data endpoint
160
+
161
+ ## Services & Ports
162
+
163
+ | Service | Port | Purpose |
164
+ |---------|------|---------|
165
+ | Frontend | 3002 | Web UI (Next.js) |
166
+ | Backend | 9001 | REST API (Python/FastAPI) |
167
+ | Storage Engine | 9002 | S3-compatible storage (MinIO) |
168
+ | Trino | 8081 | Distributed SQL engine |
169
+ | OPA | 8181 | Policy/authorization engine |
170
+ | Kafka/Redpanda | 9092 | Event streaming |
171
+ | Postgres | 5432 | Metadata database |
172
+ | Hive Metastore | 9083 | Table metadata catalog |
173
+ | Vault | 18201 | Secrets management |
174
+ | NATS | 4222 | Message queue |
175
+ | Redis | 6379 | Caching |
176
+ | Watcher | 8000 | Compute orchestration |
177
+ | Scheduler | — | Job scheduling (cron) |
178
+ | Profiler | — | Data quality profiling |
179
+
180
+ ### Access Points
181
+ - **Web UI**: http://localhost:3002
182
+ - **API**: http://localhost:9001/api
183
+ - **SQL Workbench**: http://localhost:3002/home/sql-workbench
184
+ - **Storage (MinIO)**: http://localhost:9002
185
+
186
+ ## Stack Operations
187
+
188
+ ### Lifecycle
67
189
  ```bash
68
- docker compose build --pull
190
+ fops up # start all services
191
+ fops down # stop services
192
+ fops down --clean # stop + remove volumes (clean slate)
193
+ fops status # show running containers
194
+ fops restart # restart all services
69
195
  ```
196
+
197
+ ### Debugging
70
198
  ```bash
71
- fops doctor
199
+ fops doctor # full environment diagnostics
200
+ fops doctor --fix # auto-fix detected issues
201
+ fops logs # tail all logs
202
+ fops logs backend # tail specific service
72
203
  ```
73
204
 
74
- ### Common Issues
205
+ ### First-Time Setup
206
+ ```bash
207
+ npm install -g @meshxdata/fops
208
+ fops init # clone repo, set up .env, init submodules
209
+ fops up # boot the stack
210
+ fops doctor # verify health
211
+ ```
212
+
213
+ If the context shows no containers running or .env not configured, **always lead with `fops init` (then `fops up`)** before anything else — don't assume the environment is ready.
75
214
 
76
- **Containers stuck in "unhealthy"**: Check logs for the specific service, then restart. Often caused by a dependency not being ready yet — `fops down` then `fops up` usually resolves it.
215
+ ### Suggesting Commands
216
+ Always suggest **2–3 commands** in separate fenced blocks. Pair primary action with follow-up:
217
+ - Restart → logs: `fops restart kafka` + `fops logs kafka`
218
+ - Debug → fix: `fops doctor` + `fops doctor --fix`
219
+ - Status → logs: `fops status` + `fops logs`
220
+
221
+ ### Stale Images
222
+ Images older than 7 days are likely stale. For errors about missing packages or broken deps:
223
+ ```bash
224
+ docker compose build --pull
225
+ ```
77
226
 
78
- **Port conflicts**: Run `fops doctor` to see which ports are in use. Kill the conflicting process or change the port mapping in `.env`.
227
+ ## Common Issues
79
228
 
80
- **ECR auth expired**: Run `fops login` or `fops doctor --fix` to re-authenticate with AWS ECR.
229
+ **"how do I create a foundation/mesh?"**: Use the Web UI at http://localhost:3002 or the API. Create a mesh first, then add systems, sources, objects, and products. See "Workflow" section above.
81
230
 
82
- **Missing .env**: Run `fops setup` to regenerate from `.env.example`.
231
+ **Containers stuck unhealthy**: dependency not ready. `fops down` then `fops up`.
83
232
 
84
- **Submodules out of date**: Run `fops setup` to re-init and pull submodules.
233
+ **Port conflicts**: `fops doctor` shows which ports are in use.
85
234
 
86
- ### Services & Ports
235
+ **ECR auth expired**: `fops doctor --fix` to re-authenticate.
87
236
 
88
- | Service | Port | Purpose |
89
- |-----------------|-------|----------------------------|
90
- | Backend | 9001 | Core API server |
91
- | Frontend | 3002 | Web UI |
92
- | Storage Engine | 9002 | Object storage layer |
93
- | Trino | 8081 | Distributed SQL engine |
94
- | OPA | 8181 | Policy engine |
95
- | Kafka | 9092 | Event streaming |
96
- | Postgres | 5432 | Metadata database |
97
- | Hive Metastore | 9083 | Table metadata catalog |
98
- | Vault | 18201 | Secrets management |
237
+ **Missing .env**: `fops setup` regenerates from `.env.example`.
99
238
 
100
- ### First-Time Setup
239
+ **Backend migrations failed**: check `fops logs backend` — usually a Postgres connection issue. Ensure Postgres is healthy first, then restart: `docker compose restart foundation-backend-migrations`.
101
240
 
102
- ```bash
103
- npm install -g @meshxdata/fops
104
- fops init
105
- fops up
106
- fops doctor
107
- ```
241
+ **Compute jobs failing**: check watcher logs `fops logs watcher`, verify storage engine is healthy, check Spark/K3s resources.
package/src/ui/input.js CHANGED
@@ -4,19 +4,24 @@ import { render, Box, Text, useInput, useApp } from "ink";
4
4
  const h = React.createElement;
5
5
 
6
6
  /**
7
- * Input box with history support
7
+ * Full-width horizontal separator line
8
8
  */
9
- export function InputBox({ onSubmit, onExit, history = [], placeholder = "Type a message..." }) {
9
+ function Separator() {
10
+ const cols = process.stdout.columns || 80;
11
+ return h(Text, { dimColor: true }, "\u2500".repeat(cols));
12
+ }
13
+
14
+ /**
15
+ * Input with history support — Claude Code style (lines + status bar)
16
+ */
17
+ export function InputBox({ onSubmit, onExit, history = [], statusText }) {
10
18
  const [input, setInput] = useState("");
11
19
  const [historyIndex, setHistoryIndex] = useState(-1);
12
20
  const [cursorVisible, setCursorVisible] = useState(true);
13
21
  const { exit } = useApp();
14
22
 
15
- // Blinking cursor effect
16
23
  useEffect(() => {
17
- const interval = setInterval(() => {
18
- setCursorVisible(v => !v);
19
- }, 500);
24
+ const interval = setInterval(() => setCursorVisible(v => !v), 500);
20
25
  return () => clearInterval(interval);
21
26
  }, []);
22
27
 
@@ -69,29 +74,24 @@ export function InputBox({ onSubmit, onExit, history = [], placeholder = "Type a
69
74
  }
70
75
  });
71
76
 
72
- const cursor = cursorVisible ? "" : " ";
73
- const displayText = input || "";
77
+ const cursor = cursorVisible ? "\u258b" : " ";
74
78
 
75
- return h(Box, {
76
- flexDirection: "column",
77
- borderStyle: "round",
78
- borderColor: "cyan",
79
- paddingX: 1,
80
- marginTop: 1,
81
- },
79
+ return h(Box, { flexDirection: "column" },
80
+ h(Separator),
82
81
  h(Box, null,
83
- h(Text, { color: "cyan", bold: true }, " "),
84
- h(Text, null, displayText),
82
+ h(Text, { color: "cyan", bold: true }, "\u276f "),
83
+ h(Text, null, input),
85
84
  h(Text, { color: "cyan" }, cursor)
86
85
  ),
87
- !input && h(Text, { dimColor: true }, placeholder)
86
+ h(Separator),
87
+ statusText && h(Text, { dimColor: true }, " " + statusText)
88
88
  );
89
89
  }
90
90
 
91
91
  // State for standalone input
92
92
  let inputState = { resolve: null, history: [] };
93
93
 
94
- export function StandaloneInput({ placeholder, onResult }) {
94
+ export function StandaloneInput({ onResult, statusText }) {
95
95
  const [input, setInput] = useState("");
96
96
  const [historyIndex, setHistoryIndex] = useState(-1);
97
97
  const [cursorVisible, setCursorVisible] = useState(true);
@@ -151,43 +151,40 @@ export function StandaloneInput({ placeholder, onResult }) {
151
151
  }
152
152
  });
153
153
 
154
- return h(Box, {
155
- flexDirection: "column",
156
- borderStyle: "round",
157
- borderColor: "cyan",
158
- paddingX: 1,
159
- },
154
+ const cursor = cursorVisible ? "\u258b" : " ";
155
+
156
+ return h(Box, { flexDirection: "column" },
157
+ h(Separator),
160
158
  h(Box, null,
161
- h(Text, { color: "cyan", bold: true }, " "),
159
+ h(Text, { color: "cyan", bold: true }, "\u276f "),
162
160
  h(Text, null, input),
163
- h(Text, { color: "cyan" }, cursorVisible ? "▋" : " ")
161
+ h(Text, { color: "cyan" }, cursor)
164
162
  ),
165
- !input && placeholder && h(Text, { dimColor: true }, placeholder)
163
+ h(Separator),
164
+ statusText && h(Text, { dimColor: true }, " " + statusText)
166
165
  );
167
166
  }
168
167
 
169
168
  /**
170
- * Prompt for input with history support
169
+ * Prompt for input with history support — Claude Code style
171
170
  * Returns the input string or null if cancelled
172
171
  */
173
- export async function promptInput(placeholder = "Type a message... (↑↓ history, esc to exit)") {
172
+ export async function promptInput(statusText = "/exit to quit \u00b7 \u2191\u2193 history \u00b7 esc to cancel") {
174
173
  return new Promise((resolve) => {
175
174
  let resolved = false;
176
- let userInput = null;
177
175
  const onResult = (result) => {
178
176
  if (resolved) return;
179
177
  resolved = true;
180
- userInput = result;
181
178
  clear();
182
179
  unmount();
183
180
  // Echo what user typed and reset terminal
184
181
  process.stdout.write("\x1b[0m\n");
185
182
  if (result) {
186
- console.log("\x1b[36m❯\x1b[0m " + result + "\n");
183
+ console.log("\x1b[36m\u276f\x1b[0m " + result + "\n");
187
184
  }
188
185
  setTimeout(() => resolve(result), 50);
189
186
  };
190
- const { unmount, clear } = render(h(StandaloneInput, { placeholder, onResult }));
187
+ const { unmount, clear } = render(h(StandaloneInput, { onResult, statusText }));
191
188
  });
192
189
  }
193
190