archbyte 0.2.0 → 0.2.2

This diff shows the content of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/bin/archbyte.js CHANGED
@@ -27,7 +27,7 @@ const program = new Command();
 program
   .name('archbyte')
   .description('ArchByte - See what agents build. As they build it.')
-  .version('0.2.0')
+  .version('0.2.2')
   .addHelpText('after', `
 Quick start:
   $ archbyte login    Sign in
@@ -79,6 +79,7 @@ program
   .option('--api-key <key>', 'Model API key (overrides config)')
   .option('-p, --port <number>', 'UI server port (default: 3847)', parseInt)
   .option('-v, --verbose', 'Show detailed output')
+  .option('--force', 'Force full re-scan (skip incremental detection)')
   .option('--dry-run', 'Preview without running')
   .action(async (options) => {
     await requireLicense('analyze');
@@ -94,7 +95,7 @@ program
   .option('--api-key <key>', 'Model API key (overrides config)')
   .option('--static', 'Static-only analysis (no model, free)')
   .option('--skip-llm', 'Alias for --static')
-  .option('--full', 'Force full re-scan (skip incremental detection)')
+  .option('--force', 'Force full re-scan (skip incremental detection)')
   .option('--dry-run', 'Preview without running')
   .action(async (options) => {
     await requireLicense('analyze');
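For context, the `--full` flag on the analyze command is renamed to `--force`, and the run command gains the same flag; Commander exposes it to the handlers as a boolean `options.force`. A minimal, hypothetical sketch of the wiring (the actual command setup in bin/archbyte.js is only partially shown in this diff and may differ):

    // Hypothetical excerpt, assuming a standard Commander subcommand
    program
      .command('analyze')
      .option('--force', 'Force full re-scan (skip incremental detection)')
      .action(async (options) => {
        // Commander turns '--force' into a boolean options.force
        await handleAnalyze({ force: options.force, dryRun: options.dryRun });
      });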
@@ -15,7 +15,7 @@ export async function enhanceWithLLM(analysis, provider, config, projectRoot, on
     if (analysis.gaps.length > 0) {
         onProgress?.(`Including ${analysis.gaps.length} gap(s) for LLM resolution`);
     }
-    const model = resolveModel(config.provider, "standard", config.modelOverrides);
+    const model = resolveModel(config.provider, "standard", config.modelOverrides, config.model);
     onProgress?.(`Calling ${model} for enhancement...`);
     const response = await provider.chat({
         model,
@@ -100,7 +100,7 @@ const MAX_TOKENS = {
 };
 async function runAgent(agent, ctx, provider, config, priorResults, onProgress) {
     const start = Date.now();
-    const model = resolveModel(config.provider, agent.modelTier, config.modelOverrides);
+    const model = resolveModel(config.provider, agent.modelTier, config.modelOverrides, config.model);
     const { system, user } = agent.buildPrompt(ctx, priorResults);
     onProgress?.(`  ${agent.name}: calling ${model}...`);
     const maxTokens = MAX_TOKENS[agent.id] ?? 4096;
@@ -10,7 +10,7 @@ export class OllamaProvider {
             ...params.messages.map((m) => this.toOllamaMessage(m)),
         ];
         const body = {
-            model: params.model ?? "llama3.3",
+            model: params.model,
             messages,
             stream: false,
         };
@@ -65,7 +65,7 @@ export class OllamaProvider {
             ...params.messages.map((m) => this.toOllamaMessage(m)),
         ];
         const body = {
-            model: params.model ?? "llama3.3",
+            model: params.model,
             messages,
             stream: true,
         };
@@ -90,7 +90,7 @@ export class Orchestrator {
         return false;
     }
     async runAgent(agent, priorResults, onProgress) {
-        const model = resolveModel(this.config.provider, agent.modelTier, this.config.modelOverrides);
+        const model = resolveModel(this.config.provider, agent.modelTier, this.config.modelOverrides, this.config.model);
         // Create a model-routed provider wrapper
         const routedProvider = {
             name: this.provider.name,
@@ -101,6 +101,7 @@ export type ProviderName = "anthropic" | "openai" | "google" | "ollama";
 export interface ArchByteConfig {
     provider: ProviderName;
     apiKey: string;
+    model?: string;
     modelOverrides?: Partial<Record<ModelTier, string>>;
     ollamaBaseUrl?: string;
 }
@@ -114,4 +115,4 @@ export interface PipelineResult {
     }>;
 }
 export declare const MODEL_MAP: Record<ProviderName, Record<ModelTier, string>>;
-export declare function resolveModel(provider: ProviderName, tier: ModelTier, overrides?: Partial<Record<ModelTier, string>>): string;
+export declare function resolveModel(provider: ProviderName, tier: ModelTier, overrides?: Partial<Record<ModelTier, string>>, model?: string): string;
@@ -17,13 +17,15 @@ export const MODEL_MAP = {
         advanced: "gemini-2.5-pro",
     },
     ollama: {
-        fast: "llama3.2",
-        standard: "llama3.3",
-        advanced: "llama3.3",
+        fast: "qwen2.5-coder",
+        standard: "qwen2.5-coder",
+        advanced: "qwen2.5-coder",
     },
 };
-export function resolveModel(provider, tier, overrides) {
+export function resolveModel(provider, tier, overrides, model) {
     if (overrides?.[tier])
         return overrides[tier];
+    if (model)
+        return model;
     return MODEL_MAP[provider][tier];
 }
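Taken together, the new optional `model` argument sits between the per-tier overrides and the built-in provider defaults. A small usage sketch of the resolution order as implemented above (the override and model values are made up for illustration; only the "qwen2.5-coder" default comes from MODEL_MAP):

    // resolveModel as defined above
    resolveModel("ollama", "fast", { fast: "codellama" }, "mistral"); // "codellama" (per-tier override wins)
    resolveModel("ollama", "fast", undefined, "mistral");             // "mistral"   (configured model, new in 0.2.2)
    resolveModel("ollama", "fast");                                   // "qwen2.5-coder" (MODEL_MAP default)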
@@ -7,7 +7,7 @@ interface AnalyzeOptions {
     static?: boolean;
     skipLlm?: boolean;
     dryRun?: boolean;
-    full?: boolean;
+    force?: boolean;
 }
 export declare function handleAnalyze(options: AnalyzeOptions): Promise<void>;
 /**
@@ -6,6 +6,7 @@ import { resolveConfig } from "./config.js";
 import { recordUsage } from "./license-gate.js";
 import { staticResultToSpec, writeSpec, writeMetadata, loadSpec, loadMetadata } from "./yaml-io.js";
 import { getChangedFiles, mapFilesToComponents, shouldRunAgents, isGitAvailable } from "./incremental.js";
+import { progressBar } from "./ui.js";
 export async function handleAnalyze(options) {
     const rootDir = process.cwd();
     const isStaticOnly = options.static || options.skipLlm;
@@ -40,13 +41,13 @@ export async function handleAnalyze(options) {
     // ─── Static-only mode (--static / --skip-llm) ───
     if (isStaticOnly) {
         const startTime = Date.now();
-        console.log(chalk.bold("Running static analysis (no model)..."));
-        console.log();
+        const progress = progressBar(3);
+        progress.update(0, "Running static analysis...");
         const { runStaticAnalysis } = await import("../agents/static/index.js");
         const result = await runStaticAnalysis(rootDir, (msg) => {
-            const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
-            console.log(chalk.gray(`  [${elapsed}s] ${msg}`));
+            progress.update(0, `Static analysis: ${msg}`);
         });
+        progress.update(1, "Building analysis...");
         const analysis = buildAnalysisFromStatic(result, rootDir);
         const duration = Date.now() - startTime;
         // Stamp scan metadata on analysis.json (backward compat)
@@ -59,8 +60,9 @@ export async function handleAnalyze(options) {
         const spec = staticResultToSpec(result, rootDir, existingSpec?.rules);
         writeSpec(rootDir, spec);
         writeScanMetadata(rootDir, duration, "static");
-        console.log(chalk.green(`Saved spec to: .archbyte/archbyte.yaml`));
+        progress.update(2, "Generating diagram...");
         await autoGenerate(rootDir, options);
+        progress.done("Analysis complete");
         printSummary(analysis, duration, "static");
         return;
     }
@@ -113,16 +115,16 @@ export async function handleAnalyze(options) {
     let incrementalContext;
     const priorSpec = loadSpec(rootDir);
     const priorMeta = loadMetadata(rootDir);
-    if (priorSpec && !options.full && isGitAvailable(rootDir) && priorMeta?.lastCommit) {
+    if (priorSpec && !options.force && isGitAvailable(rootDir) && priorMeta?.lastCommit) {
         const changedFiles = getChangedFiles(rootDir, priorMeta.lastCommit);
         if (changedFiles.length === 0) {
-            console.log(chalk.green("No changes detected since last scan. Use --full to force re-scan."));
+            console.log(chalk.green("No changes detected since last scan. Use --force to re-scan."));
             console.log();
             return;
         }
         const { affected, unmapped } = mapFilesToComponents(changedFiles, priorSpec);
         if (!shouldRunAgents(affected, unmapped)) {
-            console.log(chalk.green("Only config changes detected — no re-scan needed. Use --full to force."));
+            console.log(chalk.green("Only config changes detected — no re-scan needed. Use --force to re-scan."));
             console.log();
             return;
         }
@@ -140,11 +142,11 @@ export async function handleAnalyze(options) {
         console.log();
     }
     // 4. Run static context collection → LLM pipeline
-    console.log(chalk.bold("Phase 1: Collecting static context..."));
+    const progress = progressBar(7);
+    progress.update(0, "Collecting static context...");
     const { runStaticContextCollection } = await import("../agents/static/index.js");
     const ctx = await runStaticContextCollection(rootDir, (msg) => {
-        const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
-        console.log(chalk.gray(`  [${elapsed}s] ${msg}`));
+        progress.update(0, `Static context: ${msg}`);
     });
     // Save static context for debugging / re-runs
     const ctxPath = path.join(rootDir, ".archbyte", "static-context.json");
@@ -152,15 +154,41 @@ export async function handleAnalyze(options) {
         fs.mkdirSync(path.dirname(ctxPath), { recursive: true });
     }
     fs.writeFileSync(ctxPath, JSON.stringify(ctx, null, 2), "utf-8");
-    console.log(chalk.gray(`  Saved static context to: .archbyte/static-context.json`));
-    console.log();
-    console.log(chalk.bold(`Phase 2: Running model pipeline${incrementalContext ? " (incremental)" : ""} (3 parallel + 2 sequential)...`));
+    progress.update(1, "Static context collected");
+    progress.update(1, `Running 3 agents in parallel${incrementalContext ? " (incremental)" : ""}...`);
     const { runPipeline } = await import("../agents/pipeline/index.js");
     let result;
+    let pipelineStep = 1;
     try {
         result = await runPipeline(ctx, provider, config, (msg) => {
-            const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
-            console.log(chalk.gray(`  [${elapsed}s] ${msg}`));
+            // Map pipeline progress messages to bar steps
+            if (msg.startsWith("Phase 1:")) {
+                pipelineStep = 1;
+                progress.update(1, "Running 3 agents in parallel...");
+            }
+            else if (msg.startsWith("Phase 2:")) {
+                pipelineStep = 3;
+            }
+            else if (msg.includes(": done") && pipelineStep < 3) {
+                // A parallel agent finished
+                pipelineStep = Math.min(pipelineStep + 0.5, 2);
+                progress.update(pipelineStep, `Running agents in parallel... (${msg.trim()})`);
+            }
+            else if (msg.includes("Connection Mapper") || msg.includes("connection-mapper")) {
+                progress.update(3, "Running connection-mapper...");
+            }
+            else if (msg.includes("Validator") || msg.includes("validator")) {
+                progress.update(4, "Running validator...");
+            }
+            else if (msg.startsWith("Merging")) {
+                progress.update(5, "Merging results...");
+            }
+            else if (msg.startsWith("Token usage")) {
+                progress.update(5, msg.trim());
+            }
+            else {
+                progress.update(pipelineStep, msg.trim());
+            }
         }, incrementalContext);
     }
     catch (err) {
@@ -196,8 +224,8 @@ export async function handleAnalyze(options) {
         }
         return;
     }
-    console.log();
     // 4. Convert to analysis.json format and write
+    progress.update(5, "Merging results...");
     const analysis = buildAnalysisFromStatic(result, rootDir);
     const duration = Date.now() - startTime;
     // Stamp scan metadata on analysis.json (backward compat)
@@ -215,8 +243,9 @@ export async function handleAnalyze(options) {
     const spec = staticResultToSpec(result, rootDir, existingSpec?.rules);
     writeSpec(rootDir, spec);
     writeScanMetadata(rootDir, duration, "pipeline", ctx.fileTree.totalFiles, result.tokenUsage);
-    console.log(chalk.green(`Saved spec to: .archbyte/archbyte.yaml`));
+    progress.update(6, "Generating diagram...");
     await autoGenerate(rootDir, options);
+    progress.done("Analysis complete");
     // Record usage (best-effort, non-blocking)
     recordUsage({
         projectName: path.basename(rootDir),
@@ -269,8 +298,6 @@ function writeScanMetadata(rootDir, durationMs, mode, filesScanned, tokenUsage)
     writeMetadata(rootDir, meta);
 }
 async function autoGenerate(rootDir, options) {
-    console.log();
-    console.log(chalk.gray("Generating architecture diagram..."));
     const analysisPath = path.join(rootDir, ".archbyte", "analysis.json");
     try {
         const { handleGenerate } = await import("./generate.js");
package/dist/cli/auth.js CHANGED
@@ -51,8 +51,8 @@ export async function handleLogin(provider) {
         console.error(chalk.red(`Login failed: ${err instanceof Error ? err.message : "Unknown error"}`));
         console.log();
         console.log(chalk.gray("You can also log in manually:"));
-        console.log(chalk.gray(`  1. Visit ${API_BASE.replace("api.", "")}/auth`));
-        console.log(chalk.gray("  2. Copy your token"));
+        console.log(chalk.gray("  1. Visit https://archbyte.heartbyte.io"));
+        console.log(chalk.gray("  2. Sign in and copy your token from the dashboard"));
         console.log(chalk.gray("  3. Run: archbyte login --token <your-token>"));
         process.exit(1);
     }
@@ -326,7 +326,11 @@ function startOAuthFlow(provider = "github") {
     const server = http.createServer(async (req, res) => {
         const url = new URL(req.url ?? "/", `http://localhost:${CLI_CALLBACK_PORT}`);
         if (url.pathname === "/callback") {
-            const token = url.searchParams.get("token");
+            // Extract token from raw query string, not URLSearchParams
+            // (URLSearchParams decodes '+' as space per x-www-form-urlencoded, corrupting JWT signatures)
+            const rawQuery = (req.url ?? "").split("?")[1] ?? "";
+            const tokenMatch = rawQuery.match(/(?:^|&)token=([^&]+)/);
+            const token = tokenMatch ? decodeURIComponent(tokenMatch[1]) : null;
             if (!token) {
                 res.writeHead(400, { "Content-Type": "text/html" });
                 res.end("<h1>Login failed</h1><p>No token received. Close this window and try again.</p>");
@@ -342,7 +346,8 @@ function startOAuthFlow(provider = "github") {
                 headers: { Authorization: `Bearer ${token}` },
             });
             if (!meRes.ok) {
-                reject(new Error("Failed to fetch user info"));
+                const errBody = await meRes.text().catch(() => "");
+                reject(new Error(`Failed to fetch user info (${meRes.status}: ${errBody})`));
                 return;
             }
             const { user } = (await meRes.json());
@@ -73,6 +73,9 @@ function showConfig() {
     }
     console.log(`  ${chalk.bold("provider")}: ${config.provider ?? chalk.gray("not set")}`);
     console.log(`  ${chalk.bold("api-key")}: ${config.apiKey ? maskKey(config.apiKey) : chalk.gray("not set")}`);
+    if (config.model) {
+        console.log(`  ${chalk.bold("model")}: ${config.model}`);
+    }
     if (config.ollamaBaseUrl) {
         console.log(`  ${chalk.bold("ollama-url")}: ${config.ollamaBaseUrl}`);
     }
@@ -86,7 +89,13 @@ function setConfig(key, value) {
                 console.error(chalk.red(`Invalid provider: ${value}. Must be: ${VALID_PROVIDERS.join(", ")}`));
                 process.exit(1);
             }
+            const oldProvider = config.provider;
             config.provider = value;
+            // Clear model override when switching providers — model names are provider-specific
+            if (oldProvider && oldProvider !== value && config.model) {
+                delete config.model;
+                console.log(chalk.yellow(`Cleared model override (was for ${oldProvider})`));
+            }
             break;
         }
         case "api-key":
@@ -94,13 +103,22 @@ function setConfig(key, value) {
         case "key":
             config.apiKey = value;
             break;
+        case "model":
+            if (value === "" || value === "default") {
+                delete config.model;
+                saveConfig(config);
+                console.log(chalk.green("Cleared model override (will use provider defaults)"));
+                return;
+            }
+            config.model = value;
+            break;
         case "ollama-url":
         case "ollamaUrl":
             config.ollamaBaseUrl = value;
             break;
         default:
             console.error(chalk.red(`Unknown config key: ${key}`));
-            console.error(chalk.gray("  Valid keys: provider, api-key, ollama-url"));
+            console.error(chalk.gray("  Valid keys: provider, api-key, model, ollama-url"));
             process.exit(1);
     }
     saveConfig(config);
@@ -117,6 +135,9 @@ function getConfig(key) {
         case "key":
             console.log(config.apiKey ?? "");
             break;
+        case "model":
+            console.log(config.model ?? "");
+            break;
         case "ollama-url":
         case "ollamaUrl":
             console.log(config.ollamaBaseUrl ?? "");
@@ -162,6 +183,7 @@ export function resolveConfig() {
         return {
             provider: "ollama",
             apiKey: "",
+            model: process.env.ARCHBYTE_MODEL ?? config.model,
             ollamaBaseUrl: process.env.OLLAMA_BASE_URL ??
                 config.ollamaBaseUrl ??
                 "http://localhost:11434",
@@ -172,6 +194,7 @@ export function resolveConfig() {
     return {
         provider,
         apiKey,
+        model: process.env.ARCHBYTE_MODEL ?? config.model,
         ollamaBaseUrl: config.ollamaBaseUrl,
     };
 }
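The resolved config now carries an optional model, with the ARCHBYTE_MODEL environment variable taking precedence over the value saved in the config file. A sketch of the effective precedence, pieced together from the hunks above (the variable names are the ones the diff introduces; nothing beyond them is implied):

    // Effective model selection, highest priority first:
    // 1. config.modelOverrides[tier]   (per-tier override)
    // 2. process.env.ARCHBYTE_MODEL    (environment variable)
    // 3. config.model                  (saved config value)
    // 4. MODEL_MAP[provider][tier]     (built-in default)
    const config = resolveConfig();   // folds env ?? saved model into config.model
    const model = resolveModel(config.provider, "standard", config.modelOverrides, config.model);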
@@ -3,6 +3,7 @@ import * as fs from "fs";
 import chalk from "chalk";
 import { generateArchitecture } from "../server/src/generator/index.js";
 import { loadSpec, specToAnalysis } from "./yaml-io.js";
+import { spinner } from "./ui.js";
 /**
  * Generate excalidraw diagram from analysis JSON
  */
@@ -117,6 +118,7 @@ export async function handleGenerate(options) {
         },
     };
     // Generate diagram
+    const sp = spinner("Generating diagram");
     const diagram = generateArchitecture(project);
     // Determine output path
     const outputPath = options.output || path.join(rootDir, ".archbyte", "architecture.json");
@@ -213,7 +215,7 @@ export async function handleGenerate(options) {
     }
     // Write diagram
     fs.writeFileSync(outputPath, JSON.stringify(diagram, null, 2), "utf-8");
-    console.log(chalk.green("✓ Diagram generated successfully!"));
+    sp.stop("done");
     console.log();
     console.log(chalk.bold("Summary:"));
     console.log(chalk.gray(`  Components: ${analysis.components.length}`));
package/dist/cli/run.d.ts CHANGED
@@ -5,6 +5,7 @@ interface RunOptions {
     apiKey?: string;
     port?: number;
     verbose?: boolean;
+    force?: boolean;
     dryRun?: boolean;
 }
 export declare function handleRun(options: RunOptions): Promise<void>;
package/dist/cli/run.js CHANGED
@@ -13,6 +13,7 @@ export async function handleRun(options) {
         skipLlm: options.skipLlm,
         provider: options.provider,
         apiKey: options.apiKey,
+        force: options.force,
         dryRun: options.dryRun,
     });
     if (options.dryRun)
package/dist/cli/setup.js CHANGED
@@ -2,7 +2,7 @@ import * as fs from "fs";
 import * as path from "path";
 import { fileURLToPath } from "url";
 import chalk from "chalk";
-import { resolveModel, MODEL_MAP } from "../agents/runtime/types.js";
+import { resolveModel } from "../agents/runtime/types.js";
 import { createProvider } from "../agents/providers/router.js";
 import { select, spinner, confirm } from "./ui.js";
 const __filename = fileURLToPath(import.meta.url);
@@ -75,6 +75,21 @@ function askHidden(prompt) {
         stdin.on("data", onData);
     });
 }
+async function fetchOllamaModels(baseUrl) {
+    try {
+        const controller = new AbortController();
+        const timeout = setTimeout(() => controller.abort(), 5000);
+        const res = await fetch(`${baseUrl}/api/tags`, { signal: controller.signal });
+        clearTimeout(timeout);
+        if (!res.ok)
+            return [];
+        const data = await res.json();
+        return (data.models ?? []).map((m) => m.name);
+    }
+    catch {
+        return [];
+    }
+}
 async function validateProviderSilent(providerName, apiKey, ollamaBaseUrl) {
     try {
         if (providerName === "ollama") {
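The new fetchOllamaModels helper queries Ollama's local /api/tags endpoint, which lists installed models, and keeps only the name of each entry. Roughly, and with the response abbreviated (the exact field set varies by Ollama version), the setup flow uses it like this:

    // Abbreviated /api/tags response:
    // { "models": [ { "name": "qwen2.5-coder:7b", "modified_at": "...", "size": ... }, ... ] }
    const models = await fetchOllamaModels("http://localhost:11434");
    // e.g. ["qwen2.5-coder:7b", "llama3.2:latest"] — fed into select() during setup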
@@ -86,30 +101,7 @@ async function validateProviderSilent(providerName, apiKey, ollamaBaseUrl) {
             clearTimeout(timeout);
             if (!res.ok)
                 return false;
-            // Check installed models
-            try {
-                const tagsController = new AbortController();
-                const tagsTimeout = setTimeout(() => tagsController.abort(), 5000);
-                const tagsRes = await fetch(`${url}/api/tags`, { signal: tagsController.signal });
-                clearTimeout(tagsTimeout);
-                if (tagsRes.ok) {
-                    const data = await tagsRes.json();
-                    const installedModels = (data.models ?? []).map((m) => m.name.split(":")[0]);
-                    const requiredModels = Object.values(MODEL_MAP.ollama);
-                    const uniqueRequired = [...new Set(requiredModels)];
-                    const compatible = uniqueRequired.filter((m) => installedModels.some((installed) => installed === m));
-                    if (compatible.length > 0) {
-                        console.log(chalk.gray(`  Compatible models: ${compatible.join(", ")}`));
-                    }
-                    else {
-                        console.log(chalk.yellow(`  Warning: no compatible models found.`));
-                        console.log(chalk.gray(`  Run: ollama pull ${uniqueRequired[0]}`));
-                    }
-                }
-            }
-            catch {
-                // Model check is best-effort
-            }
+            // Model listing happens in setup flow, not here
             return true;
         }
     catch {
@@ -153,12 +145,27 @@ export async function handleSetup() {
     config.provider = provider;
     const selected = PROVIDERS[idx];
     console.log(chalk.green(`\n  ✓ Provider: ${selected.label}`));
-    // Step 2: API key (skip for Ollama)
+    // Step 2: API key / model selection
     if (provider === "ollama") {
         config.ollamaBaseUrl = config.ollamaBaseUrl ?? "http://localhost:11434";
         console.log(chalk.gray(`  Ollama URL: ${config.ollamaBaseUrl}`));
+        // Let user pick from installed models
+        const models = await fetchOllamaModels(config.ollamaBaseUrl);
+        if (models.length > 0) {
+            const modelIdx = await select("\n  Choose a model:", models.map((m) => m));
+            config.model = models[modelIdx];
+            console.log(chalk.green(`  ✓ Model: ${config.model}`));
+        }
+        else {
+            console.log(chalk.yellow(`  Warning: no models installed.`));
+            console.log(chalk.gray(`  Run: ollama pull <model-name>`));
+        }
     }
     else {
+        // Clear Ollama model override — model names are provider-specific
+        if (config.model) {
+            delete config.model;
+        }
         console.log();
         const apiKey = await askHidden(chalk.bold("  API key: "));
         if (!apiKey) {
package/dist/cli/ui.d.ts CHANGED
@@ -10,6 +10,16 @@ export declare function spinner(label: string): Spinner;
  * Non-TTY fallback: returns 0 (first option).
  */
 export declare function select(prompt: string, options: string[]): Promise<number>;
+interface ProgressBar {
+    update(step: number, label: string): void;
+    done(label?: string): void;
+}
+/**
+ * Step-based progress bar with elapsed time.
+ * Renders: ▓▓▓▓▓▓▓▓░░░░░░░░░░░░  45% │ Running agents... (12.3s)
+ * Falls back to simple log lines when not a TTY.
+ */
+export declare function progressBar(totalSteps: number): ProgressBar;
 /**
  * Y/n confirmation prompt. Returns true for y/Enter, false for n.
  * Non-TTY fallback: returns true.
package/dist/cli/ui.js CHANGED
@@ -105,6 +105,50 @@ export function select(prompt, options) {
         stdin.on("data", onData);
     });
 }
+/**
+ * Step-based progress bar with elapsed time.
+ * Renders: ▓▓▓▓▓▓▓▓░░░░░░░░░░░░  45% │ Running agents... (12.3s)
+ * Falls back to simple log lines when not a TTY.
+ */
+export function progressBar(totalSteps) {
+    const startTime = Date.now();
+    let lastLabel = "";
+    function elapsed() {
+        return ((Date.now() - startTime) / 1000).toFixed(1);
+    }
+    if (!process.stdout.isTTY) {
+        return {
+            update(_step, label) {
+                if (label !== lastLabel) {
+                    console.log(`  [${elapsed()}s] ${label}`);
+                    lastLabel = label;
+                }
+            },
+            done(label) {
+                console.log(`  [${elapsed()}s] ${label ?? "Done"}`);
+            },
+        };
+    }
+    const BAR_WIDTH = 20;
+    function render(step, label) {
+        const pct = Math.min(Math.round((step / totalSteps) * 100), 100);
+        const filled = Math.round((step / totalSteps) * BAR_WIDTH);
+        const empty = BAR_WIDTH - filled;
+        const bar = chalk.cyan("▓".repeat(filled)) + chalk.gray("░".repeat(empty));
+        const pctStr = `${pct}%`.padStart(4);
+        process.stdout.write(`\r\x1b[K  ${bar} ${chalk.white(pctStr)} ${chalk.gray("│")} ${label} ${chalk.gray(`(${elapsed()}s)`)}`);
+    }
+    return {
+        update(step, label) {
+            lastLabel = label;
+            render(step, label);
+        },
+        done(label) {
+            render(totalSteps, label ?? "Done");
+            process.stdout.write("\n");
+        },
+    };
+}
 /**
  * Y/n confirmation prompt. Returns true for y/Enter, false for n.
  * Non-TTY fallback: returns true.
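To illustrate how the analyze command drives this API, here is a minimal usage sketch; the step labels are made up, while analyze.js itself uses progressBar(3) for static-only runs and progressBar(7) for the full pipeline:

    // progressBar as defined above (dist/cli/ui.js)
    const progress = progressBar(3);                 // 3 steps total
    progress.update(0, "Scanning files...");         // 0%, label plus elapsed time
    progress.update(1, "Building analysis...");      // ~33%
    progress.update(2, "Generating diagram...");     // ~67%
    progress.done("Analysis complete");              // 100%; newline on a TTY, a log line otherwise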
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "archbyte",
-  "version": "0.2.0",
+  "version": "0.2.2",
   "description": "ArchByte - See what agents build. As they build it.",
   "type": "module",
   "bin": {