postgresai 0.14.0-dev.52 → 0.14.0-dev.54

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -29,11 +29,10 @@ brew install postgresai
 
 ## Usage
 
- The `postgresai` package provides three command aliases (prefer `postgresai`):
+ The `postgresai` package provides two command aliases:
 ```bash
- postgres-ai --help
- postgresai --help
- pgai --help
+ postgresai --help # Canonical, discoverable
+ pgai --help # Short and convenient
 ```
 
 You can also run it without installing via `npx`:
@@ -126,17 +125,17 @@ This will:
 
 Start monitoring with demo database:
 ```bash
- postgres-ai mon local-install --demo
+ postgresai mon local-install --demo
 ```
 
 Start monitoring with your own database:
 ```bash
- postgres-ai mon local-install --db-url postgresql://user:pass@host:5432/db
+ postgresai mon local-install --db-url postgresql://user:pass@host:5432/db
 ```
 
 Complete automated setup with API key and database:
 ```bash
- postgres-ai mon local-install --api-key your_key --db-url postgresql://user:pass@host:5432/db -y
+ postgresai mon local-install --api-key your_key --db-url postgresql://user:pass@host:5432/db -y
 ```
 
 This will:
@@ -153,19 +152,19 @@ This will:
 #### Service lifecycle
 ```bash
 # Complete setup with various options
- postgres-ai mon local-install # Interactive setup for production
- postgres-ai mon local-install --demo # Demo mode with sample database
- postgres-ai mon local-install --api-key <key> # Setup with API key
- postgres-ai mon local-install --db-url <url> # Setup with database URL
- postgres-ai mon local-install --api-key <key> --db-url <url> # Complete automated setup
- postgres-ai mon local-install -y # Auto-accept all defaults
+ postgresai mon local-install # Interactive setup for production
+ postgresai mon local-install --demo # Demo mode with sample database
+ postgresai mon local-install --api-key <key> # Setup with API key
+ postgresai mon local-install --db-url <url> # Setup with database URL
+ postgresai mon local-install --api-key <key> --db-url <url> # Complete automated setup
+ postgresai mon local-install -y # Auto-accept all defaults
 
 # Service management
- postgres-ai mon start # Start monitoring services
- postgres-ai mon stop # Stop monitoring services
- postgres-ai mon restart [service] # Restart all or specific monitoring service
- postgres-ai mon status # Show monitoring services status
- postgres-ai mon health [--wait <sec>] # Check monitoring services health
+ postgresai mon start # Start monitoring services
+ postgresai mon stop # Stop monitoring services
+ postgresai mon restart [service] # Restart all or specific monitoring service
+ postgresai mon status # Show monitoring services status
+ postgresai mon health [--wait <sec>] # Check monitoring services health
 ```
 
 ##### local-install options
@@ -176,21 +175,21 @@ postgres-ai mon health [--wait <sec>] # Check monitoring services health
 
 #### Monitoring target databases (`mon targets` subgroup)
 ```bash
- postgres-ai mon targets list # List databases to monitor
- postgres-ai mon targets add <conn-string> <name> # Add database to monitor
- postgres-ai mon targets remove <name> # Remove monitoring target
- postgres-ai mon targets test <name> # Test target connectivity
+ postgresai mon targets list # List databases to monitor
+ postgresai mon targets add <conn-string> <name> # Add database to monitor
+ postgresai mon targets remove <name> # Remove monitoring target
+ postgresai mon targets test <name> # Test target connectivity
 ```
 
 #### Configuration and maintenance
 ```bash
- postgres-ai mon config # Show monitoring configuration
- postgres-ai mon update-config # Apply configuration changes
- postgres-ai mon update # Update monitoring stack
- postgres-ai mon reset [service] # Reset service data
- postgres-ai mon clean # Cleanup artifacts
- postgres-ai mon check # System readiness check
- postgres-ai mon shell <service> # Open shell to monitoring service
+ postgresai mon config # Show monitoring configuration
+ postgresai mon update-config # Apply configuration changes
+ postgresai mon update # Update monitoring stack
+ postgresai mon reset [service] # Reset service data
+ postgresai mon clean # Cleanup artifacts
+ postgresai mon check # System readiness check
+ postgresai mon shell <service> # Open shell to monitoring service
 ```
 
 ### MCP server (`mcp` group)
@@ -250,16 +249,16 @@ postgresai issues view <issueId> > issue.json
 
 #### Grafana management
 ```bash
- postgres-ai mon generate-grafana-password # Generate new Grafana password
- postgres-ai mon show-grafana-credentials # Show Grafana credentials
+ postgresai mon generate-grafana-password # Generate new Grafana password
+ postgresai mon show-grafana-credentials # Show Grafana credentials
 ```
 
 ### Authentication and API key management
 ```bash
- postgres-ai auth # Authenticate via browser (OAuth)
- postgres-ai auth --set-key <key> # Store API key directly
- postgres-ai show-key # Show stored key (masked)
- postgres-ai remove-key # Remove stored key
+ postgresai auth # Authenticate via browser (OAuth)
+ postgresai auth --set-key <key> # Store API key directly
+ postgresai show-key # Show stored key (masked)
+ postgresai remove-key # Remove stored key
 ```
 
 ## Configuration
@@ -7,6 +7,7 @@ import * as yaml from "js-yaml";
 import * as fs from "fs";
 import * as path from "path";
 import * as os from "os";
+ import * as crypto from "node:crypto";
 import { Client } from "pg";
 import { startMcpServer } from "../lib/mcp-server";
 import { fetchIssues, fetchIssueComments, createIssueComment, fetchIssue } from "../lib/issues";
@@ -17,6 +18,8 @@ import * as authServer from "../lib/auth-server";
 import { maskSecret } from "../lib/util";
 import { createInterface } from "readline";
 import * as childProcess from "child_process";
+ import { REPORT_GENERATORS, CHECK_INFO, generateAllReports } from "../lib/checkup";
+ import { createCheckupReport, uploadCheckupReportJson, RpcError, formatRpcErrorForDisplay, withRetry } from "../lib/checkup-api";
 
 // Singleton readline interface for stdin prompts
 let rl: ReturnType<typeof createInterface> | null = null;
@@ -109,6 +112,255 @@ async function question(prompt: string): Promise<string> {
   });
 }
 
+ function expandHomePath(p: string): string {
+   const s = (p || "").trim();
+   if (!s) return s;
+   if (s === "~") return os.homedir();
+   if (s.startsWith("~/") || s.startsWith("~\\")) {
+     return path.join(os.homedir(), s.slice(2));
+   }
+   return s;
+ }
+
+ function createTtySpinner(
+   enabled: boolean,
+   initialText: string
+ ): { update: (text: string) => void; stop: (finalText?: string) => void } {
+   if (!enabled) {
+     return {
+       update: () => {},
+       stop: () => {},
+     };
+   }
+
+   const frames = ["|", "/", "-", "\\"];
+   const startTs = Date.now();
+   let text = initialText;
+   let frameIdx = 0;
+   let stopped = false;
+
+   const render = (): void => {
+     if (stopped) return;
+     const elapsedSec = ((Date.now() - startTs) / 1000).toFixed(1);
+     const frame = frames[frameIdx % frames.length] ?? frames[0] ?? "⠿";
+     frameIdx += 1;
+     process.stdout.write(`\r\x1b[2K${frame} ${text} (${elapsedSec}s)`);
+   };
+
+   const timer = setInterval(render, 120);
+   render(); // immediate feedback
+
+   return {
+     update: (t: string) => {
+       text = t;
+       render();
+     },
+     stop: (finalText?: string) => {
+       if (stopped) return;
+       // Set flag first so any queued render() calls exit early.
+       // JavaScript is single-threaded, so this is safe: queued callbacks
+       // run after stop() returns and will see stopped=true immediately.
+       stopped = true;
+       clearInterval(timer);
+       process.stdout.write("\r\x1b[2K");
+       if (finalText && finalText.trim()) {
+         process.stdout.write(finalText);
+       }
+       process.stdout.write("\n");
+     },
+   };
+ }
+
+ // ============================================================================
+ // Checkup command helpers
+ // ============================================================================
+
+ interface CheckupOptions {
+   checkId: string;
+   nodeName: string;
+   output?: string;
+   upload?: boolean;
+   project?: string;
+   json?: boolean;
+ }
+
+ interface UploadConfig {
+   apiKey: string;
+   apiBaseUrl: string;
+   project: string;
+ }
+
+ interface UploadSummary {
+   project: string;
+   reportId: number;
+   uploaded: Array<{ checkId: string; filename: string; chunkId: number }>;
+ }
+
+ /**
+  * Prepare and validate output directory for checkup reports.
+  * @returns Output path if valid, null if should exit with error
+  */
+ function prepareOutputDirectory(outputOpt: string | undefined): string | null | undefined {
+   if (!outputOpt) return undefined;
+
+   const outputDir = expandHomePath(outputOpt);
+   const outputPath = path.isAbsolute(outputDir) ? outputDir : path.resolve(process.cwd(), outputDir);
+
+   if (!fs.existsSync(outputPath)) {
+     try {
+       fs.mkdirSync(outputPath, { recursive: true });
+     } catch (e) {
+       const errAny = e as any;
+       const code = typeof errAny?.code === "string" ? errAny.code : "";
+       const msg = errAny instanceof Error ? errAny.message : String(errAny);
+       if (code === "EACCES" || code === "EPERM" || code === "ENOENT") {
+         console.error(`Error: Failed to create output directory: ${outputPath}`);
+         console.error(`Reason: ${msg}`);
+         console.error("Tip: choose a writable path, e.g. --output ./reports or --output ~/reports");
+         return null; // Signal to exit
+       }
+       throw e;
+     }
+   }
+   return outputPath;
+ }
+
+ /**
+  * Prepare upload configuration for checkup reports.
+  * @returns Upload config if valid, null if should exit, undefined if upload not needed
+  */
+ function prepareUploadConfig(
+   opts: CheckupOptions,
+   rootOpts: CliOptions,
+   shouldUpload: boolean,
+   uploadExplicitlyRequested: boolean
+ ): { config: UploadConfig; projectWasGenerated: boolean } | null | undefined {
+   if (!shouldUpload) return undefined;
+
+   const { apiKey } = getConfig(rootOpts);
+   if (!apiKey) {
+     if (uploadExplicitlyRequested) {
+       console.error("Error: API key is required for upload");
+       console.error("Tip: run 'postgresai auth' or pass --api-key / set PGAI_API_KEY");
+       return null; // Signal to exit
+     }
+     return undefined; // Skip upload silently
+   }
+
+   const cfg = config.readConfig();
+   const { apiBaseUrl } = resolveBaseUrls(rootOpts, cfg);
+   let project = ((opts.project || cfg.defaultProject) || "").trim();
+   let projectWasGenerated = false;
+
+   if (!project) {
+     project = `project_${crypto.randomBytes(6).toString("hex")}`;
+     projectWasGenerated = true;
+     try {
+       config.writeConfig({ defaultProject: project });
+     } catch (e) {
+       const message = e instanceof Error ? e.message : String(e);
+       console.error(`Warning: Failed to save generated default project: ${message}`);
+     }
+   }
+
+   return {
+     config: { apiKey, apiBaseUrl, project },
+     projectWasGenerated,
+   };
+ }
+
+ /**
+  * Upload checkup reports to PostgresAI API.
+  */
+ async function uploadCheckupReports(
+   uploadCfg: UploadConfig,
+   reports: Record<string, any>,
+   spinner: ReturnType<typeof createTtySpinner>,
+   logUpload: (msg: string) => void
+ ): Promise<UploadSummary> {
+   spinner.update("Creating remote checkup report");
+   const created = await withRetry(
+     () => createCheckupReport({
+       apiKey: uploadCfg.apiKey,
+       apiBaseUrl: uploadCfg.apiBaseUrl,
+       project: uploadCfg.project,
+     }),
+     { maxAttempts: 3 },
+     (attempt, err, delayMs) => {
+       const errMsg = err instanceof Error ? err.message : String(err);
+       logUpload(`[Retry ${attempt}/3] createCheckupReport failed: ${errMsg}, retrying in ${delayMs}ms...`);
+     }
+   );
+
+   const reportId = created.reportId;
+   logUpload(`Created remote checkup report: ${reportId}`);
+
+   const uploaded: Array<{ checkId: string; filename: string; chunkId: number }> = [];
+   for (const [checkId, report] of Object.entries(reports)) {
+     spinner.update(`Uploading ${checkId}.json`);
+     const jsonText = JSON.stringify(report, null, 2);
+     const r = await withRetry(
+       () => uploadCheckupReportJson({
+         apiKey: uploadCfg.apiKey,
+         apiBaseUrl: uploadCfg.apiBaseUrl,
+         reportId,
+         filename: `${checkId}.json`,
+         checkId,
+         jsonText,
+       }),
+       { maxAttempts: 3 },
+       (attempt, err, delayMs) => {
+         const errMsg = err instanceof Error ? err.message : String(err);
+         logUpload(`[Retry ${attempt}/3] Upload ${checkId}.json failed: ${errMsg}, retrying in ${delayMs}ms...`);
+       }
+     );
+     uploaded.push({ checkId, filename: `${checkId}.json`, chunkId: r.reportChunkId });
+   }
+   logUpload("Upload completed");
+
+   return { project: uploadCfg.project, reportId, uploaded };
+ }
+
+ /**
+  * Write checkup reports to files.
+  */
+ function writeReportFiles(reports: Record<string, any>, outputPath: string): void {
+   for (const [checkId, report] of Object.entries(reports)) {
+     const filePath = path.join(outputPath, `${checkId}.json`);
+     fs.writeFileSync(filePath, JSON.stringify(report, null, 2), "utf8");
+     console.log(`✓ ${checkId}: ${filePath}`);
+   }
+ }
+
+ /**
+  * Print upload summary to console.
+  */
+ function printUploadSummary(
+   summary: UploadSummary,
+   projectWasGenerated: boolean,
+   useStderr: boolean
+ ): void {
+   const out = useStderr ? console.error : console.log;
+   out("\nCheckup report uploaded");
+   out("======================\n");
+   if (projectWasGenerated) {
+     out(`Project: ${summary.project} (generated and saved as default)`);
+   } else {
+     out(`Project: ${summary.project}`);
+   }
+   out(`Report ID: ${summary.reportId}`);
+   out("View in Console: console.postgres.ai → Support → checkup reports");
+   out("");
+   out("Files:");
+   for (const item of summary.uploaded) {
+     out(`- ${item.checkId}: ${item.filename}`);
+   }
+ }
+
+ // ============================================================================
+ // CLI configuration
+ // ============================================================================
+
 /**
  * CLI configuration options
  */
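
The upload helpers above wrap every API call in `withRetry` from `../lib/checkup-api`, a module this diff does not include. Judging only from the call sites (`withRetry(fn, { maxAttempts: 3 }, (attempt, err, delayMs) => …)`), a compatible helper could look like the sketch below; the option names, defaults, and exponential backoff policy are assumptions, not the package's actual implementation.

```typescript
// Illustrative sketch of a retry wrapper matching the call shape used above.
// Everything here (defaults, backoff, option names) is assumed.
interface RetryOptions {
  maxAttempts?: number; // total attempts, including the first call
  baseDelayMs?: number; // delay before the first retry
}

type OnRetry = (attempt: number, err: unknown, delayMs: number) => void;

async function withRetry<T>(
  fn: () => Promise<T>,
  opts: RetryOptions = {},
  onRetry?: OnRetry
): Promise<T> {
  const maxAttempts = opts.maxAttempts ?? 3;
  const baseDelayMs = opts.baseDelayMs ?? 1000;

  let lastError: unknown;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      if (attempt === maxAttempts) break;
      const delayMs = baseDelayMs * 2 ** (attempt - 1); // 1s, 2s, 4s, ...
      onRetry?.(attempt, err, delayMs); // drives the "[Retry n/3] ... retrying in Xms" log lines
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw lastError;
}
```

Whatever the real helper adds on top of this (jitter, filtering of non-retryable errors), the CLI code above depends only on this call shape.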
@@ -286,6 +538,20 @@ program
     "UI base URL for browser routes (overrides PGAI_UI_BASE_URL)"
   );
 
+ program
+   .command("set-default-project <project>")
+   .description("store default project for checkup uploads")
+   .action(async (project: string) => {
+     const value = (project || "").trim();
+     if (!value) {
+       console.error("Error: project is required");
+       process.exitCode = 1;
+       return;
+     }
+     config.writeConfig({ defaultProject: value });
+     console.log(`Default project saved: ${value}`);
+   });
+
 program
   .command("prepare-db [conn]")
   .description("prepare database for monitoring: create monitoring user, required view(s), and grant permissions (idempotent)")
@@ -613,6 +879,143 @@ program
     }
   });
 
+ program
+   .command("checkup [conn]")
+   .description("generate health check reports directly from PostgreSQL (express mode)")
+   .option("--check-id <id>", `specific check to run: ${Object.keys(CHECK_INFO).join(", ")}, or ALL`, "ALL")
+   .option("--node-name <name>", "node name for reports", "node-01")
+   .option("--output <path>", "output directory for JSON files")
+   .option("--[no-]upload", "upload JSON results to PostgresAI (default: enabled; requires API key)", undefined)
+   .option(
+     "--project <project>",
+     "project name or ID for remote upload (used with --upload; defaults to config defaultProject; auto-generated on first run)"
+   )
+   .option("--json", "output JSON to stdout (implies --no-upload)")
+   .addHelpText(
+     "after",
+     [
+       "",
+       "Available checks:",
+       ...Object.entries(CHECK_INFO).map(([id, title]) => ` ${id}: ${title}`),
+       "",
+       "Examples:",
+       " postgresai checkup postgresql://user:pass@host:5432/db",
+       " postgresai checkup postgresql://user:pass@host:5432/db --check-id A003",
+       " postgresai checkup postgresql://user:pass@host:5432/db --output ./reports",
+       " postgresai checkup postgresql://user:pass@host:5432/db --project my_project",
+       " postgresai set-default-project my_project",
+       " postgresai checkup postgresql://user:pass@host:5432/db",
+       " postgresai checkup postgresql://user:pass@host:5432/db --no-upload --json",
+     ].join("\n")
+   )
+   .action(async (conn: string | undefined, opts: CheckupOptions, cmd: Command) => {
+     if (!conn) {
+       cmd.outputHelp();
+       process.exitCode = 1;
+       return;
+     }
+
+     const shouldPrintJson = !!opts.json;
+     const uploadExplicitlyRequested = opts.upload === true;
+     const uploadExplicitlyDisabled = opts.upload === false || shouldPrintJson;
+     let shouldUpload = !uploadExplicitlyDisabled;
+
+     // Preflight: validate/create output directory BEFORE connecting / running checks.
+     const outputPath = prepareOutputDirectory(opts.output);
+     if (outputPath === null) {
+       process.exitCode = 1;
+       return;
+     }
+
+     // Preflight: validate upload flags/credentials BEFORE connecting / running checks.
+     const rootOpts = program.opts() as CliOptions;
+     const uploadResult = prepareUploadConfig(opts, rootOpts, shouldUpload, uploadExplicitlyRequested);
+     if (uploadResult === null) {
+       process.exitCode = 1;
+       return;
+     }
+     const uploadCfg = uploadResult?.config;
+     const projectWasGenerated = uploadResult?.projectWasGenerated ?? false;
+     shouldUpload = !!uploadCfg;
+
+     // Connect and run checks
+     const adminConn = resolveAdminConnection({
+       conn,
+       envPassword: process.env.PGPASSWORD,
+     });
+     let client: Client | undefined;
+     const spinnerEnabled = !!process.stdout.isTTY && shouldUpload;
+     const spinner = createTtySpinner(spinnerEnabled, "Connecting to Postgres");
+
+     try {
+       spinner.update("Connecting to Postgres");
+       const connResult = await connectWithSslFallback(Client, adminConn);
+       client = connResult.client as Client;
+
+       // Generate reports
+       let reports: Record<string, any>;
+       if (opts.checkId === "ALL") {
+         reports = await generateAllReports(client, opts.nodeName, (p) => {
+           spinner.update(`Running ${p.checkId}: ${p.checkTitle} (${p.index}/${p.total})`);
+         });
+       } else {
+         const checkId = opts.checkId.toUpperCase();
+         const generator = REPORT_GENERATORS[checkId];
+         if (!generator) {
+           spinner.stop();
+           console.error(`Unknown check ID: ${opts.checkId}`);
+           console.error(`Available: ${Object.keys(CHECK_INFO).join(", ")}, ALL`);
+           process.exitCode = 1;
+           return;
+         }
+         spinner.update(`Running ${checkId}: ${CHECK_INFO[checkId] || checkId}`);
+         reports = { [checkId]: await generator(client, opts.nodeName) };
+       }
+
+       // Upload to PostgresAI API (if configured)
+       let uploadSummary: UploadSummary | undefined;
+       if (uploadCfg) {
+         const logUpload = (msg: string): void => {
+           (shouldPrintJson ? console.error : console.log)(msg);
+         };
+         uploadSummary = await uploadCheckupReports(uploadCfg, reports, spinner, logUpload);
+       }
+
+       spinner.stop();
+
+       // Write to files (if output path specified)
+       if (outputPath) {
+         writeReportFiles(reports, outputPath);
+       }
+
+       // Print upload summary
+       if (uploadSummary) {
+         printUploadSummary(uploadSummary, projectWasGenerated, shouldPrintJson);
+       }
+
+       // Output JSON to stdout
+       if (shouldPrintJson || (!shouldUpload && !opts.output)) {
+         console.log(JSON.stringify(reports, null, 2));
+       }
+     } catch (error) {
+       if (error instanceof RpcError) {
+         for (const line of formatRpcErrorForDisplay(error)) {
+           console.error(line);
+         }
+       } else {
+         const message = error instanceof Error ? error.message : String(error);
+         console.error(`Error: ${message}`);
+       }
+       process.exitCode = 1;
+     } finally {
+       // Always stop spinner to prevent interval leak (idempotent - safe to call multiple times)
+       spinner.stop();
+       if (client) {
+         await client.end();
+       }
+     }
+   });
+
 /**
  * Stub function for not implemented commands
  */
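
`CHECK_INFO`, `REPORT_GENERATORS`, and `generateAllReports` come from `../lib/checkup`, which is also outside this diff. From the way the `checkup` command uses them, their shapes are roughly as follows; the field names and types below are inferred from the call sites and should be treated as assumptions.

```typescript
// Sketch of the surface the checkup command appears to rely on (inferred, not authoritative).
import type { Client } from "pg";

// checkId -> human-readable title, used for --check-id validation and help text.
type CheckInfo = Record<string, string>;

// One generator per check: a connected client plus a node name in,
// a JSON-serializable report out (written to disk and/or uploaded).
type ReportGenerator = (client: Client, nodeName: string) => Promise<Record<string, unknown>>;
type ReportGenerators = Record<string, ReportGenerator>;

// Progress callback shape used when running ALL checks (drives the TTY spinner).
interface CheckProgress {
  checkId: string;    // e.g. "A003"
  checkTitle: string; // human-readable title shown in the spinner
  index: number;      // 1-based position in the run
  total: number;      // total number of checks
}

declare const CHECK_INFO: CheckInfo;
declare const REPORT_GENERATORS: ReportGenerators;
declare function generateAllReports(
  client: Client,
  nodeName: string,
  onProgress?: (p: CheckProgress) => void
): Promise<Record<string, Record<string, unknown>>>;
```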
@@ -1596,8 +1999,21 @@ auth
       return;
     }
 
+     // Read existing config to check for defaultProject before updating
+     const existingConfig = config.readConfig();
+     const existingProject = existingConfig.defaultProject;
+
     config.writeConfig({ apiKey: trimmedKey });
+     // When API key is set directly, only clear orgId (org selection may differ).
+     // Preserve defaultProject to avoid orphaning historical reports.
+     // If the new key lacks access to the project, upload will fail with a clear error.
+     config.deleteConfigKeys(["orgId"]);
+
     console.log(`API key saved to ${config.getConfigPath()}`);
+     if (existingProject) {
+       console.log(`Note: Your default project "${existingProject}" has been preserved.`);
+       console.log(` If this key belongs to a different account, use --project to specify a new one.`);
+     }
     return;
   }
 
@@ -1622,10 +2038,10 @@ auth
   const requestedPort = opts.port || 0; // 0 = OS assigns available port
   const callbackServer = authServer.createCallbackServer(requestedPort, params.state, 120000); // 2 minute timeout
 
-   // Wait a bit for server to start and get port
-   await new Promise(resolve => setTimeout(resolve, 100));
-   const actualPort = callbackServer.getPort();
-   const redirectUri = `http://localhost:${actualPort}/callback`;
+   // Wait for server to start and get the actual port
+   const actualPort = await callbackServer.ready;
+   // Use 127.0.0.1 to match the server bind address (avoids IPv6 issues on some hosts)
+   const redirectUri = `http://127.0.0.1:${actualPort}/callback`;
 
   console.log(`Callback server listening on port ${actualPort}`);
 
@@ -1770,15 +2186,31 @@ auth
     const orgId = result.org_id || result?.[0]?.result?.org_id; // There is a bug with PostgREST Caching that may return an array, not single object, it's a workaround to support both cases.
 
     // Step 6: Save token to config
+     // Check if org changed to decide whether to preserve defaultProject
+     const existingConfig = config.readConfig();
+     const existingOrgId = existingConfig.orgId;
+     const existingProject = existingConfig.defaultProject;
+     const orgChanged = existingOrgId && existingOrgId !== orgId;
+
     config.writeConfig({
       apiKey: apiToken,
       baseUrl: apiBaseUrl,
       orgId: orgId,
     });
+
+     // Only clear defaultProject if org actually changed
+     if (orgChanged && existingProject) {
+       config.deleteConfigKeys(["defaultProject"]);
+       console.log(`\nNote: Organization changed (${existingOrgId} → ${orgId}).`);
+       console.log(` Default project "${existingProject}" has been cleared.`);
+     }
 
     console.log("\nAuthentication successful!");
     console.log(`API key saved to: ${config.getConfigPath()}`);
     console.log(`Organization ID: ${orgId}`);
+     if (!orgChanged && existingProject) {
+       console.log(`Default project: ${existingProject} (preserved)`);
+     }
     console.log(`\nYou can now use the CLI without specifying an API key.`);
     process.exit(0);
   } catch (err) {
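
The auth flow change above replaces a fixed 100 ms `setTimeout` plus `callbackServer.getPort()` with `await callbackServer.ready`, so the redirect URI is built only after the OS has actually bound a port (which matters when the requested port is 0). `../lib/auth-server` is not part of this diff; one common way to expose such a readiness promise is sketched below, with all names and details assumed rather than taken from the package.

```typescript
// Illustrative sketch only: a callback server exposing a `ready` promise
// that resolves with the actually bound port. Not the package's implementation.
import * as http from "node:http";
import type { AddressInfo } from "node:net";

interface CallbackServer {
  ready: Promise<number>; // resolves once listening, with the real port
  close: () => void;
}

function createCallbackServer(requestedPort: number): CallbackServer {
  const server = http.createServer((_req, res) => {
    // ...validate the OAuth `state`, capture the code, respond to the browser...
    res.end("You can close this tab.");
  });

  const ready = new Promise<number>((resolve, reject) => {
    server.once("error", reject);
    // Bind to 127.0.0.1 explicitly so it matches the redirect URI the CLI builds.
    server.listen(requestedPort, "127.0.0.1", () => {
      resolve((server.address() as AddressInfo).port); // real port, even when requestedPort is 0
    });
  });

  return { ready, close: () => server.close() };
}
```

Awaiting `ready` removes the startup race that the old 100 ms sleep only papered over, and using `127.0.0.1` in the redirect URI keeps it consistent with the address the server is bound to.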
package/bun.lock CHANGED
@@ -14,6 +14,8 @@
     "@types/bun": "^1.1.14",
     "@types/js-yaml": "^4.0.9",
     "@types/pg": "^8.15.6",
+     "ajv": "^8.17.1",
+     "ajv-formats": "^3.0.1",
     "typescript": "^5.3.3",
   },
 },
@@ -129,7 +131,7 @@
 
 "jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="],
 
- "js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": "bin/js-yaml.js" }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="],
+ "js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="],
 
 "json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="],
 
package/bunfig.toml ADDED
@@ -0,0 +1,11 @@
+ # Bun configuration for postgres_ai CLI
+ # https://bun.sh/docs/runtime/bunfig
+
+ [test]
+ # Default timeout for all tests (30 seconds)
+ # Integration tests that connect to databases need longer timeouts
+ timeout = 30000
+
+ # Coverage settings (if needed in future)
+ # coverage = true
+ # coverageDir = "coverage"